2010-01-04 07:37:45 +03:00
|
|
|
import os
|
2015-02-10 03:08:18 +03:00
|
|
|
import errno
|
2010-01-04 07:37:45 +03:00
|
|
|
|
2015-02-10 22:43:07 +03:00
|
|
|
from mercurial import bookmarks
|
2015-01-17 01:48:56 +03:00
|
|
|
from mercurial import commands
|
|
|
|
from mercurial import encoding
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import error
|
|
|
|
from mercurial import exchange
|
2014-10-03 20:43:48 +04:00
|
|
|
from mercurial import extensions
|
2010-01-04 07:37:45 +03:00
|
|
|
from mercurial import hg
|
2015-02-10 20:18:13 +03:00
|
|
|
from mercurial import localrepo
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import namespaces
|
2015-02-12 10:34:56 +03:00
|
|
|
from mercurial import obsolete
|
2014-04-01 06:22:23 +04:00
|
|
|
from mercurial import repoview
|
2014-03-18 22:46:10 +04:00
|
|
|
from mercurial import revset
|
2015-02-10 22:52:19 +03:00
|
|
|
from mercurial import scmutil
|
2014-03-18 22:46:10 +04:00
|
|
|
from mercurial import templatekw
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import url
|
|
|
|
from mercurial import util
|
2015-02-10 22:52:19 +03:00
|
|
|
from mercurial.i18n import _
|
|
|
|
from mercurial.node import hex, short
|
2010-01-11 02:24:02 +03:00
|
|
|
from hgext import schemes
|
2010-01-04 07:37:45 +03:00
|
|
|
|
2015-01-15 01:45:24 +03:00
|
|
|
# In-memory cache of remote names, populated by loadremotenames().
# Maps each kind ("bookmarks", "branches") to a dict of name -> node(s).
_remotenames = {
    "bookmarks": {},
    "branches": {},
}
|
2014-12-16 20:33:01 +03:00
|
|
|
|
2014-10-03 20:43:48 +04:00
|
|
|
def expush(orig, repo, remote, *args, **kwargs):
    """Wrapper around exchange.push that refreshes .hg/remotenames after
    the push completes."""
    result = orig(repo, remote, *args, **kwargs)
    pullremotenames(repo, remote)
    return result
|
2014-10-03 20:43:48 +04:00
|
|
|
|
|
|
|
def expull(orig, repo, remote, *args, **kwargs):
    """Wrapper around exchange.pull that refreshes .hg/remotenames after
    the pull completes."""
    result = orig(repo, remote, *args, **kwargs)
    pullremotenames(repo, remote)
    return result
|
|
|
|
|
|
|
|
def pullremotenames(repo, remote):
    """Record the remote's branch heads and bookmarks under the active path
    name, then reload the in-memory cache and the distance cache.

    ``remote`` is a peer; ``repo`` is the local repository being updated.
    """
    lock = repo.lock()
    try:
        # map the peer back to a configured path name; skip if unknown
        path = activepath(repo.ui, remote)
        if path:
            # on a push, we don't want to keep obsolete heads since
            # they won't show up as heads on the next pull, so we
            # remove them here otherwise we would require the user
            # to issue a pull to refresh .hg/remotenames
            bmap = {}
            # unfiltered view so obsolete nodes can still be looked up
            repo = repo.unfiltered()
            for branch, nodes in remote.branchmap().iteritems():
                bmap[branch] = []
                for node in nodes:
                    if node in repo and not repo[node].obsolete():
                        bmap[branch].append(node)
            saveremotenames(repo, path, bmap, remote.listkeys('bookmarks'))
    finally:
        lock.release()

    # refresh in-memory remote names and the bookmark distance cache
    loadremotenames(repo)
    writedistance(repo)
|
|
|
|
|
2014-04-01 06:22:23 +04:00
|
|
|
def blockerhook(orig, repo, *args, **kwargs):
    """Wrapper for repoview._getdynamicblockers.

    When the repo carries the _unblockhiddenremotenames flag (set by exlog
    for ``hg log --remote``), add the revs of all cached remote names to the
    hidden-changeset blockers so they stay visible.
    """
    blockers = orig(repo)

    unblock = util.safehasattr(repo, '_unblockhiddenremotenames')
    if not unblock:
        return blockers

    # add remotenames to blockers by looping over all names in our own cache
    cl = repo.changelog
    for remotename in _remotenames.keys():
        rname = 'remote' + remotename
        try:
            ns = repo.names[rname]
        except KeyError:
            # namespace not registered (disabled via config)
            continue
        for name in ns.listnames(repo):
            blockers.update(cl.rev(node) for node in ns.nodes(repo, name))

    return blockers
|
|
|
|
|
2015-01-29 01:03:19 +03:00
|
|
|
def exupdatefromremote(orig, ui, repo, remotemarks, path, trfunc, explicit=()):
    """Only let remote bookmarks overwrite local ones when the user has
    opted in via remotenames.syncbookmarks; otherwise skip silently."""
    if not ui.configbool('remotenames', 'syncbookmarks', False):
        ui.debug('remotenames: skipped syncing local bookmarks\n')
        return
    return orig(ui, repo, remotemarks, path, trfunc, explicit)
|
2015-01-29 01:03:19 +03:00
|
|
|
|
2015-01-29 04:45:48 +03:00
|
|
|
def exclone(orig, ui, *args, **opts):
    """
    We may not want local bookmarks on clone... but we always want remotenames!
    """
    srcpeer, dstpeer = orig(ui, *args, **opts)

    # always record remote names in the fresh clone
    pullremotenames(dstpeer.local(), srcpeer)

    if not ui.configbool('remotenames', 'syncbookmarks', False):
        ui.debug('remotenames: removing cloned bookmarks\n')
        repo = dstpeer.local()
        wlock = repo.wlock()
        try:
            try:
                # drop the bookmarks file copied over by the clone
                repo.vfs.unlink('bookmarks')
            except OSError, inst:
                # a missing file is fine; anything else is a real error
                if inst.errno != errno.ENOENT:
                    raise
        finally:
            wlock.release()

    return (srcpeer, dstpeer)
|
|
|
|
|
2015-02-10 20:18:13 +03:00
|
|
|
def excommit(orig, repo, *args, **opts):
    """Wrapper for localrepository.commit that refreshes the bookmark
    distance cache after the commit."""
    committed = orig(repo, *args, **opts)
    writedistance(repo)
    return committed
|
|
|
|
|
2015-02-10 07:16:15 +03:00
|
|
|
def exupdate(orig, repo, *args, **opts):
    """Wrapper for hg.updaterepo that refreshes the bookmark distance
    cache after the working copy update."""
    outcome = orig(repo, *args, **opts)
    writedistance(repo)
    return outcome
|
|
|
|
|
2015-02-10 07:38:30 +03:00
|
|
|
def exsetcurrent(orig, repo, mark):
    """Wrapper for bookmarks.setcurrent that refreshes the bookmark
    distance cache once the active bookmark changes."""
    outcome = orig(repo, mark)
    writedistance(repo)
    return outcome
|
|
|
|
|
2015-03-20 06:51:38 +03:00
|
|
|
|
2010-01-04 07:37:45 +03:00
|
|
|
def reposetup(ui, repo):
    """Register the remotebookmarks/hoistedbookmarks/remotebranches
    namespaces on a local repository, based on the remotenames config.

    Bug fix: the hoisted-bookmark namespace's ``nodemap`` previously read
    ``node2hoists.get(name, [])`` — ``name`` was the leaked loop variable
    from the comprehension above (a late-binding closure), not the lambda's
    ``node`` parameter, so lookups used the wrong key. It now uses ``node``,
    matching the other two namespaces.
    """
    if not repo.local():
        return

    loadremotenames(repo)
    ns = namespaces.namespace

    if ui.configbool('remotenames', 'bookmarks', True):
        mark2nodes = _remotenames.get('bookmarks')
        node2marks = {}
        for name, node in mark2nodes.iteritems():
            # node is a list; index 0 is the bookmark's node
            node2marks.setdefault(node[0], []).append(name)
        remotebookmarkns = ns(
            'remotebookmarks',
            templatename='remotebookmarks',
            logname='bookmark',
            colorname='remotebookmarks',
            listnames=lambda repo: mark2nodes.keys(),
            namemap=lambda repo, name: mark2nodes.get(name),
            nodemap=lambda repo, node: node2marks.get(node, []))
        repo.names.addnamespace(remotebookmarkns)

        # hoisting only works if there are remote bookmarks
        hoist = ui.config('remotenames', 'hoist')
        if hoist:
            hoist += '/'

        if hoist:
            hoist2nodes = {}
            node2hoists = {}
            for name, node in mark2nodes.iteritems():
                if name.startswith(hoist):
                    hoist2nodes[name[len(hoist):]] = node
                    # NOTE(review): this stores the full remote name while
                    # hoist2nodes keys are stripped — confirm whether the
                    # hoisted (stripped) name was intended here
                    node2hoists.setdefault(node[0], []).append(name)
            hoistedmarkns = ns(
                'hoistedbookmarks',
                templatename='hoistedbookmarks',
                logname='hoistedname',
                colorname='hoistedname',
                listnames=lambda repo: hoist2nodes.keys(),
                namemap=lambda repo, name: hoist2nodes.get(name),
                # fixed: look up by the node argument, not the loop var
                nodemap=lambda repo, node: node2hoists.get(node, []))
            repo.names.addnamespace(hoistedmarkns)

    if ui.configbool('remotenames', 'branches', True):
        branch2nodes = _remotenames.get('branches')
        node2branch = {}
        for name, nodes in branch2nodes.iteritems():
            for node in nodes:
                node2branch[node] = [name]
        remotebranchns = ns(
            'remotebranches',
            templatename='remotebranches',
            logname='branch',
            colorname='remotebranch',
            listnames=lambda repo: branch2nodes.keys(),
            namemap=lambda repo, name: branch2nodes.get(name),
            nodemap=lambda repo, node: node2branch.get(node, []))
        repo.names.addnamespace(remotebranchns)
|
2014-12-17 09:23:41 +03:00
|
|
|
|
2015-03-12 00:57:32 +03:00
|
|
|
def _tracking(ui):
|
2015-03-12 22:53:08 +03:00
|
|
|
# omg default true
|
|
|
|
return ui.configbool('remotenames', 'tracking', True)
|
2015-03-12 00:57:32 +03:00
|
|
|
|
2015-03-14 08:12:55 +03:00
|
|
|
def _setuprebase(rebase):
    """Wrap the rebase command so --dest can default to the destination
    tracked by the active bookmark (see exrebase)."""
    extensions.wrapcommand(rebase.cmdtable, 'rebase', exrebase)
|
2015-03-12 01:38:19 +03:00
|
|
|
|
|
|
|
def exrebase(orig, ui, repo, **opts):
    """Wrapper for rebase: when no destination/source/rev/base was given
    and the active bookmark tracks a remote name, rebase onto that name."""
    current = bookmarks.readcurrent(repo)
    userpicked = (opts['dest'] or opts['source'] or opts['rev'] or
                  opts['base'])

    if current and not userpicked:
        tracking = _readtracking(repo)
        if current in tracking:
            opts['dest'] = tracking[current]

    return orig(ui, repo, **opts)
|
|
|
|
|
2015-03-13 01:55:16 +03:00
|
|
|
def exstrip(orig, ui, repo, *args, **opts):
    """Wrapper for strip that refreshes the bookmark distance cache."""
    outcome = orig(ui, repo, *args, **opts)
    writedistance(repo)
    return outcome
|
|
|
|
|
2015-03-13 02:50:27 +03:00
|
|
|
def exhistedit(orig, ui, repo, *args, **opts):
    """Wrapper for histedit that refreshes the bookmark distance cache."""
    outcome = orig(ui, repo, *args, **opts)
    writedistance(repo)
    return outcome
|
|
|
|
|
2015-03-14 01:57:43 +03:00
|
|
|
def expaths(orig, ui, repo, *args, **opts):
    """allow adding and removing remote paths

    This is very hacky and only exists as an experimentation.
    """
    delete = opts.get('delete')
    add = opts.get('add')
    if delete:
        # find the first section and remote path that matches, and delete that
        foundpaths = False
        oldhgrc = repo.vfs.read('hgrc').splitlines(True)
        f = repo.vfs('hgrc', 'w')
        for line in oldhgrc:
            if '[paths]' in line:
                foundpaths = True
            # copy every line except the matching path entry
            if not (foundpaths and line.strip().startswith(delete)):
                f.write(line)
        f.close()
        # drop the stored names and refresh the distance cache
        saveremotenames(repo, delete)
        writedistance(repo)
        return

    if add:
        # find the first section that matches, then look for previous value; if
        # not found add a new entry
        foundpaths = False
        oldhgrc = repo.vfs.read('hgrc').splitlines(True)
        f = repo.vfs('hgrc', 'w')
        done = False
        for line in oldhgrc:
            if '[paths]' in line:
                foundpaths = True
            if foundpaths and line.strip().startswith(add):
                # overwrite the existing entry in place
                done = True
                line = '%s = %s\n' % (add, args[0])
            f.write(line)

        # did we not find an existing path?
        if not done:
            done = True
            f.write("%s = %s\n" % (add, args[0]))

        f.close()
        return

    # no --add/--delete: fall through to the stock paths command
    return orig(ui, repo, *args)
|
|
|
|
|
2015-01-17 01:48:56 +03:00
|
|
|
def extsetup(ui):
    """Wire the extension into core: wrap exchange/bookmark/clone/commit
    functions, extend command tables with remote-name flags, and register
    the custom push discovery step and template keyword."""
    extensions.wrapfunction(exchange, 'push', expush)
    extensions.wrapfunction(exchange, 'pull', expull)
    extensions.wrapfunction(repoview, '_getdynamicblockers', blockerhook)
    extensions.wrapfunction(bookmarks, 'updatefromremote', exupdatefromremote)
    extensions.wrapfunction(bookmarks, 'setcurrent', exsetcurrent)
    extensions.wrapfunction(hg, 'clone', exclone)
    extensions.wrapfunction(hg, 'updaterepo', exupdate)
    extensions.wrapfunction(localrepo.localrepository, 'commit', excommit)

    entry = extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks)
    entry[1].append(('a', 'all', None, 'show both remote and local bookmarks'))
    entry[1].append(('', 'remote', None, 'show only remote bookmarks'))

    # tracking flags and the rebase integration only when tracking is on
    if _tracking(ui):
        entry[1].append(('t', 'track', '', 'track this bookmark or remote name',
                         'BOOKMARK'))
        entry[1].append(('u', 'untrack', None,
                         'remove tracking for this bookmark',
                         'BOOKMARK'))
        try:
            rebase = extensions.find('rebase')
            if rebase:
                _setuprebase(rebase)
        except KeyError:
            # rebase isn't on
            pass

    entry = extensions.wrapcommand(commands.table, 'branches', exbranches)
    entry[1].append(('a', 'all', None, 'show both remote and local branches'))
    entry[1].append(('', 'remote', None, 'show only remote branches'))

    entry = extensions.wrapcommand(commands.table, 'log', exlog)
    entry[1].append(('', 'remote', None, 'show remote names even if hidden'))

    entry = extensions.wrapcommand(commands.table, 'push', expushcmd)
    entry[1].append(('t', 'to', '', 'push revs to this bookmark', 'BOOKMARK'))
    entry[1].append(('d', 'delete', '', 'delete remote bookmark', 'BOOKMARK'))

    entry = extensions.wrapcommand(commands.table, 'paths', expaths)
    entry[1].append(('d', 'delete', '', 'delete remote path', 'NAME'))
    entry[1].append(('a', 'add', '', 'add remote path', 'NAME PATH'))

    extensions.wrapcommand(commands.table, 'pull', expullcmd)

    entry = extensions.wrapcommand(commands.table, 'clone', exclonecmd)
    entry[1].append(('', 'mirror', None, 'sync all bookmarks'))

    # custom bookmark discovery step used by hg push --to/--delete
    exchange.pushdiscoverymapping['bookmarks'] = expushdiscoverybookmarks

    templatekw.keywords['remotenames'] = remotenameskw

    try:
        strip = extensions.find('strip')
        if strip:
            extensions.wrapcommand(strip.cmdtable, 'strip', exstrip)
    except KeyError:
        # strip isn't on
        pass

    try:
        histedit = extensions.find('histedit')
        if histedit:
            extensions.wrapcommand(histedit.cmdtable, 'histedit', exhistedit)
    except KeyError:
        # histedit isn't on
        pass
|
2015-03-13 01:55:16 +03:00
|
|
|
|
2015-02-10 20:15:37 +03:00
|
|
|
def exlog(orig, ui, repo, *args, **opts):
    """Wrapper for ``hg log``: with --remote, temporarily set the flag that
    blockerhook reads so hidden changesets with remote names stay visible."""
    # hack for logging that turns on the dynamic blockerhook
    wantremote = opts.get('remote')
    if wantremote:
        repo.__setattr__('_unblockhiddenremotenames', True)
    outcome = orig(ui, repo, *args, **opts)
    if wantremote:
        repo.__setattr__('_unblockhiddenremotenames', False)
    return outcome
|
|
|
|
|
2015-02-12 07:09:29 +03:00
|
|
|
# Module-level flags used to pass state from expushcmd into the push
# discovery hook (expushdiscoverybookmarks), which has no other channel.
# _pushto is True while a `hg push --to BOOKMARK` is in flight.
_pushto = False
# _delete holds the remote bookmark name being deleted, or None.
_delete = None
|
2015-02-10 22:52:19 +03:00
|
|
|
|
|
|
|
def expushdiscoverybookmarks(pushop):
    """Replacement 'bookmarks' push-discovery step.

    Handles three cases driven by the _pushto/_delete module flags:
    remote-bookmark deletion, a plain push (with an anonymous-head guard),
    and a targeted ``push --to BOOKMARK`` update.

    Bug fix: the deletion error message previously passed the bare format
    string ``_('remote bookmark %s does not exist')`` to util.Abort without
    applying the bookmark name, so users saw a literal ``%s``. The name is
    now interpolated.
    """
    repo = pushop.repo.unfiltered()
    remotemarks = pushop.remote.listkeys('bookmarks')
    force = pushop.force

    if _delete:
        if _delete not in remotemarks:
            # fixed: interpolate the bookmark name into the message
            raise util.Abort(_('remote bookmark %s does not exist') % _delete)
        # empty new value = delete on the remote
        pushop.outbookmarks.append([_delete, remotemarks[_delete], ''])
        return exchange._pushdiscoverybookmarks(pushop)

    if not _pushto:
        ret = exchange._pushdiscoverybookmarks(pushop)
        if not (repo.ui.configbool('remotenames', 'pushanonheads')
                or force):
            # check to make sure we don't push an anonymous head
            if pushop.revs:
                revs = set(pushop.revs)
            else:
                revs = set(repo.lookup(r) for r in repo.revs('head()'))
            revs -= set(pushop.remoteheads)
            # find heads that don't have a bookmark going with them
            for bookmark in pushop.bookmarks:
                rev = repo.lookup(bookmark)
                if rev in revs:
                    revs.remove(rev)
            # remove heads that advance bookmarks (old mercurial behavior)
            for bookmark, old, new in pushop.outbookmarks:
                rev = repo.lookup(new)
                if rev in revs:
                    revs.remove(rev)

            # we use known() instead of lookup() due to lookup throwing an
            # aborting error causing the connection to close
            anonheads = []
            knownlist = pushop.remote.known(revs)
            for node, known in zip(revs, knownlist):
                obs = repo[node].obsolete()
                closes = repo[node].closesbranch()
                if known or obs or closes:
                    continue
                anonheads.append(short(node))

            if anonheads:
                msg = _("push would create new anonymous heads (%s)")
                hint = _("use --force to override this warning")
                raise util.Abort(msg % ', '.join(sorted(anonheads)), hint=hint)
        return ret

    # --to path: exactly one bookmark and one rev set up by expushcmd
    bookmark = pushop.bookmarks[0]
    rev = pushop.revs[0]

    # allow new bookmark only if force is True
    old = ''
    if bookmark in remotemarks:
        old = remotemarks[bookmark]
    elif not force:
        msg = _('not creating new bookmark')
        hint = _('use --force to create a new bookmark')
        raise util.Abort(msg, hint=hint)

    # allow non-ff only if force is True
    if not force and old != '':
        if old not in repo:
            msg = _('remote bookmark revision is not in local repo')
            hint = _('pull and merge or rebase or use --force')
            raise util.Abort(msg, hint=hint)
        foreground = obsolete.foreground(repo, [repo.lookup(old)])
        if repo[rev].node() not in foreground:
            msg = _('pushed rev is not in the foreground of remote bookmark')
            hint = _('use --force flag to complete non-fast-forward update')
            raise util.Abort(msg, hint=hint)
        if repo[old] == repo[rev]:
            repo.ui.warn(_('remote bookmark already points at pushed rev\n'))
            return

    pushop.outbookmarks.append((bookmark, old, hex(rev)))
|
|
|
|
|
2015-03-14 01:50:02 +03:00
|
|
|
def _pushrevs(repo, ui, rev):
|
|
|
|
pushrev = ui.config('remotenames', 'pushrev')
|
2015-03-19 19:48:50 +03:00
|
|
|
if pushrev == '!':
|
|
|
|
return []
|
|
|
|
elif pushrev:
|
2015-03-14 01:50:02 +03:00
|
|
|
return [repo.lookup(pushrev)]
|
|
|
|
if rev:
|
|
|
|
return [repo.lookup(rev)]
|
|
|
|
return []
|
2015-03-10 10:06:51 +03:00
|
|
|
|
2015-03-14 03:17:49 +03:00
|
|
|
def expullcmd(orig, ui, repo, source="default", **opts):
    """Wrapper for pull that translates a renamed path back to its
    configured name before handing off to the stock command."""
    inverse = dict((v, k) for k, v in _getrenames(ui).iteritems())
    return orig(ui, repo, inverse.get(source, source), **opts)
|
|
|
|
|
2015-02-10 22:52:19 +03:00
|
|
|
def expushcmd(orig, ui, repo, dest=None, **opts):
    """Wrapper for ``hg push`` adding --to (targeted bookmark push),
    --delete (remote bookmark deletion), tracking-based destination
    defaulting, and path un-renaming.

    Bug fix: the hgsubversion special case called ``orig(...)`` without
    returning, so execution fell through to the normal path and pushed a
    second time. It now returns the result of that call.
    """
    # needed for discovery method
    global _pushto, _delete

    _delete = opts.get('delete')
    if _delete:
        flag = None
        for f in ('to', 'bookmark', 'branch', 'rev'):
            if opts.get(f):
                flag = f
                break
        if flag:
            msg = _('do not specify --delete and '
                    '--%s at the same time') % flag
            raise util.Abort(msg)
        # we want to skip pushing any changesets while deleting a remote
        # bookmark, so we send the null revision
        opts['rev'] = ['null']
        return orig(ui, repo, dest, **opts)

    revs = opts.get('rev')
    to = opts.get('to')

    paths = dict((path, url) for path, url in ui.configitems('paths'))
    revrenames = dict((v, k) for k, v in _getrenames(ui).iteritems())

    origdest = dest
    if not dest and not to and not revs and _tracking(ui):
        # default dest/to from the active bookmark's tracking entry
        current = bookmarks.readcurrent(repo)
        tracking = _readtracking(repo)
        if current and current in tracking:
            track = tracking[current]
            path, book = splitremotename(track)
            # un-rename a path, if needed
            path = revrenames.get(path, path)
            if book and path in paths:
                dest = path
                to = book

    # un-rename passed path
    dest = revrenames.get(dest, dest)

    # if dest was renamed to default but we aren't specifically requesting
    # to push to default, change dest to default-push, if available
    if not origdest and dest == 'default' and 'default-push' in paths:
        dest = 'default-push'

    # hgsubversion does funcky things on push. Just call it directly
    if dest in paths and paths[dest].startswith('svn+ssh'):
        # fixed: return here so we don't fall through and push twice
        return orig(ui, repo, dest, **opts)

    if not to:
        if ui.configbool('remotenames', 'forceto', False):
            msg = _('must specify --to when pushing')
            hint = _('see configuration option %s') % 'remotenames.forceto'
            raise util.Abort(msg, hint=hint)

        if not revs:
            opts['rev'] = _pushrevs(repo, ui, None)

        return orig(ui, repo, dest, **opts)

    if opts.get('bookmark'):
        msg = _('do not specify --to/-t and --bookmark/-B at the same time')
        raise util.Abort(msg)
    if opts.get('branch'):
        msg = _('do not specify --to/-t and --branch/-b at the same time')
        raise util.Abort(msg)

    # --to requires exactly one rev to update the bookmark to
    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
    else:
        revs = _pushrevs(repo, ui, '.')
    if len(revs) != 1:
        msg = _('--to requires exactly one rev to push')
        hint = _('use --rev BOOKMARK or omit --rev for current commit (.)')
        raise util.Abort(msg, hint=hint)
    rev = revs[0]

    # tell expushdiscoverybookmarks we are doing a targeted push
    _pushto = True

    # big can o' copypasta from exchange.push
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        if dest == "default-push":
            hint = _('see the "path" section in "hg help config"')
            raise util.Abort(_("default repository not configured!"),
                             hint=hint)
        else:
            raise

    # all checks pass, go for it!
    ui.status(_('pushing rev %s to destination %s bookmark %s\n') % (
        short(rev), dest, to))

    # TODO: subrepo stuff

    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           bookmarks=(to,))

    result = not pushop.cgresult
    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    _pushto = False
    return result
|
|
|
|
|
2015-03-14 04:02:21 +03:00
|
|
|
def exclonecmd(orig, ui, *args, **opts):
    """Wrapper for clone: --mirror turns on full bookmark syncing."""
    if opts['mirror']:
        ui.setconfig('remotenames', 'syncbookmarks', True, 'mirror-clone')
    orig(ui, *args, **opts)
|
|
|
|
|
2015-01-29 02:53:49 +03:00
|
|
|
def exbranches(orig, ui, repo, *args, **opts):
    """Wrapper for ``hg branches`` adding --all/--remote output of the
    remotebranches namespace, sorted by descending revision."""
    if not opts.get('remote'):
        # show the regular local branches first (unless remote-only)
        orig(ui, repo, *args, **opts)

    if opts.get('all') or opts.get('remote'):
        # exit early if namespace doesn't even exist
        namespace = 'remotebranches'
        if namespace not in repo.names:
            return

        ns = repo.names[namespace]
        label = 'log.' + ns.colorname
        fm = ui.formatter('branches', opts)

        # it seems overkill to hide displaying hidden remote branches
        repo = repo.unfiltered()

        # create a sorted by descending rev list
        revs = set()
        for name in ns.listnames(repo):
            for n in ns.nodes(repo, name):
                revs.add(repo.changelog.rev(n))

        for r in sorted(revs, reverse=True):
            ctx = repo[r]
            for name in ns.names(repo, ctx.node()):
                fm.startitem()
                # pad so the rev:node column lines up at 31 chars
                padsize = max(31 - len(str(r)) - encoding.colwidth(name), 0)

                tmplabel = label
                if ctx.obsolete():
                    tmplabel = tmplabel + ' changeset.obsolete'
                fm.write(ns.colorname, '%s', name, label=label)
                fmt = ' ' * padsize + ' %d:%s'
                fm.condwrite(not ui.quiet, 'rev node', fmt, r,
                             fm.hexfunc(ctx.node()), label=tmplabel)
                fm.plain('\n')
        fm.end()
|
|
|
|
|
2015-03-12 01:16:53 +03:00
|
|
|
def _readtracking(repo):
|
|
|
|
tracking = {}
|
|
|
|
try:
|
2015-03-12 01:38:19 +03:00
|
|
|
for line in repo.vfs.read('bookmarks.tracking').strip().split('\n'):
|
|
|
|
try:
|
|
|
|
book, track = line.strip().split(' ')
|
|
|
|
tracking[book] = track
|
|
|
|
except ValueError:
|
|
|
|
# corrupt file, ignore entry
|
|
|
|
pass
|
2015-03-12 01:16:53 +03:00
|
|
|
except IOError:
|
|
|
|
pass
|
|
|
|
return tracking
|
|
|
|
|
|
|
|
def _writetracking(repo, tracking):
    """Serialize a {bookmark: tracked name} dict to .hg/bookmarks.tracking,
    one "book track" pair per line."""
    lines = []
    for book, track in tracking.iteritems():
        lines.append('%s %s\n' % (book, track))
    repo.vfs.write('bookmarks.tracking', ''.join(lines))
|
2015-03-12 01:16:53 +03:00
|
|
|
|
2015-03-14 04:02:03 +03:00
|
|
|
def _removetracking(repo, bookmarks):
    """Drop tracking entries for the given bookmarks; rewrite the tracking
    file only when at least one entry was actually removed."""
    tracking = _readtracking(repo)
    changed = False
    for bmark in bookmarks:
        if bmark in tracking:
            del tracking[bmark]
            changed = True
    if changed:
        _writetracking(repo, tracking)
|
|
|
|
|
2015-01-29 02:53:49 +03:00
|
|
|
def exbookmarks(orig, ui, repo, *args, **opts):
    """Bookmark output is sorted by bookmark name.

    This has the side benefit of grouping all remote bookmarks by remote name.
    """
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')
    remote = opts.get('remote')
    track = opts.get('track')
    untrack = opts.get('untrack')

    disallowed = set(ui.configlist('remotenames', 'disallowedbookmarks'))

    # deleting a disallowed name is fine; creating/renaming to one is not
    if not delete:
        for name in args:
            if name in disallowed:
                raise util.Abort(_("bookmark '%s' not allowed by configuration")
                                 % name)

    if untrack:
        if track:
            msg = _('do not specify --untrack and --track at the same time')
            raise util.Abort(msg)
        _removetracking(repo, args)
        return

    # any mutating invocation: delegate to core, then maintain tracking
    if delete or rename or args or inactive:
        ret = orig(ui, repo, *args, **opts)
        if track:
            tracking = _readtracking(repo)
            for arg in args:
                tracking[arg] = track
            _writetracking(repo, tracking)
            # update the cache
            writedistance(repo)

        # also remove tracking for a deleted bookmark, if it exists
        if delete:
            _removetracking(repo, args)

        return ret

    # copy pasta from commands.py; need to patch core
    if not remote:
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            current = repo._bookmarkcurrent
            if bmark == current:
                prefix, label = '*', 'bookmarks.current'
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            pad = " " * (25 - encoding.colwidth(bmark))
            rev = repo.changelog.rev(n)
            h = hexfn(n)
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s', rev, h,
                         label=label)
            if ui.verbose:
                # verbose mode also shows [tracked-name: N ahead, M behind]
                rname, distance = distancefromtracked(repo, bmark)
                ab = ''
                if distance != (0, 0):
                    ab = ': %s ahead, %s behind' % distance
                if rname:
                    pad = " " * (25 - encoding.colwidth(str(rev)) -
                                 encoding.colwidth(str(h)))
                    fm.write('bookmark', pad + '[%s%s]', rname, ab, label=label)
            fm.data(active=(bmark == current))
            fm.plain('\n')
        fm.end()

    if remote or opts.get('all'):
        n = 'remotebookmarks'
        if n not in repo.names:
            return
        ns = repo.names[n]
        color = ns.colorname
        label = 'log.' + color

        fm = ui.formatter('bookmarks', opts)

        # it seems overkill to hide displaying hidden remote bookmarks
        repo = repo.unfiltered()

        for name in sorted(ns.listnames(repo)):
            node = ns.nodes(repo, name)[0]
            ctx = repo[node]
            fm.startitem()

            if not ui.quiet:
                fm.plain(' ')

            padsize = max(25 - encoding.colwidth(name), 0)
            fmt = ' ' * padsize + ' %d:%s'

            tmplabel = label
            if ctx.obsolete():
                tmplabel = tmplabel + ' changeset.obsolete'
            fm.write(color, '%s', name, label=label)
            fm.condwrite(not ui.quiet, 'rev node', fmt, ctx.rev(),
                         fm.hexfunc(node), label=tmplabel)
            fm.plain('\n')
        fm.end()
|
2015-01-29 02:27:57 +03:00
|
|
|
|
2014-03-21 22:37:03 +04:00
|
|
|
def activepath(ui, remote):
    """Return the [paths] alias that corresponds to `remote`, or ''.

    `remote` may be a peer/repo object or a plain URL string; the resolved
    path is compared against each configured path (after scheme expansion
    and, for http, auth-info stripping) and the matching alias name is
    returned, preferring a non-default alias over 'default'/'default-push'.
    Any [remotenames] rename.* configuration is applied to the result.
    """
    realpath = ''
    local = None
    try:
        local = remote.local()
    except AttributeError:
        pass

    # determine the remote path from the repo, if possible; else just
    # use the string given to us
    rpath = remote
    if local:
        rpath = getattr(remote, 'root', None)
        if rpath is None:
            # Maybe a localpeer? (hg@1ac628cd7113, 2.3)
            rpath = getattr(getattr(remote, '_repo', None),
                            'root', None)
    elif not isinstance(remote, str):
        try:
            rpath = remote._url
        except AttributeError:
            # older peers expose the location as `url` instead of `_url`
            rpath = remote.url

    for path, uri in ui.configitems('paths'):
        uri = ui.expandpath(expandscheme(ui, uri))
        if local:
            # compare filesystem paths with symlinks resolved
            uri = os.path.realpath(uri)
        else:
            if uri.startswith('http'):
                # strip authentication info from the URI; the API moved
                # across Mercurial versions, hence the fallback cascade
                try:
                    uri = url.url(uri).authinfo()[0]
                except AttributeError:
                    try:
                        uri = util.url(uri).authinfo()[0]
                    except AttributeError:
                        uri = url.getauthinfo(uri)[0]
        uri = uri.rstrip('/')
        # guard against hgsubversion nonsense
        if not isinstance(rpath, basestring):
            continue
        rpath = rpath.rstrip('/')
        if uri == rpath:
            realpath = path
            # prefer a non-default name to default
            if path != 'default' and path != 'default-push':
                break

    # apply any [remotenames] rename.<old> = <new> configuration
    renames = _getrenames(ui)
    realpath = renames.get(realpath, realpath)
    return realpath
|
2014-03-21 22:34:32 +04:00
|
|
|
|
2015-03-03 10:22:51 +03:00
|
|
|
# memoization
|
|
|
|
_renames = None
|
2015-03-12 10:06:11 +03:00
|
|
|
def _getrenames(ui):
|
2015-03-03 10:22:51 +03:00
|
|
|
global _renames
|
|
|
|
if _renames is None:
|
|
|
|
_renames = {}
|
|
|
|
for k, v in ui.configitems('remotenames'):
|
|
|
|
if k.startswith('rename.'):
|
|
|
|
_renames[k[7:]] = v
|
|
|
|
return _renames
|
|
|
|
|
2014-03-21 22:34:32 +04:00
|
|
|
def expandscheme(ui, uri):
    '''For a given uri, expand the scheme for it'''
    matching = [name for name in schemes.schemes.iterkeys()
                if uri.startswith('%s://' % name)]
    for name in matching:
        # TODO: refactor schemes so we don't
        # duplicate this logic
        ui.note(_('performing schemes expansion with '
                  'scheme %s\n') % name)
        scheme = hg.schemes[name]
        pieces = uri.split('://', 1)[1].split('/', scheme.parts)
        tail = ''
        if len(pieces) > scheme.parts:
            pieces, tail = pieces[:-1], pieces[-1]
        # template variables are 1-based positional parts of the uri
        subst = dict((str(idx + 1), piece)
                     for idx, piece in enumerate(pieces))
        uri = ''.join(scheme.templater.process(scheme.url, subst)) + tail
    return uri
|
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def splitremotename(remote):
    """Split 'remote/name' into (remote, name); name is '' when there is
    no '/' separator."""
    head, _sep, rest = remote.partition('/')
    return head, rest
|
2014-03-21 22:34:32 +04:00
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def joinremotename(remote, ref):
    """Join a remote alias and a ref into 'remote/ref'; a falsy ref
    yields just the remote alias."""
    if not ref:
        return remote
    return '%s/%s' % (remote, ref)
|
|
|
|
|
2015-01-14 03:28:01 +03:00
|
|
|
def readremotenames(repo):
    """Yield (node, nametype, remote, rname) tuples from .hg/remotenames.

    nametype is 'branches' or 'bookmarks' (or None when the entry was
    written in the old format and cannot be matched heuristically against
    the repo's current branch/bookmark names).  Old-format entries that
    recorded no name at all are skipped.
    """
    rfile = repo.join('remotenames')
    # exit early if there is nothing to do
    if not os.path.exists(rfile):
        return

    # needed to heuristically determine if a file is in the old format
    branches = repo.names['branches'].listnames(repo)
    bookmarks = repo.names['bookmarks'].listnames(repo)

    f = open(rfile)
    try:
        for line in f:
            line = line.strip()
            if not line:
                continue
            nametype = None

            node, name = line.split(' ', 1)

            # check for nametype being written into the file format
            if ' ' in name:
                nametype, name = name.split(' ', 1)

            remote, rname = splitremotename(name)

            # skip old data that didn't write the name (only wrote the alias)
            if not rname:
                continue

            # old format didn't save the nametype, so check for the name in
            # branches and bookmarks
            if nametype is None:
                if rname in branches:
                    nametype = 'branches'
                elif rname in bookmarks:
                    nametype = 'bookmarks'

            yield node, nametype, remote, rname
    finally:
        # close even when the consumer abandons the generator early;
        # the original only closed after full exhaustion
        f.close()
|
|
|
|
|
|
|
|
def loadremotenames(repo):
    """Populate the module-level _remotenames cache from .hg/remotenames.

    Entries whose node is unknown to the repo are skipped, as are heads
    whose changeset carries the 'close' extra (closed branch heads).
    """
    # when set, 'remote/default' is displayed as just 'remote'
    alias_default = repo.ui.configbool('remotenames', 'alias.default')

    for node, nametype, remote, rname in readremotenames(repo):
        # handle alias_default here
        if remote != "default" and rname == "default" and alias_default:
            name = remote
        else:
            name = joinremotename(remote, rname)

        # if the node doesn't exist, skip it
        try:
            ctx = repo[node]
        except error.RepoLookupError:
            continue

        # only mark as remote if the head changeset isn't marked closed
        if not ctx.extra().get('close'):
            # accumulate nodes per name; a name may have several heads
            nodes = _remotenames[nametype].get(name, [])
            nodes.append(ctx.node())
            _remotenames[nametype][name] = nodes
|
2014-03-31 21:34:43 +04:00
|
|
|
|
2015-03-03 12:08:44 +03:00
|
|
|
def transition(repo, ui):
    """
    Help with transitioning to using a remotenames workflow.

    Allows deleting matching local bookmarks defined in a config file:

    [remotenames]
    transitionbookmarks = master, stable
    """
    marks = repo._bookmarks
    for name in ui.configlist('remotenames', 'transitionbookmarks'):
        if name in marks:
            del marks[name]
    marks.write()
|
|
|
|
|
2015-03-14 02:57:19 +03:00
|
|
|
def saveremotenames(repo, remote, branches={}, bookmarks={}):
|
2015-03-13 23:32:15 +03:00
|
|
|
# delete old files
|
|
|
|
try:
|
|
|
|
repo.vfs.unlink('remotedistance')
|
|
|
|
except OSError, inst:
|
|
|
|
if inst.errno != errno.ENOENT:
|
|
|
|
raise
|
|
|
|
|
2015-03-03 12:08:44 +03:00
|
|
|
if not repo.vfs.exists('remotenames'):
|
|
|
|
transition(repo, repo.ui)
|
|
|
|
|
2015-03-14 03:50:35 +03:00
|
|
|
# while we're removing old paths, also update _remotenames
|
|
|
|
for btype, rmap in _remotenames.iteritems():
|
|
|
|
for rname in rmap.copy():
|
|
|
|
if remote == splitremotename(rname)[0]:
|
|
|
|
del _remotenames[btype][rname]
|
|
|
|
|
2015-01-14 03:16:27 +03:00
|
|
|
# read in all data first before opening file to write
|
|
|
|
olddata = set(readremotenames(repo))
|
|
|
|
|
2015-03-14 02:43:13 +03:00
|
|
|
f = repo.vfs('remotenames', 'w')
|
2015-01-14 03:16:27 +03:00
|
|
|
|
|
|
|
# only update the given 'remote', so iterate over old data and re-save it
|
|
|
|
for node, nametype, oldremote, rname in olddata:
|
|
|
|
if oldremote != remote:
|
2015-01-14 07:27:35 +03:00
|
|
|
n = joinremotename(oldremote, rname)
|
|
|
|
f.write('%s %s %s\n' % (node, nametype, n))
|
2015-01-14 03:16:27 +03:00
|
|
|
|
2014-03-18 22:46:10 +04:00
|
|
|
for branch, nodes in branches.iteritems():
|
2014-03-21 20:56:00 +04:00
|
|
|
for n in nodes:
|
2015-01-14 07:27:35 +03:00
|
|
|
rname = joinremotename(remote, branch)
|
|
|
|
f.write('%s branches %s\n' % (hex(n), rname))
|
2014-03-18 22:46:10 +04:00
|
|
|
for bookmark, n in bookmarks.iteritems():
|
2015-01-14 07:27:35 +03:00
|
|
|
f.write('%s bookmarks %s\n' % (n, joinremotename(remote, bookmark)))
|
2014-03-21 20:56:00 +04:00
|
|
|
f.close()
|
|
|
|
|
2015-03-13 02:48:30 +03:00
|
|
|
def distancefromtracked(repo, bookmark):
    """Return (remotename, (ahead, behind)) between bookmark and its target.

    remotename is '' when the bookmark tracks nothing, and the distance is
    (0, 0) in that case or when distance calculation is disabled.  Results
    are cached in 'cache/tracking.<bookmark>'.
    """

    if not repo.ui.configbool('remotenames', 'calculatedistance', True):
        # BUG FIX: callers unpack a (remotename, distance) pair, so the
        # disabled case must return the same shape as every other path;
        # the old bare (0, 0) made `rname, distance = ...` misbehave
        return ('', (0, 0))

    tracking = _readtracking(repo)
    remotename = ''
    distance = (0, 0)

    if bookmark and bookmark in repo and bookmark in tracking:
        remotename = tracking[bookmark]

    if not remotename:
        return (remotename, distance)

    # load the cache
    try:
        distance = repo.vfs.read('cache/tracking.%s' % bookmark).strip()
        return (remotename, tuple(int(d) for d in distance.split(' ')))
    except IOError:
        pass

    if remotename in repo:
        rev1 = repo[bookmark].rev()
        rev2 = repo[remotename].rev()
        # NOTE(review): the freshly computed distance is a tuple of *str*
        # while the cached path returns ints; callers format with %s so
        # both work, but the inconsistency is preserved here deliberately
        distance = (str(len(repo.revs('only(%d, %d)' % (rev1, rev2)))),
                    str(len(repo.revs('only(%d, %d)' % (rev2, rev1)))))
        # save in a cache
        repo.vfs.write('cache/tracking.%s' % bookmark, ' '.join(distance))
    return (remotename, distance)
|
|
|
|
|
|
|
|
def writedistance(repo):
    """
    Calculate and cache the distance between bookmarks and what they
    track, plus the distance from the tipmost head on current topological
    branch. This can be an expensive operation especially in repositories
    with a high commit rate, so it can be turned off in your hgrc:

    [remotenames]
    cachedistance = False
    """
    if not repo.ui.configbool('remotenames', 'cachedistance', True):
        return

    wlock = repo.wlock()
    try:
        for bmark, remotename in _readtracking(repo).iteritems():
            # delete the cache if it exists
            try:
                repo.vfs.unlink('cache/tracking.%s' % bmark)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise

            # recompute; distancefromtracked writes the cache file itself
            try:
                distancefromtracked(repo, bmark)
            except OSError, inst:
                if inst.errno != errno.ENOENT:
                    raise

        # are we on a 'branch' but not at the head, i.e. is there a bookmark
        # that we are heading towards?
        try:
            repo.vfs.unlink('cache/tracking.current')
        except OSError, inst:
            if inst.errno != errno.ENOENT:
                raise

        try:
            revs = list(repo.revs('limit(.:: and bookmark() - ., 1)'))
            if revs:
                # if we are here then we have one or more bookmarks and we'll
                # pick the first one for now
                bmark = repo[revs[0]].bookmarks()[0]
                d = len(repo.revs('only(%d, .)' % revs[0]))
                repo.vfs.write('cache/tracking.current', '%s %d' % (bmark, d))
        except OSError, inst:
            if inst.errno != errno.ENOENT:
                raise

    finally:
        wlock.release()
|
|
|
|
|
2014-03-21 20:48:38 +04:00
|
|
|
#########
|
|
|
|
# revsets
|
|
|
|
#########
|
|
|
|
|
2011-03-30 04:02:08 +04:00
|
|
|
def upstream_revs(filt, repo, subset, x):
    """Return members of subset that are ancestors of remote tips whose
    remote alias passes `filt`."""
    tips = set()
    for kind in _remotenames:
        try:
            ns = repo.names['remote' + kind]
        except KeyError:
            continue
        for name in ns.listnames(repo):
            alias = splitremotename(name)[0]
            if filt(alias):
                tips.update(ns.nodes(repo, name))

    if not tips:
        return revset.baseset([])

    ancestors = repo.revs('::%ln', tips)
    return revset.filteredset(subset, lambda r: r in ancestors)
|
2011-03-30 04:02:08 +04:00
|
|
|
|
|
|
|
def upstream(repo, subset, x):
    '''``upstream()``
    Select changesets in an upstream repository according to remotenames.
    '''
    repo = repo.unfiltered()
    names = repo.ui.configlist('remotenames', 'upstream')
    # override default args from hgrc with args passed in on the command line
    if x:
        names = [revset.getstring(sym, "remote path must be a string")
                 for sym in revset.getlist(x)]
    # with no explicit upstreams, fall back to the alias of the default path
    if not names:
        default_path = dict(repo.ui.configitems('paths')).get('default')
        if default_path:
            default_path = expandscheme(repo.ui, default_path)
            names = [activepath(repo.ui, default_path)]
    if names:
        filt = lambda name: name in names
    else:
        filt = lambda name: True
    return upstream_revs(filt, repo, subset, x)
|
|
|
|
|
|
|
|
def pushed(repo, subset, x):
    '''``pushed()``
    Select changesets in any remote repository according to remotenames.
    '''
    # no arguments are accepted
    revset.getargs(x, 0, 0, "pushed takes no arguments")
    acceptall = lambda name: True
    return upstream_revs(acceptall, repo, subset, x)
|
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def remotenamesrevset(repo, subset, x):
    """``remotenames()``
    All remote branches heads.
    """
    revset.getargs(x, 0, 0, "remotenames takes no arguments")
    heads = set()
    for kind in _remotenames:
        try:
            ns = repo.names['remote' + kind]
        except KeyError:
            continue
        for name in ns.listnames(repo):
            heads.update(ns.nodes(repo, name))

    torev = repo.changelog.rev
    return revset.baseset(sorted(torev(n) for n in heads))
|
2012-06-20 13:24:55 +04:00
|
|
|
|
2014-03-18 22:46:10 +04:00
|
|
|
# register the revset predicates above so they are usable in
# e.g. `hg log -r 'upstream()'`
revset.symbols.update({'upstream': upstream,
                       'pushed': pushed,
                       'remotenames': remotenamesrevset})
|
2012-06-20 19:23:51 +04:00
|
|
|
|
2014-03-21 20:48:38 +04:00
|
|
|
###########
|
|
|
|
# templates
|
|
|
|
###########
|
|
|
|
|
2015-01-06 07:29:40 +03:00
|
|
|
def remotenameskw(**args):
    """:remotenames: List of strings. List of remote names associated with the
    changeset. If remotenames.suppressbranches is True then branch names will
    be hidden if there is a bookmark at the same changeset.

    """
    repo = args['repo']
    node = args['ctx'].node()

    names = []
    if 'remotebookmarks' in repo.names:
        names = repo.names['remotebookmarks'].names(repo, node)

    suppress = repo.ui.configbool('remotenames', 'suppressbranches', False)
    # branch names are appended unless suppressed by a bookmark on this node
    if 'remotebranches' in repo.names and (not names or not suppress):
        names += repo.names['remotebranches'].names(repo, node)

    return templatekw.showlist('remotename', names,
                               plural='remotenames', **args)
|