2010-01-04 07:37:45 +03:00
|
|
|
import os
|
2015-02-10 03:08:18 +03:00
|
|
|
import errno
|
2010-01-04 07:37:45 +03:00
|
|
|
|
2015-02-10 22:43:07 +03:00
|
|
|
from mercurial import bookmarks
|
2015-01-17 01:48:56 +03:00
|
|
|
from mercurial import commands
|
|
|
|
from mercurial import encoding
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import error
|
|
|
|
from mercurial import exchange
|
2014-10-03 20:43:48 +04:00
|
|
|
from mercurial import extensions
|
2010-01-04 07:37:45 +03:00
|
|
|
from mercurial import hg
|
2015-02-10 20:18:13 +03:00
|
|
|
from mercurial import localrepo
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import namespaces
|
2015-02-12 10:34:56 +03:00
|
|
|
from mercurial import obsolete
|
2014-04-01 06:22:23 +04:00
|
|
|
from mercurial import repoview
|
2014-03-18 22:46:10 +04:00
|
|
|
from mercurial import revset
|
2015-02-10 22:52:19 +03:00
|
|
|
from mercurial import scmutil
|
2014-03-18 22:46:10 +04:00
|
|
|
from mercurial import templatekw
|
2015-01-06 22:18:07 +03:00
|
|
|
from mercurial import url
|
|
|
|
from mercurial import util
|
2015-02-10 22:52:19 +03:00
|
|
|
from mercurial.i18n import _
|
|
|
|
from mercurial.node import hex, short
|
2010-01-11 02:24:02 +03:00
|
|
|
from hgext import schemes
|
2010-01-04 07:37:45 +03:00
|
|
|
|
2015-01-15 01:45:24 +03:00
|
|
|
# In-memory cache of remote names keyed by name type.  Populated by
# loadremotenames() from .hg/remotenames and exposed to Mercurial's name
# machinery via the namespaces registered in reposetup().  Each inner dict
# maps a joined name (e.g. "default/master") to a list of binary nodes.
_remotenames = {
    "bookmarks": {},
    "branches": {},
}
|
2014-12-16 20:33:01 +03:00
|
|
|
|
2014-10-03 20:43:48 +04:00
|
|
|
def expush(orig, repo, remote, *args, **kwargs):
    """Wrap exchange.push: refresh .hg/remotenames after every push."""
    result = orig(repo, remote, *args, **kwargs)
    # a push changes what the remote knows, so resync our cached names
    pullremotenames(repo, remote)
    return result
|
2014-10-03 20:43:48 +04:00
|
|
|
|
|
|
|
def expull(orig, repo, remote, *args, **kwargs):
    """Wrap exchange.pull: resync .hg/remotenames and the cached
    bookmark-distance data after every pull.
    """
    result = orig(repo, remote, *args, **kwargs)
    pullremotenames(repo, remote)
    writedistance(repo)
    return result
|
|
|
|
|
|
|
|
def pullremotenames(repo, remote):
    """Fetch branch heads and bookmarks from ``remote`` and store them in
    .hg/remotenames under the path name resolved by activepath().

    Runs under the repo lock.  If the remote cannot be matched to a
    configured path (activepath returns ''), nothing is saved.
    """
    lock = repo.lock()
    try:
        path = activepath(repo.ui, remote)
        if path:
            # on a push, we don't want to keep obsolete heads since
            # they won't show up as heads on the next pull, so we
            # remove them here otherwise we would require the user
            # to issue a pull to refresh .hg/remotenames
            bmap = {}
            # use the unfiltered repo so obsolete() lookups don't fail
            repo = repo.unfiltered()
            for branch, nodes in remote.branchmap().iteritems():
                bmap[branch] = [n for n in nodes if not repo[n].obsolete()]
            saveremotenames(repo, path, bmap, remote.listkeys('bookmarks'))
    finally:
        lock.release()
|
|
|
|
|
2014-04-01 06:22:23 +04:00
|
|
|
def blockerhook(orig, repo, *args, **kwargs):
    """Wrap repoview._getdynamicblockers: when the repo carries the
    ``_unblockhiddenremotenames`` flag (set by exlog for ``log --remote``),
    add every rev pointed to by a cached remote name to the set of
    "blockers", i.e. revs that must not be hidden.
    """
    blockers = orig(repo)

    unblock = util.safehasattr(repo, '_unblockhiddenremotenames')
    if not unblock:
        return blockers

    # add remotenames to blockers by looping over all names in our own cache
    cl = repo.changelog
    for remotename in _remotenames.keys():
        rname = 'remote' + remotename
        try:
            ns = repo.names[rname]
        except KeyError:
            # namespace not registered (reposetup skipped it)
            continue
        for name in ns.listnames(repo):
            blockers.update(cl.rev(node) for node in ns.nodes(repo, name))

    return blockers
|
|
|
|
|
2015-01-29 01:03:19 +03:00
|
|
|
def exupdatefromremote(orig, ui, repo, remotemarks, path, trfunc, explicit=()):
    """Wrap bookmarks.updatefromremote: unless remotenames.syncbookmarks
    is enabled, skip copying remote bookmarks into the local bookmark
    store (remote names already track them).
    """
    sync = ui.configbool('remotenames', 'syncbookmarks', False)
    if not sync:
        ui.debug('remotenames: skipped syncing local bookmarks\n')
        return
    return orig(ui, repo, remotemarks, path, trfunc, explicit)
|
2015-01-29 01:03:19 +03:00
|
|
|
|
2015-01-29 04:45:48 +03:00
|
|
|
def exclone(orig, ui, *args, **opts):
    """
    We may not want local bookmarks on clone... but we always want remotenames!
    """
    srcpeer, dstpeer = orig(ui, *args, **opts)

    # seed .hg/remotenames from the clone source
    pullremotenames(dstpeer.local(), srcpeer)

    # unless syncing is explicitly enabled, drop the bookmarks file that
    # clone copied over from the source repo
    if not ui.configbool('remotenames', 'syncbookmarks', False):
        ui.debug('remotenames: removing cloned bookmarks\n')
        repo = dstpeer.local()
        wlock = repo.wlock()
        try:
            try:
                repo.vfs.unlink('bookmarks')
            except OSError, inst:
                # a missing bookmarks file is fine; anything else is real
                if inst.errno != errno.ENOENT:
                    raise
        finally:
            wlock.release()

    return (srcpeer, dstpeer)
|
|
|
|
|
2015-02-10 20:18:13 +03:00
|
|
|
def excommit(orig, repo, *args, **opts):
    """Wrap localrepository.commit: refresh the cached bookmark distance
    after each commit (the working parent moved).
    """
    node = orig(repo, *args, **opts)
    writedistance(repo)
    return node
|
|
|
|
|
2015-02-10 07:16:15 +03:00
|
|
|
def exupdate(orig, repo, *args, **opts):
    """Wrap hg.updaterepo: refresh the cached bookmark distance after the
    working directory moves.
    """
    result = orig(repo, *args, **opts)
    writedistance(repo)
    return result
|
|
|
|
|
2015-02-10 07:38:30 +03:00
|
|
|
def exsetcurrent(orig, repo, mark):
    """Wrap bookmarks.setcurrent: refresh the cached bookmark distance
    whenever the active bookmark changes.
    """
    result = orig(repo, mark)
    writedistance(repo)
    return result
|
|
|
|
|
2010-01-04 07:37:45 +03:00
|
|
|
def reposetup(ui, repo):
    """Load the remotenames cache and register 'remotebookmarks' /
    'remotebranches' namespaces on every local repo.

    When remotenames.hoist is set (e.g. to 'default'), bookmarks from that
    path are additionally resolvable without the 'path/' prefix.
    """
    if not repo.local():
        return

    hoist = ui.config('remotenames', 'hoist')
    if hoist:
        # names are stored joined, e.g. 'default/master', so match prefix
        hoist += '/'

    loadremotenames(repo)

    # cache this so we don't iterate over new values
    items = list(repo.names.iteritems())
    for nsname, ns in items:
        d = _remotenames.get(nsname)
        if not d:
            continue

        rname = 'remote' + nsname
        rtmpl = 'remote' + ns.templatename

        if nsname == 'bookmarks' and hoist:
            # NOTE(review): this appends to the list it is iterating; safe
            # only while no hoisted (stripped) name itself starts with the
            # hoist prefix — confirm, otherwise entries can compound.
            def names(rp, d=d):
                l = d.keys()
                for name in l:
                    if name.startswith(hoist):
                        l.append(name[len(hoist):])
                return l

            def namemap(rp, name, d=d):
                # exact (joined) names win; fall back to the hoisted form
                if name in d:
                    return d[name]
                return d.get(hoist + name)

            # we don't hoist nodemap because we don't want hoisted names
            # to show up in logs, which is the primary use case here
        else:
            names = lambda rp, d=d: d.keys()
            namemap = lambda rp, name, d=d: d.get(name)

        # reverse mapping: node -> every name whose node list contains it
        nodemap = lambda rp, node, d=d: [name for name, n in d.iteritems()
                                         for n2 in n if n2 == node]

        n = namespaces.namespace(rname, templatename=rtmpl,
                                 logname=ns.templatename, colorname=rtmpl,
                                 listnames=names, namemap=namemap,
                                 nodemap=nodemap)
        repo.names.addnamespace(n)
|
2014-12-17 09:23:41 +03:00
|
|
|
|
2015-03-12 00:57:32 +03:00
|
|
|
def _tracking(ui):
|
2015-03-12 22:53:08 +03:00
|
|
|
# omg default true
|
|
|
|
return ui.configbool('remotenames', 'tracking', True)
|
2015-03-12 00:57:32 +03:00
|
|
|
|
2015-03-12 01:38:19 +03:00
|
|
|
def setuptracking(ui):
    """Hook bookmark tracking into the rebase command, if the rebase
    extension is loaded; silently do nothing otherwise.
    """
    try:
        rebase = extensions.find('rebase')
        if rebase:
            extensions.wrapcommand(rebase.cmdtable, 'rebase', exrebase)
    except KeyError:
        # rebase isn't on
        pass
|
|
|
|
|
|
|
|
def exrebase(orig, ui, repo, **opts):
    """Wrap rebase: when no --dest was given and the active bookmark is
    tracking another name, rebase onto the tracked name.
    """
    if not opts['dest']:
        active = bookmarks.readcurrent(repo)
        if active:
            tracked = _readtracking(repo).get(active)
            if tracked:
                opts['dest'] = tracked

    return orig(ui, repo, **opts)
|
|
|
|
|
2015-03-13 01:55:16 +03:00
|
|
|
def exstrip(orig, ui, repo, *args, **opts):
    """Wrap strip: recompute the cached bookmark distance after
    changesets are removed.
    """
    result = orig(ui, repo, *args, **opts)
    writedistance(repo)
    return result
|
|
|
|
|
2015-01-17 01:48:56 +03:00
|
|
|
def extsetup(ui):
    """Wire the extension into core: wrap exchange/bookmark/clone/commit
    functions, extend the bookmarks/branches/log/push commands with
    remote-aware flags, and register the push discovery step and the
    {remotenames} template keyword.
    """
    extensions.wrapfunction(exchange, 'push', expush)
    extensions.wrapfunction(exchange, 'pull', expull)
    extensions.wrapfunction(repoview, '_getdynamicblockers', blockerhook)
    extensions.wrapfunction(bookmarks, 'updatefromremote', exupdatefromremote)
    extensions.wrapfunction(bookmarks, 'setcurrent', exsetcurrent)
    extensions.wrapfunction(hg, 'clone', exclone)
    extensions.wrapfunction(hg, 'updaterepo', exupdate)
    extensions.wrapfunction(localrepo.localrepository, 'commit', excommit)

    entry = extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks)
    entry[1].append(('a', 'all', None, 'show both remote and local bookmarks'))
    entry[1].append(('', 'remote', None, 'show only remote bookmarks'))

    # --track only exists when tracking is enabled (on by default)
    if _tracking(ui):
        entry[1].append(('t', 'track', '', 'track this bookmark', 'BOOKMARK'))
        setuptracking(ui)

    entry = extensions.wrapcommand(commands.table, 'branches', exbranches)
    entry[1].append(('a', 'all', None, 'show both remote and local branches'))
    entry[1].append(('', 'remote', None, 'show only remote branches'))

    entry = extensions.wrapcommand(commands.table, 'log', exlog)
    entry[1].append(('', 'remote', None, 'show remote names even if hidden'))

    entry = extensions.wrapcommand(commands.table, 'push', expushcmd)
    entry[1].append(('t', 'to', '', 'push revs to this bookmark', 'BOOKMARK'))
    entry[1].append(('d', 'delete', '', 'delete remote bookmark', 'BOOKMARK'))

    # replace core bookmark push discovery with our --to/--delete aware one
    exchange.pushdiscoverymapping['bookmarks'] = expushdiscoverybookmarks

    templatekw.keywords['remotenames'] = remotenameskw

    try:
        strip = extensions.find('strip')
        if strip:
            extensions.wrapcommand(strip.cmdtable, 'strip', exstrip)
    except KeyError:
        # strip isn't on
        pass
|
|
|
|
|
|
|
|
|
2015-02-10 20:15:37 +03:00
|
|
|
def exlog(orig, ui, repo, *args, **opts):
    """Wrap log: with --remote, temporarily set the flag that makes
    blockerhook unhide changesets pointed at by remote names.
    """
    show_remote = opts.get('remote')
    if show_remote:
        repo.__setattr__('_unblockhiddenremotenames', True)

    result = orig(ui, repo, *args, **opts)

    if show_remote:
        repo.__setattr__('_unblockhiddenremotenames', False)
    return result
|
|
|
|
|
2015-02-12 07:09:29 +03:00
|
|
|
# Module-level flags used to pass state from expushcmd into
# expushdiscoverybookmarks, which exchange.push invokes without any way to
# pass extra arguments.
_pushto = False  # True while a `push --to BOOKMARK` is in flight
_delete = None   # name of the remote bookmark being deleted, if any
|
2015-02-10 22:52:19 +03:00
|
|
|
|
|
|
|
def expushdiscoverybookmarks(pushop):
    """Replacement bookmark push-discovery step.

    Three modes, selected by the module-level flags set in expushcmd:
      * _delete set: schedule deletion of that remote bookmark;
      * neither flag: fall through to core discovery, but refuse to create
        new anonymous heads unless forced or configured otherwise;
      * _pushto set: validate and schedule a single-bookmark update
        (new bookmarks and non-fast-forward moves require --force).

    Raises util.Abort for every rejected configuration.
    """
    repo = pushop.repo.unfiltered()
    remotemarks = pushop.remote.listkeys('bookmarks')
    force = pushop.force

    if _delete:
        if _delete not in remotemarks:
            # BUGFIX: the bookmark name was never interpolated into the
            # message; apply the % argument so the user sees which one
            raise util.Abort(_('remote bookmark %s does not exist') % _delete)
        # empty new-value means "delete" to the bookmark pushkey protocol
        pushop.outbookmarks.append([_delete, remotemarks[_delete], ''])
        return exchange._pushdiscoverybookmarks(pushop)

    if not _pushto:
        ret = exchange._pushdiscoverybookmarks(pushop)
        if not (repo.ui.configbool('remotenames', 'pushanonheads')
                or force):
            # check to make sure we don't push an anonymous head
            if pushop.revs:
                revs = set(pushop.revs)
            else:
                revs = set(repo.lookup(r) for r in repo.revs('head()'))
            # heads the remote already has are fine
            revs -= set(pushop.remoteheads)
            # find heads that don't have a bookmark going with them
            for bookmark in pushop.bookmarks:
                rev = repo.lookup(bookmark)
                if rev in revs:
                    revs.remove(rev)
            # remove heads that already have a remote bookmark
            for bookmark, node in remotemarks.iteritems():
                rev = repo.lookup(node)
                if rev in revs:
                    revs.remove(rev)
            # remove heads that already advance bookmarks (old mercurial
            # behavior)
            for bookmark, old, new in pushop.outbookmarks:
                rev = repo.lookup(new)
                if rev in revs:
                    revs.remove(rev)

            # obsolete or branch-closing heads don't count as anonymous
            revs = [short(r) for r in revs
                    if not repo[r].obsolete()
                    and not repo[r].closesbranch()]
            if revs:
                msg = _("push would create new anonymous heads (%s)")
                hint = _("use --force to override this warning")
                raise util.Abort(msg % ', '.join(sorted(revs)), hint=hint)
        return ret

    # --to mode: expushcmd guarantees exactly one bookmark and one rev
    bookmark = pushop.bookmarks[0]
    rev = pushop.revs[0]

    # allow new bookmark only if force is True
    old = ''
    if bookmark in remotemarks:
        old = remotemarks[bookmark]
    elif not force:
        msg = _('not creating new bookmark')
        hint = _('use --force to create a new bookmark')
        raise util.Abort(msg, hint=hint)

    # allow non-ff only if force is True
    if not force and old != '':
        if old not in repo:
            msg = _('remote bookmark revision is not in local repo')
            hint = _('pull and merge or rebase or use --force')
            raise util.Abort(msg, hint=hint)
        # fast-forward means the pushed rev descends (through obsolescence
        # successors too) from the remote bookmark's current position
        foreground = obsolete.foreground(repo, [repo.lookup(old)])
        if repo[rev].node() not in foreground:
            msg = _('pushed rev is not in the foreground of remote bookmark')
            hint = _('use --force flag to complete non-fast-forward update')
            raise util.Abort(msg, hint=hint)
        if repo[old] == repo[rev]:
            repo.ui.warn(_('remote bookmark already points at pushed rev\n'))
            return

    pushop.outbookmarks.append((bookmark, old, hex(rev)))
|
|
|
|
|
2015-03-10 10:06:51 +03:00
|
|
|
def _pushrev(repo, ui):
|
|
|
|
return repo.lookup(ui.config('remotenames', 'pushrev', '.'))
|
|
|
|
|
2015-02-10 22:52:19 +03:00
|
|
|
def expushcmd(orig, ui, repo, dest=None, **opts):
    """Wrap the push command to support --to (push one rev to a remote
    bookmark), --delete (remove a remote bookmark) and implicit tracking
    (no dest/--to/--rev: derive them from the active bookmark's tracking).
    """
    # needed for discovery method
    global _pushto, _delete

    _delete = opts.get('delete')
    if _delete:
        # --delete is exclusive with any flag that selects what to push
        flag = None
        for f in ('to', 'bookmark', 'branch', 'rev'):
            if opts.get(f):
                flag = f
                break
        if flag:
            msg = _('do not specify --delete and '
                    '--%s at the same time') % flag
            raise util.Abort(msg)
        # we want to skip pushing any changesets while deleting a remote
        # bookmark, so we send the null revision
        opts['rev'] = ['null']
        return orig(ui, repo, dest, **opts)

    revs = opts.get('rev')
    to = opts.get('to')

    # with tracking on and nothing specified, fill dest/to from the
    # active bookmark's tracked name
    if not dest and not to and not revs and _tracking(ui):
        current = bookmarks.readcurrent(repo)
        tracking = _readtracking(repo)
        # print "tracking on %s %s" % (current, tracking)
        if current and current in tracking:
            track = tracking[current]
            path, book = splitremotename(track)
            # un-rename a path, if needed
            revrenames = dict((v, k) for k, v in _getrenames(ui).iteritems())
            path = revrenames.get(path, path)
            paths = set(path for path, ignore in ui.configitems('paths'))
            if book and path in paths:
                dest = path
                to = book

    if not to:
        # plain push: optionally forbidden by remotenames.forceto
        if ui.configbool('remotenames', 'forceto', False):
            msg = _('must specify --to when pushing')
            hint = _('see configuration option %s') % 'remotenames.forceto'
            raise util.Abort(msg, hint=hint)

        if not revs:
            revs = [_pushrev(repo, ui)]
            opts['rev'] = revs

        return orig(ui, repo, dest, **opts)

    if opts.get('bookmark'):
        msg = _('do not specify --to/-t and --bookmark/-B at the same time')
        raise util.Abort(msg)
    if opts.get('branch'):
        msg = _('do not specify --to/-t and --branch/-b at the same time')
        raise util.Abort(msg)

    # --to pushes exactly one rev (default: the configured pushrev)
    if revs:
        revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
    else:
        revs = [_pushrev(repo, ui)]
    if len(revs) != 1:
        msg = _('--to requires exactly one rev to push')
        hint = _('use --rev BOOKMARK or omit --rev for current commit (.)')
        raise util.Abort(msg, hint=hint)
    rev = revs[0]

    # signal expushdiscoverybookmarks to take the --to path
    _pushto = True

    # big can o' copypasta from exchange.push
    dest = ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest, opts.get('branch'))
    try:
        other = hg.peer(repo, opts, dest)
    except error.RepoError:
        if dest == "default-push":
            hint = _('see the "path" section in "hg help config"')
            raise util.Abort(_("default repository not configured!"),
                             hint=hint)
        else:
            raise

    # all checks pass, go for it!
    ui.status(_('pushing rev %s to destination %s bookmark %s\n') % (
        short(rev), dest, to))

    # TODO: subrepo stuff

    pushop = exchange.push(repo, other, opts.get('force'), revs=revs,
                           bookmarks=(to,))

    # mirror exchange.push's exit-code convention (2 = bookmark failure)
    result = not pushop.cgresult
    if pushop.bkresult is not None:
        if pushop.bkresult == 2:
            result = 2
        elif not result and pushop.bkresult:
            result = 2

    _pushto = False
    return result
|
|
|
|
|
2015-01-29 02:53:49 +03:00
|
|
|
def exbranches(orig, ui, repo, *args, **opts):
    """Wrap the branches command: print local branches as usual (unless
    --remote), then with --all/--remote list remote branches, newest rev
    first, via the 'remotebranches' namespace.
    """
    if not opts.get('remote'):
        orig(ui, repo, *args, **opts)

    if opts.get('all') or opts.get('remote'):
        # exit early if namespace doesn't even exist
        namespace = 'remotebranches'
        if namespace not in repo.names:
            return

        ns = repo.names[namespace]
        label = 'log.' + ns.colorname
        fm = ui.formatter('branches', opts)

        # it seems overkill to hide displaying hidden remote branches
        repo = repo.unfiltered()

        # create a sorted by descending rev list
        revs = set()
        for name in ns.listnames(repo):
            for n in ns.nodes(repo, name):
                revs.add(repo.changelog.rev(n))

        for r in sorted(revs, reverse=True):
            ctx = repo[r]
            for name in ns.names(repo, ctx.node()):
                fm.startitem()
                # align the rev:node column like core `hg branches`
                padsize = max(31 - len(str(r)) - encoding.colwidth(name), 0)

                tmplabel = label
                if ctx.obsolete():
                    tmplabel = tmplabel + ' changeset.obsolete'
                # NOTE(review): the name is written with the plain label,
                # only the rev:node column gets the obsolete label —
                # presumably intentional (matches exbookmarks); confirm.
                fm.write(ns.colorname, '%s', name, label=label)
                fmt = ' ' * padsize + ' %d:%s'
                fm.condwrite(not ui.quiet, 'rev node', fmt, r,
                             fm.hexfunc(ctx.node()), label=tmplabel)
                fm.plain('\n')
        fm.end()
|
|
|
|
|
2015-03-12 01:16:53 +03:00
|
|
|
def _readtracking(repo):
|
|
|
|
tracking = {}
|
|
|
|
try:
|
2015-03-12 01:38:19 +03:00
|
|
|
for line in repo.vfs.read('bookmarks.tracking').strip().split('\n'):
|
|
|
|
try:
|
|
|
|
book, track = line.strip().split(' ')
|
|
|
|
tracking[book] = track
|
|
|
|
except ValueError:
|
|
|
|
# corrupt file, ignore entry
|
|
|
|
pass
|
2015-03-12 01:16:53 +03:00
|
|
|
except IOError:
|
|
|
|
pass
|
|
|
|
return tracking
|
|
|
|
|
|
|
|
def _writetracking(repo, tracking):
    # Serialize the bookmark -> tracked-name map as "book track" lines
    # into .hg/bookmarks.tracking (the counterpart of _readtracking).
    data = ''
    for book, track in tracking.iteritems():
        data += '%s %s\n' % (book, track)
    repo.vfs.write('bookmarks.tracking', data)
|
2015-03-12 01:16:53 +03:00
|
|
|
|
2015-01-29 02:53:49 +03:00
|
|
|
def exbookmarks(orig, ui, repo, *args, **opts):
    """Bookmark output is sorted by bookmark name.

    This has the side benefit of grouping all remote bookmarks by remote name.
    """
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')
    remote = opts.get('remote')
    track = opts.get('track')

    # refuse to create/operate on bookmarks the admin has blacklisted
    disallowed = set(ui.configlist('remotenames', 'disallowedbookmarks'))
    for name in args:
        if name in disallowed:
            raise util.Abort(_(" bookmark '%s' not allowed by configuration")
                             % name)

    # --track: record the tracking relation for every named bookmark
    if track:
        tracking = _readtracking(repo)
        for arg in args:
            tracking[arg] = track
        _writetracking(repo, tracking)

    # any mutating form is delegated entirely to core
    if delete or rename or args or inactive:
        return orig(ui, repo, *args, **opts)

    # copy pasta from commands.py; need to patch core
    if not remote:
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            current = repo._bookmarkcurrent
            if bmark == current:
                prefix, label = '*', 'bookmarks.current'
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            pad = " " * (25 - encoding.colwidth(bmark))
            rev = repo.changelog.rev(n)
            h = hexfn(n)
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s', rev, h,
                         label=label)
            fm.data(active=(bmark == current))
            fm.plain('\n')
        fm.end()

    if remote or opts.get('all'):
        # exit early if the namespace was never registered
        n = 'remotebookmarks'
        if n not in repo.names:
            return
        ns = repo.names[n]
        color = ns.colorname
        label = 'log.' + color

        fm = ui.formatter('bookmarks', opts)

        # it seems overkill to hide displaying hidden remote bookmarks
        repo = repo.unfiltered()

        for name in sorted(ns.listnames(repo)):
            node = ns.nodes(repo, name)[0]
            ctx = repo[node]
            fm.startitem()

            if not ui.quiet:
                fm.plain('   ')

            padsize = max(25 - encoding.colwidth(name), 0)
            fmt = ' ' * padsize + ' %d:%s'

            tmplabel = label
            if ctx.obsolete():
                tmplabel = tmplabel + ' changeset.obsolete'
            fm.write(color, '%s', name, label=label)
            fm.condwrite(not ui.quiet, 'rev node', fmt, ctx.rev(),
                         fm.hexfunc(node), label=tmplabel)
            fm.plain('\n')
        fm.end()
|
2015-01-29 02:27:57 +03:00
|
|
|
|
2014-03-21 22:37:03 +04:00
|
|
|
def activepath(ui, remote):
    """Return the [paths] alias that matches ``remote`` (a peer, repo or
    path string), after applying remotenames.rename.* renames.

    Returns '' when no configured path matches.  Non-default aliases are
    preferred over 'default'/'default-push'.
    """
    realpath = ''
    local = None
    try:
        local = remote.local()
    except AttributeError:
        # plain string or an object without a peer interface
        pass

    # determine the remote path from the repo, if possible; else just
    # use the string given to us
    rpath = remote
    if local:
        rpath = getattr(remote, 'root', None)
        if rpath is None:
            # Maybe a localpeer? (hg@1ac628cd7113, 2.3)
            rpath = getattr(getattr(remote, '_repo', None),
                            'root', None)
    elif not isinstance(remote, str):
        try:
            rpath = remote._url
        except AttributeError:
            rpath = remote.url

    for path, uri in ui.configitems('paths'):
        uri = ui.expandpath(expandscheme(ui, uri))
        if local:
            # compare canonical filesystem paths for local repos
            uri = os.path.realpath(uri)
        else:
            if uri.startswith('http'):
                # strip credentials; API differs across hg versions
                try:
                    uri = url.url(uri).authinfo()[0]
                except AttributeError:
                    try:
                        uri = util.url(uri).authinfo()[0]
                    except AttributeError:
                        uri = url.getauthinfo(uri)[0]
            uri = uri.rstrip('/')
            rpath = rpath.rstrip('/')
        if uri == rpath:
            realpath = path
            # prefer a non-default name to default
            if path != 'default' and path != 'default-push':
                break

    renames = _getrenames(ui)
    realpath = renames.get(realpath, realpath)
    return realpath
|
2014-03-21 22:34:32 +04:00
|
|
|
|
2015-03-03 10:22:51 +03:00
|
|
|
# memoization
|
|
|
|
_renames = None
|
2015-03-12 10:06:11 +03:00
|
|
|
def _getrenames(ui):
|
2015-03-03 10:22:51 +03:00
|
|
|
global _renames
|
|
|
|
if _renames is None:
|
|
|
|
_renames = {}
|
|
|
|
for k, v in ui.configitems('remotenames'):
|
|
|
|
if k.startswith('rename.'):
|
|
|
|
_renames[k[7:]] = v
|
|
|
|
return _renames
|
|
|
|
|
2014-03-21 22:34:32 +04:00
|
|
|
def expandscheme(ui, uri):
    '''For a given uri, expand the scheme for it'''
    # collect schemes (from the schemes extension) whose prefix matches
    urischemes = [s for s in schemes.schemes.iterkeys()
                  if uri.startswith('%s://' % s)]
    for s in urischemes:
        # TODO: refactor schemes so we don't
        # duplicate this logic
        ui.note(_('performing schemes expansion with '
                  'scheme %s\n') % s)
        scheme = hg.schemes[s]
        # split off the templated parts; anything beyond is the tail
        parts = uri.split('://', 1)[1].split('/', scheme.parts)
        if len(parts) > scheme.parts:
            tail = parts[-1]
            parts = parts[:-1]
        else:
            tail = ''
        # template vars are 1-based: {1}, {2}, ...
        ctx = dict((str(i + 1), v) for i, v in enumerate(parts))
        uri = ''.join(scheme.templater.process(scheme.url, ctx)) + tail
    return uri
|
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def splitremotename(remote):
    """Split a joined remote name 'path/name' into (path, name).

    When there is no '/', the whole string is the path and name is ''.
    """
    if '/' not in remote:
        return remote, ''
    path, _sep, ref = remote.partition('/')
    return path, ref
|
2014-03-21 22:34:32 +04:00
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def joinremotename(remote, ref):
    """Join a remote path and a ref into 'remote/ref'.

    A falsy ref yields the remote path unchanged (inverse of
    splitremotename).
    """
    return '%s/%s' % (remote, ref) if ref else remote
|
|
|
|
|
2015-01-14 03:28:01 +03:00
|
|
|
def readremotenames(repo):
    """Generator yielding (hexnode, nametype, remotepath, name) tuples
    parsed from .hg/remotenames.

    Supports both the current format ('node nametype remote/name') and the
    legacy one without the nametype, which is then guessed from the local
    branch/bookmark namespaces.  Yields nothing if the file is absent.
    """
    rfile = repo.join('remotenames')
    # exit early if there is nothing to do
    if not os.path.exists(rfile):
        return

    # needed to heuristically determine if a file is in the old format
    branches = repo.names['branches'].listnames(repo)
    bookmarks = repo.names['bookmarks'].listnames(repo)

    # NOTE(review): f.close() below only runs when the generator is fully
    # exhausted; an abandoned generator leaks the handle until GC.
    f = open(rfile)
    for line in f:
        nametype = None
        line = line.strip()
        if not line:
            continue
        # (redundant re-initialization kept as-is)
        nametype = None
        remote, rname = None, None

        node, name = line.split(' ', 1)

        # check for nametype being written into the file format
        if ' ' in name:
            nametype, name = name.split(' ', 1)

        remote, rname = splitremotename(name)

        # skip old data that didn't write the name (only wrote the alias)
        if not rname:
            continue

        # old format didn't save the nametype, so check for the name in
        # branches and bookmarks
        if nametype is None:
            if rname in branches:
                nametype = 'branches'
            elif rname in bookmarks:
                nametype = 'bookmarks'

        yield node, nametype, remote, rname

    f.close()
|
|
|
|
|
|
|
|
def loadremotenames(repo):
    """Populate the module-level _remotenames cache from .hg/remotenames.

    With remotenames.alias.default set, the name 'default' on a non-default
    remote collapses to just the remote path.  Unknown nodes and heads of
    closed branches are skipped.
    """
    alias_default = repo.ui.configbool('remotenames', 'alias.default')

    for node, nametype, remote, rname in readremotenames(repo):
        # handle alias_default here
        if remote != "default" and rname == "default" and alias_default:
            name = remote
        else:
            name = joinremotename(remote, rname)

        # if the node doesn't exist, skip it
        try:
            ctx = repo[node]
        except error.RepoLookupError:
            continue

        # only mark as remote if the head changeset isn't marked closed
        if not ctx.extra().get('close'):
            nodes = _remotenames[nametype].get(name, [])
            nodes.append(ctx.node())
            _remotenames[nametype][name] = nodes
|
2014-03-31 21:34:43 +04:00
|
|
|
|
2015-03-03 12:08:44 +03:00
|
|
|
def transition(repo, ui):
    """
    Help with transitioning to using a remotenames workflow.

    Allows deleting matching local bookmarks defined in a config file:

    [remotenames]
    transitionbookmarks = master, stable
    """
    marks = repo._bookmarks
    for name in ui.configlist('remotenames', 'transitionbookmarks'):
        # drop the local bookmark if present; ignore it otherwise
        marks.pop(name, None)
    marks.write()
|
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def saveremotenames(repo, remote, branches, bookmarks):
    """Persist the names of ``remote`` to the .hg/remotenames file.

    Entries that belong to other remotes are carried over unchanged;
    entries for ``remote`` are replaced by the given ``branches``
    (name -> list of binary nodes, hex-encoded on write) and
    ``bookmarks`` (name -> node written as-is, presumably already hex).
    """
    # first write ever: offer the bookmark cleanup the user configured
    if not repo.vfs.exists('remotenames'):
        transition(repo, repo.ui)

    # read in all data first before opening file to write
    olddata = set(readremotenames(repo))

    bfile = repo.join('remotenames')
    f = open(bfile, 'w')
    try:
        # only update the given 'remote': iterate over old data and
        # re-save entries belonging to any other remote untouched
        for node, nametype, oldremote, rname in olddata:
            if oldremote != remote:
                n = joinremotename(oldremote, rname)
                f.write('%s %s %s\n' % (node, nametype, n))

        for branch, nodes in branches.iteritems():
            # the joined name is loop-invariant: compute it once per branch
            rname = joinremotename(remote, branch)
            for n in nodes:
                f.write('%s branches %s\n' % (hex(n), rname))
        for bookmark, n in bookmarks.iteritems():
            f.write('%s bookmarks %s\n'
                    % (n, joinremotename(remote, bookmark)))
    finally:
        # don't leak the file handle if any write raises
        f.close()
|
|
|
|
|
2015-02-10 06:20:25 +03:00
|
|
|
def distancefromremote(repo, remote="default"):
    """returns the signed distance between the current node and remote"""
    # prefer the active bookmark; fall back to the current branch name
    marker = repo._bookmarkcurrent
    if not marker:
        marker = repo.lookupbranch('.')

    # map the 'default' path back to a non-default alias with the same URI
    pathcfg = dict(repo.ui.configitems('paths'))
    rpath = pathcfg.get(remote)
    if remote == 'default':
        for alias, location in pathcfg.iteritems():
            if alias != 'default' and alias != 'default-push' \
                    and rpath == location:
                remote = alias

    # no configured path for this remote: nothing to measure
    if not rpath:
        return 0

    remoteb = joinremotename(remote, marker)
    # NOTE(review): this tests the local bookmark/branch name against
    # 'default', not the remote name — looks intentional, but confirm
    if marker == 'default' and repo.ui.configbool('remotenames',
                                                  'alias.default'):
        remoteb = remote

    distance = 0
    if remoteb in repo:
        rev1 = repo[remoteb].rev()
        rev2 = repo['.'].rev()
        sign = 1
        if rev2 < rev1:
            sign = -1
            rev1, rev2 = rev2, rev1
        # changesets on the path between the two revisions, minus one
        # endpoint, with the direction encoded in the sign
        span = repo.revs('%s::%s' % (rev1, rev2))
        distance = sign * (len(span) - 1)

    return distance
|
|
|
|
|
2015-02-10 06:30:17 +03:00
|
|
|
def writedistance(repo, remote="default"):
    """Compute and persist the distance to ``remote``.

    Writes '<sign> <absolute distance>' (e.g. '+ 3') to the
    .hg/remotedistance file, atomically and under the wlock.
    """
    distance = distancefromremote(repo, remote)
    sign = '+'
    if distance < 0:
        sign = '-'

    wlock = repo.wlock()
    try:
        try:
            fp = repo.vfs('remotedistance', 'w', atomictemp=True)
            fp.write('%s %s' % (sign, abs(distance)))
            fp.close()
        # modern 'as' binding (PEP 3110) instead of the deprecated
        # 'except OSError, inst' comma form
        except OSError as inst:
            # tolerate a missing file/directory; re-raise anything else
            if inst.errno != errno.ENOENT:
                raise
    finally:
        wlock.release()
|
|
|
|
|
2014-03-21 20:48:38 +04:00
|
|
|
#########
|
|
|
|
# revsets
|
|
|
|
#########
|
|
|
|
|
2011-03-30 04:02:08 +04:00
|
|
|
def upstream_revs(filt, repo, subset, x):
    """Restrict ``subset`` to ancestors of remote heads whose remote
    path satisfies the ``filt`` predicate."""
    tips = set()
    for kind in _remotenames:
        try:
            ns = repo.names['remote' + kind]
        except KeyError:
            # namespace not registered for this repo
            continue
        for fullname in ns.listnames(repo):
            remotepath = splitremotename(fullname)[0]
            if filt(remotepath):
                tips.update(ns.nodes(repo, fullname))

    if not tips:
        return revset.baseset([])

    ancestors = repo.revs('::%ln', tips)
    return revset.filteredset(subset, lambda r: r in ancestors)
|
2011-03-30 04:02:08 +04:00
|
|
|
|
|
|
|
def upstream(repo, subset, x):
    '''``upstream()``
    Select changesets in an upstream repository according to remotenames.
    '''
    repo = repo.unfiltered()
    names = repo.ui.configlist('remotenames', 'upstream')
    # arguments given on the command line win over the hgrc default list
    if x:
        names = [revset.getstring(sym, "remote path must be a string")
                 for sym in revset.getlist(x)]

    if not names:
        # nothing configured: fall back to the name of the 'default' path
        default_path = dict(repo.ui.configitems('paths')).get('default')
        if default_path:
            default_path = expandscheme(repo.ui, default_path)
            names = [activepath(repo.ui, default_path)]

    if names:
        filt = lambda name: name in names
    else:
        # no upstream known at all: accept every remote
        filt = lambda name: True
    return upstream_revs(filt, repo, subset, x)
|
|
|
|
|
|
|
|
def pushed(repo, subset, x):
    '''``pushed()``
    Select changesets in any remote repository according to remotenames.
    '''
    # this predicate accepts no arguments
    revset.getargs(x, 0, 0, "pushed takes no arguments")
    # match every remote path unconditionally
    return upstream_revs(lambda _path: True, repo, subset, x)
|
|
|
|
|
2014-04-01 04:27:54 +04:00
|
|
|
def remotenamesrevset(repo, subset, x):
    """``remotenames()``
    All remote branches heads.
    """
    revset.getargs(x, 0, 0, "remotenames takes no arguments")
    nodes = set()
    for kind in _remotenames:
        try:
            ns = repo.names['remote' + kind]
        except KeyError:
            # namespace not registered for this repo
            continue
        for name in ns.listnames(repo):
            nodes.update(ns.nodes(repo, name))

    torev = repo.changelog.rev
    return revset.baseset(sorted(torev(n) for n in nodes))
|
2012-06-20 13:24:55 +04:00
|
|
|
|
2014-03-18 22:46:10 +04:00
|
|
|
# register the revset predicates implemented above
revset.symbols['upstream'] = upstream
revset.symbols['pushed'] = pushed
revset.symbols['remotenames'] = remotenamesrevset
|
2012-06-20 19:23:51 +04:00
|
|
|
|
2014-03-21 20:48:38 +04:00
|
|
|
###########
|
|
|
|
# templates
|
|
|
|
###########
|
|
|
|
|
2015-01-06 07:29:40 +03:00
|
|
|
def remotenameskw(**args):
    """:remotenames: List of strings. List of remote names associated with the
    changeset. If remotenames.suppressbranches is True then branch names will
    be hidden if there is a bookmark at the same changeset.
    """
    repo = args['repo']
    ctx = args['ctx']

    names = []
    if 'remotebookmarks' in repo.names:
        names = repo.names['remotebookmarks'].names(repo, ctx.node())

    suppress = repo.ui.configbool('remotenames', 'suppressbranches', False)
    # branches are shown unless suppression is on AND a bookmark matched
    showbranches = not (names and suppress)
    if showbranches and 'remotebranches' in repo.names:
        names += repo.names['remotebranches'].names(repo, ctx.node())

    return templatekw.showlist('remotename', names,
                               plural='remotenames', **args)
|