#!/usr/bin/env python
#
# Copyright (c) 2012 Felipe Contreras
#
# Inspired by Rocco Rutte's hg-fast-export
#
# Just copy to your ~/bin, or anywhere in your $PATH.
# Then you can clone with:
# git clone hg::/path/to/mercurial/repo/
#
# For remote repositories a local clone is stored in
# "$GIT_DIR/hg/origin/clone/.hg/".
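#
# An existing Git repository can also track a Mercurial repository directly
# (illustrative commands, using the same placeholder path as above):
#
#   git remote add upstream hg::/path/to/mercurial/repo/
#   git fetch upstream
#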
from mercurial import hg, ui, bookmarks, context, encoding, node, error, extensions, discovery, util

import re
import sys
import os
import json
import shutil
import subprocess
import urllib
import atexit
import urlparse, hashlib
import time as ptime

#
# If you want to see Mercurial revisions as Git commit notes:
# git config core.notesRef refs/notes/hg
#
# If you are not in hg-git-compat mode and want to disable the tracking of
# named branches:
# git config --global remote-hg.track-branches false
#
# If you want the equivalent of hg's clone/pull --insecure option:
# git config --global remote-hg.insecure true
#
# If you want to switch to hg-git compatibility mode:
# git config --global remote-hg.hg-git-compat true
#
# git:
# Sensible defaults for git.
# hg bookmarks are exported as git branches, hg branches are prefixed
# with 'branches/', HEAD is a special case.
#
# hg:
# Emulate hg-git.
# Only hg bookmarks are exported as git branches.
# Commits are modified to preserve hg information and allow bidirectionality.
#
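# For illustration only: in the default 'git' mode a Mercurial branch named
# 'stable' appears on the Git side as the remote branch 'branches/stable',
# so it can be checked out with something like:
#
#   git checkout -b stable origin/branches/stable
#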
NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
EMAIL_RE = re.compile(r'([^ \t<>]+@[^ \t<>]+)')
AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.+)?)?$')
RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')
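# Rough illustration (hypothetical input): RAW_AUTHOR_RE is meant to match
# fast-export author/committer lines such as
#   author Jane Doe <jane@example.com> 1380000000 +0200
# capturing the keyword, optional name, email, epoch seconds and UTC offset.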

VERSION = 2

def die(msg, *args):
    sys.stderr.write('ERROR: %s\n' % (msg % args))
    sys.exit(1)

def warn(msg, *args):
    sys.stderr.write('WARNING: %s\n' % (msg % args))

def gitmode(flags):
    return 'l' in flags and '120000' or 'x' in flags and '100755' or '100644'

def gittz(tz):
    return '%+03d%02d' % (-tz / 3600, -tz % 3600 / 60)
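# Worked example (illustrative, Python 2 division): Mercurial stores the
# offset in seconds west of UTC, so tz = -3600 (UTC+01:00) gives
# '%+03d%02d' % (1, 0) == '+0100', and tz = 18000 (UTC-05:00) gives '-0500'.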

def hgmode(mode):
    m = { '100755': 'x', '120000': 'l' }
    return m.get(mode, '')

def hghex(n):
    return node.hex(n)

def hgbin(n):
    return node.bin(n)

def hgref(ref):
    return ref.replace('___', ' ')

def gitref(ref):
    return ref.replace(' ', '___')
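# Illustrative only: a bookmark named 'feature x' cannot appear verbatim in a
# Git ref name, so it is exported as 'feature___x' by gitref() and mapped back
# by hgref() when pushing.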

def check_version(*check):
    if not hg_version:
        return True
    return hg_version >= check
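# For instance (hypothetical values): with hg_version == (2, 8, 1),
# check_version(2, 3) returns True by tuple comparison, while (2, 1) would
# return False; an undetected version is assumed to be recent enough.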

def get_config(config):
    cmd = ['git', 'config', '--get', config]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return output

def get_config_bool(config, default=False):
    value = get_config(config).rstrip('\n')
    if value == "true":
        return True
    elif value == "false":
        return False
    else:
        return default

class Marks:

    def __init__(self, path, repo):
        self.path = path
        self.repo = repo
        self.clear()
        self.load()

        if self.version < VERSION:
            if self.version == 1:
                self.upgrade_one()

            # upgraded?
            if self.version < VERSION:
                self.clear()
                self.version = VERSION

    def clear(self):
        self.tips = {}
        self.marks = {}
        self.rev_marks = {}
        self.last_mark = 0
        self.version = 0
        self.last_note = 0

    def load(self):
        if not os.path.exists(self.path):
            return

        tmp = json.load(open(self.path))

        self.tips = tmp['tips']
        self.marks = tmp['marks']
        self.last_mark = tmp['last-mark']
        self.version = tmp.get('version', 1)
        self.last_note = tmp.get('last-note', 0)

        for rev, mark in self.marks.iteritems():
            self.rev_marks[mark] = rev

    def upgrade_one(self):
        def get_id(rev):
            return hghex(self.repo.changelog.node(int(rev)))
        self.tips = dict((name, get_id(rev)) for name, rev in self.tips.iteritems())
        self.marks = dict((get_id(rev), mark) for rev, mark in self.marks.iteritems())
        self.rev_marks = dict((mark, get_id(rev)) for mark, rev in self.rev_marks.iteritems())
        self.version = 2

    def dict(self):
        return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark, 'version' : self.version, 'last-note' : self.last_note }

    def store(self):
        json.dump(self.dict(), open(self.path, 'w'))

    def __str__(self):
        return str(self.dict())

    def from_rev(self, rev):
        return self.marks[rev]

    def to_rev(self, mark):
        return str(self.rev_marks[mark])

    def next_mark(self):
        self.last_mark += 1
        return self.last_mark

    def get_mark(self, rev):
        self.last_mark += 1
        self.marks[rev] = self.last_mark
        return self.last_mark

    def new_mark(self, rev, mark):
        self.marks[rev] = mark
        self.rev_marks[mark] = rev
        self.last_mark = mark

    def is_marked(self, rev):
        return rev in self.marks

    def get_tip(self, branch):
        return str(self.tips[branch])

    def set_tip(self, branch, tip):
        self.tips[branch] = tip
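# A rough sketch (illustrative, not a format guarantee) of the JSON persisted
# in "$GIT_DIR/hg/<alias>/marks-hg" by the class above:
#
#   { "tips": { "bookmarks/master": "<40-hex changeset id>" },
#     "marks": { "<40-hex changeset id>": 1 },
#     "last-mark": 1, "version": 2, "last-note": 0 }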
class Parser:

    def __init__(self, repo):
        self.repo = repo
        self.line = self.get_line()

    def get_line(self):
        return sys.stdin.readline().strip()

    def __getitem__(self, i):
        return self.line.split()[i]

    def check(self, word):
        return self.line.startswith(word)

    def each_block(self, separator):
        while self.line != separator:
            yield self.line
            self.line = self.get_line()

    def __iter__(self):
        return self.each_block('')

    def next(self):
        self.line = self.get_line()
        if self.line == 'done':
            self.line = None

    def get_mark(self):
        i = self.line.index(':') + 1
        return int(self.line[i:])

    def get_data(self):
        if not self.check('data'):
            return None
        i = self.line.index(' ') + 1
        size = int(self.line[i:])
        return sys.stdin.read(size)

    def get_author(self):
        ex = None
        m = RAW_AUTHOR_RE.match(self.line)
        if not m:
            return None
        _, name, email, date, tz = m.groups()
        if name and 'ext:' in name:
            m = re.match('^(.+?) ext:\((.+)\)$', name)
            if m:
                name = m.group(1)
                ex = urllib.unquote(m.group(2))

        if email != bad_mail:
            if name:
                user = '%s <%s>' % (name, email)
            else:
                user = '<%s>' % (email)
        else:
            user = name

        if ex:
            user += ex

        tz = int(tz)
        tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
        return (user, int(date), -tz)
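# get_author(), for example, turns the hypothetical fast-export line
#   committer Jane Doe <jane@example.com> 1380000000 +0200
# into ('Jane Doe <jane@example.com>', 1380000000, -7200).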

def fix_file_path(path):
    if not os.path.isabs(path):
        return path
    return os.path.relpath(path, '/')

def export_files(files):
    final = []
    for f in files:
        fid = node.hex(f.filenode())

        if fid in filenodes:
            mark = filenodes[fid]
        else:
            mark = marks.next_mark()
            filenodes[fid] = mark
            d = f.data()

            print "blob"
            print "mark :%u" % mark
            print "data %d" % len(d)
            print d

        path = fix_file_path(f.path())
        final.append((gitmode(f.flags()), mark, path))

    return final

def get_filechanges(repo, ctx, parent):
    modified = set()
    added = set()
    removed = set()

    # load earliest manifest first for caching reasons
    prev = parent.manifest().copy()
    cur = ctx.manifest()

    for fn in cur:
        if fn in prev:
            if (cur.flags(fn) != prev.flags(fn) or cur[fn] != prev[fn]):
                modified.add(fn)
            del prev[fn]
        else:
            added.add(fn)
    removed |= set(prev.keys())

    return added | modified, removed

def fixup_user_git(user):
    name = mail = None
    user = user.replace('"', '')
    m = AUTHOR_RE.match(user)
    if m:
        name = m.group(1)
        mail = m.group(2).strip()
    else:
        m = EMAIL_RE.match(user)
        if m:
            mail = m.group(1)
        else:
            m = NAME_RE.match(user)
            if m:
                name = m.group(1).strip()
    return (name, mail)

def fixup_user_hg(user):
    def sanitize(name):
        # stole this from hg-git
        return re.sub('[<>\n]', '?', name.lstrip('< ').rstrip('> '))

    m = AUTHOR_HG_RE.match(user)
    if m:
        name = sanitize(m.group(1))
        mail = sanitize(m.group(2))
        ex = m.group(3)
        if ex:
            name += ' ext:(' + urllib.quote(ex) + ')'
    else:
        name = sanitize(user)
        if '@' in user:
            mail = name
        else:
            mail = None

    return (name, mail)

def fixup_user(user):
    if mode == 'git':
        name, mail = fixup_user_git(user)
    else:
        name, mail = fixup_user_hg(user)

    if not name:
        name = bad_name
    if not mail:
        mail = bad_mail

    return '%s <%s>' % (name, mail)

def updatebookmarks(repo, peer):
    remotemarks = peer.listkeys('bookmarks')
    localmarks = repo._bookmarks

    if not remotemarks:
        return

    for k, v in remotemarks.iteritems():
        localmarks[k] = hgbin(v)

    if hasattr(localmarks, 'write'):
        localmarks.write()
    else:
        bookmarks.write(repo)

def get_repo(url, alias):
    global peer

    myui = ui.ui()
    myui.setconfig('ui', 'interactive', 'off')
    myui.fout = sys.stderr

    if get_config_bool('remote-hg.insecure'):
        myui.setconfig('web', 'cacerts', '')

    extensions.loadall(myui)

    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')

        # check and upgrade old organization
        hg_path = os.path.join(shared_path, '.hg')
        if os.path.exists(shared_path) and not os.path.exists(hg_path):
            repos = os.listdir(shared_path)
            for x in repos:
                local_hg = os.path.join(shared_path, x, 'clone', '.hg')
                if not os.path.exists(local_hg):
                    continue
                if not os.path.exists(hg_path):
                    shutil.move(local_hg, hg_path)
                shutil.rmtree(os.path.join(shared_path, x, 'clone'))

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path, create=True)
        except error.RepoError:
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, 'clone')
        if not os.path.exists(local_path):
            hg.share(myui, shared_path, local_path, update=False)

        repo = hg.repository(myui, local_path)
        try:
            peer = hg.peer(myui, {}, url)
        except:
            die('Repository error')
        repo.pull(peer, heads=None, force=True)

        updatebookmarks(repo, peer)

    return repo

def rev_to_mark(rev):
    return marks.from_rev(rev.hex())

def mark_to_rev(mark):
    return marks.to_rev(mark)

def export_ref(repo, name, kind, head):
    ename = '%s/%s' % (kind, name)
    try:
        tip = marks.get_tip(ename)
        tip = repo[tip].rev()
    except:
        tip = 0

    revs = xrange(tip, head.rev() + 1)
    total = len(revs)

    for rev in revs:

        c = repo[rev]
        node = c.node()

        if marks.is_marked(c.hex()):
            continue

        (manifest, user, (time, tz), files, desc, extra) = repo.changelog.read(node)
        rev_branch = extra['branch']

        author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
        if 'committer' in extra:
            user, time, tz = extra['committer'].rsplit(' ', 2)
            committer = "%s %s %s" % (user, time, gittz(int(tz)))
        else:
            committer = author

        parents = [repo[p] for p in repo.changelog.parentrevs(rev) if p >= 0]

        if len(parents) == 0:
            modified = c.manifest().keys()
            removed = []
        else:
            modified, removed = get_filechanges(repo, c, parents[0])

        desc += '\n'

        if mode == 'hg':
            extra_msg = ''

            if rev_branch != 'default':
                extra_msg += 'branch : %s\n' % rev_branch

            renames = []
            for f in c.files():
                if f not in c.manifest():
                    continue
                rename = c.filectx(f).renamed()
                if rename:
                    renames.append((rename[0], f))

            for e in renames:
                extra_msg += "rename : %s => %s\n" % e

            for key, value in extra.iteritems():
                if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
                    continue
                else:
                    extra_msg += "extra : %s : %s\n" % (key, urllib.quote(value))

            if extra_msg:
                desc += '\n--HG--\n' + extra_msg

        if len(parents) == 0 and rev:
            print 'reset %s/%s' % (prefix, ename)

        modified_final = export_files(c.filectx(f) for f in modified)

        print "commit %s/%s" % (prefix, ename)
        print "mark :%d" % (marks.get_mark(c.hex()))
        print "author %s" % (author)
        print "committer %s" % (committer)
        print "data %d" % (len(desc))
        print desc

        if len(parents) > 0:
            print "from :%s" % (rev_to_mark(parents[0]))
            if len(parents) > 1:
                print "merge :%s" % (rev_to_mark(parents[1]))

        for f in removed:
            print "D %s" % (fix_file_path(f))
        for f in modified_final:
            print "M %s :%u %s" % f
        print

        progress = (rev - tip)
        if (progress % 100 == 0):
            print "progress revision %d '%s' (%d/%d)" % (rev, name, progress, total)

    # make sure the ref is updated
    print "reset %s/%s" % (prefix, ename)
    print "from :%u" % rev_to_mark(head)
    print

    pending_revs = set(revs) - notes
    if pending_revs:
        note_mark = marks.next_mark()
        ref = "refs/notes/hg"

        print "commit %s" % ref
        print "mark :%d" % (note_mark)
        print "committer remote-hg <> %d %s" % (ptime.time(), gittz(ptime.timezone))
        desc = "Notes for %s\n" % (name)
        print "data %d" % (len(desc))
        print desc
        if marks.last_note:
            print "from :%u" % marks.last_note

        for rev in pending_revs:
            notes.add(rev)
            c = repo[rev]
            print "N inline :%u" % rev_to_mark(c)
            msg = c.hex()
            print "data %d" % (len(msg))
            print msg
        print

        marks.last_note = note_mark

    marks.set_tip(ename, head.hex())
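# A sketch of the fast-import stream export_ref() emits for one revision
# (values are purely illustrative):
#
#   commit refs/hg/origin/bookmarks/master
#   mark :12
#   author Jane Doe <jane@example.com> 1380000000 +0200
#   committer Jane Doe <jane@example.com> 1380000000 +0200
#   data 8
#   message
#   from :11
#   M 100644 :10 hello.txt
#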
def export_tag(repo, tag):
    export_ref(repo, tag, 'tags', repo[hgref(tag)])

def export_bookmark(repo, bmark):
    head = bmarks[hgref(bmark)]
    export_ref(repo, bmark, 'bookmarks', head)

def export_branch(repo, branch):
    tip = get_branch_tip(repo, branch)
    head = repo[tip]
    export_ref(repo, branch, 'branches', head)

def export_head(repo):
    export_ref(repo, g_head[0], 'bookmarks', g_head[1])

def do_capabilities(parser):
    print "import"
    print "export"
    print "refspec refs/heads/branches/*:%s/branches/*" % prefix
    print "refspec refs/heads/*:%s/bookmarks/*" % prefix
    print "refspec refs/tags/*:%s/tags/*" % prefix

    path = os.path.join(dirname, 'marks-git')

    if os.path.exists(path):
        print "*import-marks %s" % path
    print "*export-marks %s" % path
    print "option"

    print
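# For reference, with the alias 'origin' the advertised capabilities would
# look roughly like (illustrative output):
#
#   import
#   export
#   refspec refs/heads/branches/*:refs/hg/origin/branches/*
#   refspec refs/heads/*:refs/hg/origin/bookmarks/*
#   refspec refs/tags/*:refs/hg/origin/tags/*
#   *export-marks .git/hg/origin/marks-git
#   option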

def branch_tip(branch):
    return branches[branch][-1]

def get_branch_tip(repo, branch):
    heads = branches.get(hgref(branch), None)
    if not heads:
        return None

    # verify there's only one head
    if (len(heads) > 1):
        warn("Branch '%s' has more than one head, consider merging" % branch)
        return branch_tip(hgref(branch))

    return heads[0]

def list_head(repo, cur):
    global g_head, fake_bmark

    if 'default' not in branches:
        # empty repo
        return

    node = repo[branch_tip('default')]
    head = 'master' if not 'master' in bmarks else 'default'
    fake_bmark = head
    bmarks[head] = node

    head = gitref(head)
    print "@refs/heads/%s HEAD" % head
    g_head = (head, node)

def do_list(parser):
    repo = parser.repo
    for bmark, node in bookmarks.listbookmarks(repo).iteritems():
        bmarks[bmark] = repo[node]

    cur = repo.dirstate.branch()
    orig = peer if peer else repo

    for branch, heads in orig.branchmap().iteritems():
        # only open heads
        heads = [h for h in heads if 'close' not in repo.changelog.read(h)[5]]
        if heads:
            branches[branch] = heads

    list_head(repo, cur)

    if track_branches:
        for branch in branches:
            print "? refs/heads/branches/%s" % gitref(branch)

    for bmark in bmarks:
        print "? refs/heads/%s" % gitref(bmark)

    for tag, node in repo.tagslist():
        if tag == 'tip':
            continue
        print "? refs/tags/%s" % gitref(tag)

    print
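# Illustrative 'list' output for a repository with only the 'default' branch,
# a 'master' bookmark faked from its tip, and one tag (hypothetical names):
#
#   @refs/heads/master HEAD
#   ? refs/heads/branches/default
#   ? refs/heads/master
#   ? refs/tags/v1.0
#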
def do_import(parser):
    repo = parser.repo

    path = os.path.join(dirname, 'marks-git')

    print "feature done"
    if os.path.exists(path):
        print "feature import-marks=%s" % path
    print "feature export-marks=%s" % path
    print "feature force"
    sys.stdout.flush()

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    # let's get all the import lines
    while parser.check('import'):
        ref = parser[1]

        if (ref == 'HEAD'):
            export_head(repo)
        elif ref.startswith('refs/heads/branches/'):
            branch = ref[len('refs/heads/branches/'):]
            export_branch(repo, branch)
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
            export_bookmark(repo, bmark)
        elif ref.startswith('refs/tags/'):
            tag = ref[len('refs/tags/'):]
            export_tag(repo, tag)

        parser.next()

    encoding.encoding = tmp

    print 'done'

def parse_blob(parser):
    parser.next()
    mark = parser.get_mark()
    parser.next()
    data = parser.get_data()
    blob_marks[mark] = data
    parser.next()

def get_merge_files(repo, p1, p2, files):
    for e in repo[p1].files():
        if e not in files:
            if e not in repo[p1].manifest():
                continue
            f = { 'ctx' : repo[p1][e] }
            files[e] = f

def c_style_unescape(string):
    if string[0] == string[-1] == '"':
        return string.decode('string-escape')[1:-1]
    return string
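# Example (illustrative): git fast-export quotes paths that need escaping, so
# c_style_unescape('"dir/a\\tb"') yields 'dir/a\tb', while an unquoted path is
# returned untouched.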

def parse_commit(parser):
    from_mark = merge_mark = None

    ref = parser[1]
    parser.next()

    commit_mark = parser.get_mark()
    parser.next()
    author = parser.get_author()
    parser.next()
    committer = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()
    if parser.check('from'):
        from_mark = parser.get_mark()
        parser.next()
    if parser.check('merge'):
        merge_mark = parser.get_mark()
        parser.next()
        if parser.check('merge'):
            die('octopus merges are not supported yet')

    # fast-export adds an extra newline
    if data[-1] == '\n':
        data = data[:-1]

    files = {}

    for line in parser:
        if parser.check('M'):
            t, m, mark_ref, path = line.split(' ', 3)
            mark = int(mark_ref[1:])
            f = { 'mode' : hgmode(m), 'data' : blob_marks[mark] }
        elif parser.check('D'):
            t, path = line.split(' ', 1)
            f = { 'deleted' : True }
        else:
            die('Unknown file command: %s' % line)
        path = c_style_unescape(path)
        files[path] = f

    # only export the commits if we are on an internal proxy repo
    if dry_run and not peer:
        parsed_refs[ref] = None
        return

    def getfilectx(repo, memctx, f):
        of = files[f]
        if 'deleted' in of:
            raise IOError
        if 'ctx' in of:
            return of['ctx']
        is_exec = of['mode'] == 'x'
        is_link = of['mode'] == 'l'
        rename = of.get('rename', None)
        return context.memfilectx(f, of['data'],
                is_link, is_exec, rename)

    repo = parser.repo

    user, date, tz = author
    extra = {}

    if committer != author:
        extra['committer'] = "%s %u %u" % committer

    if from_mark:
        p1 = mark_to_rev(from_mark)
    else:
        p1 = '0' * 40

    if merge_mark:
        p2 = mark_to_rev(merge_mark)
    else:
        p2 = '0' * 40

    #
    # If files changed from any of the parents, hg wants to know, but in git if
    # nothing changed from the first parent, nothing changed.
    #
    if merge_mark:
        get_merge_files(repo, p1, p2, files)

    # Check if the ref is supposed to be a named branch
    if ref.startswith('refs/heads/branches/'):
        branch = ref[len('refs/heads/branches/'):]
        extra['branch'] = hgref(branch)

    if mode == 'hg':
        i = data.find('\n--HG--\n')
        if i >= 0:
            tmp = data[i + len('\n--HG--\n'):].strip()
            for k, v in [e.split(' : ', 1) for e in tmp.split('\n')]:
                if k == 'rename':
                    old, new = v.split(' => ', 1)
                    files[new]['rename'] = old
                elif k == 'branch':
                    extra[k] = v
                elif k == 'extra':
                    ek, ev = v.split(' : ', 1)
                    extra[ek] = urllib.unquote(ev)
            data = data[:i]

    ctx = context.memctx(repo, (p1, p2), data,
            files.keys(), getfilectx,
            user, (date, tz), extra)

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    node = hghex(repo.commitctx(ctx))

    encoding.encoding = tmp

    parsed_refs[ref] = node
    marks.new_mark(node, commit_mark)

def parse_reset(parser):
    ref = parser[1]
    parser.next()
    # ugh
    if parser.check('commit'):
        parse_commit(parser)
        return
    if not parser.check('from'):
        return
    from_mark = parser.get_mark()
    parser.next()

    try:
        rev = mark_to_rev(from_mark)
    except KeyError:
        rev = None
    parsed_refs[ref] = rev

def parse_tag(parser):
    name = parser[1]
    parser.next()
    from_mark = parser.get_mark()
    parser.next()
    tagger = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()

    parsed_tags[name] = (tagger, data)

def write_tag(repo, tag, node, msg, author):
    branch = repo[node].branch()
    tip = branch_tip(branch)
    tip = repo[tip]

    def getfilectx(repo, memctx, f):
        try:
            fctx = tip.filectx(f)
            data = fctx.data()
        except error.ManifestLookupError:
            data = ""
        content = data + "%s %s\n" % (node, tag)
        return context.memfilectx(f, content, False, False, None)

    p1 = tip.hex()
    p2 = '0' * 40
    if author:
        user, date, tz = author
        date_tz = (date, tz)
    else:
        cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, _ = process.communicate()
        m = re.match('^.* <.*>', output)
        if m:
            user = m.group(0)
        else:
            user = repo.ui.username()
        date_tz = None

    ctx = context.memctx(repo, (p1, p2), msg,
            ['.hgtags'], getfilectx,
            user, date_tz, {'branch' : branch})

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    tagnode = repo.commitctx(ctx)

    encoding.encoding = tmp

    return (tagnode, branch)

def checkheads_bmark(repo, ref, ctx):
    bmark = ref[len('refs/heads/'):]
    if not bmark in bmarks:
        # new bmark
        return True

    ctx_old = bmarks[bmark]
    ctx_new = ctx
    if not repo.changelog.descendant(ctx_old.rev(), ctx_new.rev()):
        if force_push:
            print "ok %s forced update" % ref
        else:
            print "error %s non-fast forward" % ref
            return False

    return True

def checkheads(repo, remote, p_revs):

    remotemap = remote.branchmap()
    if not remotemap:
        # empty repo
        return True

    new = {}
    ret = True

    for node, ref in p_revs.iteritems():
        ctx = repo[node]
        branch = ctx.branch()
        if not branch in remotemap:
            # new branch
            continue
        if not ref.startswith('refs/heads/branches'):
            if ref.startswith('refs/heads/'):
                if not checkheads_bmark(repo, ref, ctx):
                    ret = False

            # only check branches
            continue
        new.setdefault(branch, []).append(ctx.rev())

    for branch, heads in new.iteritems():
        old = [repo.changelog.rev(x) for x in remotemap[branch]]
        for rev in heads:
            if check_version(2, 3):
                ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
            else:
                ancestors = repo.changelog.ancestors(rev)
            found = False

            for x in old:
                if x in ancestors:
                    found = True
                    break

            if found:
                continue

            node = repo.changelog.node(rev)
            ref = p_revs[node]
            if force_push:
                print "ok %s forced update" % ref
            else:
                print "error %s non-fast forward" % ref
                ret = False

    return ret

def push_unsafe(repo, remote, parsed_refs, p_revs):

    force = force_push

    fci = discovery.findcommonincoming
    commoninc = fci(repo, remote, force=force)
    common, _, remoteheads = commoninc

    if not checkheads(repo, remote, p_revs):
        return None

    cg = repo.getbundle('push', heads=list(p_revs), common=common)

    unbundle = remote.capable('unbundle')
    if unbundle:
        if force:
            remoteheads = ['force']
        return remote.unbundle(cg, remoteheads, 'push')
    else:
        return remote.addchangegroup(cg, 'push', repo.url())

def push(repo, remote, parsed_refs, p_revs):
    if hasattr(remote, 'canpush') and not remote.canpush():
        print "error cannot push"

    if not p_revs:
        # nothing to push
        return

    lock = None
    unbundle = remote.capable('unbundle')
    if not unbundle:
        lock = remote.lock()
    try:
        ret = push_unsafe(repo, remote, parsed_refs, p_revs)
    finally:
        if lock is not None:
            lock.release()

    return ret

def check_tip(ref, kind, name, heads):
    try:
        ename = '%s/%s' % (kind, name)
        tip = marks.get_tip(ename)
    except KeyError:
        return True
    else:
        return tip in heads

def do_export(parser):
    p_bmarks = []
    p_revs = {}

    parser.next()

    for line in parser.each_block('done'):
        if parser.check('blob'):
            parse_blob(parser)
        elif parser.check('commit'):
            parse_commit(parser)
        elif parser.check('reset'):
            parse_reset(parser)
        elif parser.check('tag'):
            parse_tag(parser)
        elif parser.check('feature'):
            pass
        else:
            die('unhandled export command: %s' % line)

    need_fetch = False

    for ref, node in parsed_refs.iteritems():
        bnode = hgbin(node) if node else None
        if ref.startswith('refs/heads/branches'):
            branch = ref[len('refs/heads/branches/'):]
            if branch in branches and bnode in branches[branch]:
                # up to date
                continue

            if peer:
                remotemap = peer.branchmap()
                if remotemap and branch in remotemap:
                    heads = [hghex(e) for e in remotemap[branch]]
                    if not check_tip(ref, 'branches', branch, heads):
                        print "error %s fetch first" % ref
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
            print "ok %s" % ref
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
            new = node
            old = bmarks[bmark].hex() if bmark in bmarks else ''

            if old == new:
                continue

            print "ok %s" % ref
            if bmark != fake_bmark and \
                    not (bmark == 'master' and bmark not in parser.repo._bookmarks):
                p_bmarks.append((ref, bmark, old, new))

            if peer:
                remote_old = peer.listkeys('bookmarks').get(bmark)
                if remote_old:
                    if not check_tip(ref, 'bookmarks', bmark, remote_old):
                        print "error %s fetch first" % ref
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
        elif ref.startswith('refs/tags/'):
            if dry_run:
                print "ok %s" % ref
                continue
            tag = ref[len('refs/tags/'):]
            tag = hgref(tag)
            author, msg = parsed_tags.get(tag, (None, None))
            if mode == 'git':
                if not msg:
                    msg = 'Added tag %s for changeset %s' % (tag, node[:12])
                tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
                p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
            else:
                fp = parser.repo.opener('localtags', 'a')
                fp.write('%s %s\n' % (node, tag))
                fp.close()
                p_revs[bnode] = ref
            print "ok %s" % ref
        else:
            # transport-helper/fast-export bugs
            continue

    if need_fetch:
        print
        return

    if dry_run:
        if peer and not force_push:
            checkheads(parser.repo, peer, p_revs)
        print
        return

    if peer:
        if not push(parser.repo, peer, parsed_refs, p_revs):
            # do not update bookmarks
            print
            return

        # update remote bookmarks
        remote_bmarks = peer.listkeys('bookmarks')
        for ref, bmark, old, new in p_bmarks:
            if force_push:
                old = remote_bmarks.get(bmark, '')
            if not peer.pushkey('bookmarks', bmark, old, new):
                print "error %s" % ref
    else:
        # update local bookmarks
        for ref, bmark, old, new in p_bmarks:
            if not bookmarks.pushbookmark(parser.repo, bmark, old, new):
                print "error %s" % ref

    print

def do_option(parser):
    global dry_run, force_push
    _, key, value = parser.line.split(' ')
    if key == 'dry-run':
        dry_run = (value == 'true')
        print 'ok'
    elif key == 'force':
        force_push = (value == 'true')
        print 'ok'
    else:
        print 'unsupported'

def fix_path(alias, repo, orig_url):
    url = urlparse.urlparse(orig_url, 'file')
    if url.scheme != 'file' or os.path.isabs(os.path.expanduser(url.path)):
        return
    abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
    cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % abs_url]
    subprocess.call(cmd)

def main(args):
    global prefix, gitdir, dirname, branches, bmarks
    global marks, blob_marks, parsed_refs
    global peer, mode, bad_mail, bad_name
    global track_branches, force_push, is_tmp
    global parsed_tags
    global filenodes
    global fake_bmark, hg_version
    global dry_run
    global notes, alias

    marks = None
    is_tmp = False
    gitdir = os.environ.get('GIT_DIR', None)

    if len(args) < 3:
        die('Not enough arguments.')

    if not gitdir:
        die('GIT_DIR not set')

    alias = args[1]
    url = args[2]
    peer = None

    hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
    track_branches = get_config_bool('remote-hg.track-branches', True)
    force_push = False

    if hg_git_compat:
        mode = 'hg'
        bad_mail = 'none@none'
        bad_name = ''
    else:
        mode = 'git'
        bad_mail = 'unknown'
        bad_name = 'Unknown'

    if alias[4:] == url:
        is_tmp = True
        alias = hashlib.sha1(alias).hexdigest()

    dirname = os.path.join(gitdir, 'hg', alias)
    branches = {}
    bmarks = {}
    blob_marks = {}
    parsed_refs = {}
    parsed_tags = {}
    filenodes = {}
    fake_bmark = None
    try:
        hg_version = tuple(int(e) for e in util.version().split('.'))
    except:
        hg_version = None
    dry_run = False
    notes = set()

    repo = get_repo(url, alias)
    prefix = 'refs/hg/%s' % alias

    if not is_tmp:
        fix_path(alias, peer or repo, url)

    marks_path = os.path.join(dirname, 'marks-hg')
    marks = Marks(marks_path, repo)

    if sys.platform == 'win32':
        import msvcrt
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)

    parser = Parser(repo)
    for line in parser:
        if parser.check('capabilities'):
            do_capabilities(parser)
        elif parser.check('list'):
            do_list(parser)
        elif parser.check('import'):
            do_import(parser)
        elif parser.check('export'):
            do_export(parser)
        elif parser.check('option'):
            do_option(parser)
        else:
            die('unhandled command: %s' % line)
        sys.stdout.flush()

def bye():
    if not marks:
        return
    if not is_tmp:
        marks.store()
    else:
        shutil.rmtree(dirname)

atexit.register(bye)
sys.exit(main(sys.argv))