# fastexport.py
# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""export repositories as git fast-import stream"""
# The format specification for fast-import streams can be found at
# https://git-scm.com/docs/git-fast-import#_input_format
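#
# As a rough, illustrative sketch (the exact layout is produced by
# export_commit below; the author shown is made up), the emitted stream
# interleaves blob and commit commands along these lines:
#
#   blob
#   mark :1
#   data <byte count>
#   <file contents>
#
#   commit refs/heads/default
#   mark :2
#   committer "Alice Example" <alice@example.org> 1577880000 +0100
#   data <byte count>
#   <commit message>
#   from :<mark of first parent, if any>
#   M 644 :1 <path>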

import re

from mercurial.i18n import _
from mercurial.node import hex, nullrev
from mercurial.utils import stringutil
from mercurial import (
    error,
    logcmdutil,
    pycompat,
    registrar,
    scmutil,
)
from .convert import convcmd

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b"ships-with-hg-core"

cmdtable = {}
command = registrar.command(cmdtable)

GIT_PERSON_PROHIBITED = re.compile(b'[<>\n"]')
GIT_EMAIL_PROHIBITED = re.compile(b"[<> \n]")
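# The committer line written by export_commit cannot hold a name containing
# '<', '>', a double quote or a newline, nor an e-mail address containing
# '<', '>', a space or a newline; users hitting these patterns abort the
# export unless they are remapped via --authormap.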


def convert_to_git_user(authormap, user, rev):
    mapped_user = authormap.get(user, user)
    user_person = stringutil.person(mapped_user)
    user_email = stringutil.email(mapped_user)
    if GIT_EMAIL_PROHIBITED.match(user_email) or GIT_PERSON_PROHIBITED.match(
        user_person
    ):
        raise error.Abort(
            _(b"Unable to parse user into person and email for revision %s")
            % rev
        )
    if user_person:
        return b'"' + user_person + b'" <' + user_email + b'>'
    else:
        return b"<" + user_email + b">"
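# For example (illustrative values), a user such as
# b"Alice Example <alice@example.org>" is rewritten to
# b'"Alice Example" <alice@example.org>', while an address-only user such as
# b"<alice@example.org>" passes through as b"<alice@example.org>".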


def convert_to_git_date(date):
    timestamp, utcoff = date
    tzsign = b"+" if utcoff <= 0 else b"-"
    if utcoff % 60 != 0:
        # pycompat.bytestr renders the (timestamp, offset) tuple so it can be
        # interpolated into the bytes error message.
        raise error.Abort(
            _(b"UTC offset in %b is not an integer number of seconds")
            % (pycompat.bytestr(date),)
        )
    utcoff = abs(utcoff) // 60
    tzh = utcoff // 60
    tzmin = utcoff % 60
    return b"%d " % int(timestamp) + tzsign + b"%02d%02d" % (tzh, tzmin)
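# For example (illustrative values): Mercurial dates are (unixtime, offset)
# pairs with the offset in seconds west of UTC, so a changeset dated
# (1577880000, -3600), i.e. one hour east of UTC, is rendered as
# b"1577880000 +0100".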


def convert_to_git_ref(branch):
    # XXX filter/map depending on git restrictions
    return b"refs/heads/" + branch
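# Note that the branch name is used verbatim, so b"default" simply becomes
# b"refs/heads/default"; branch names containing bytes that git rejects in
# ref names (spaces, "..", "~", "^", ":", see git-check-ref-format) would
# currently yield an invalid ref and may need remapping before import.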


def write_data(buf, data, skip_newline):
    buf.append(b"data %d\n" % len(data))
    buf.append(data)
    if not skip_newline or data[-1:] != b"\n":
        buf.append(b"\n")
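# For example (illustrative values), write_data(buf, b"hello\n", False)
# appends b"data 6\n", the payload itself and a terminating b"\n"; with
# skip_newline=True (as used for commit messages) the extra newline is only
# added when the payload does not already end in one.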


def export_commit(ui, repo, rev, marks, authormap):
    ctx = repo[rev]
    revid = ctx.hex()
    if revid in marks:
        ui.debug(b"warning: revision %s already exported, skipped\n" % revid)
        return
    parents = [p for p in ctx.parents() if p.rev() != nullrev]
    for p in parents:
        if p.hex() not in marks:
            ui.warn(
                _(b"warning: parent %s of %s has not been exported, skipped\n")
                % (p, revid)
            )
            return

    # For all files modified by the commit, check if they have already
    # been exported and otherwise dump the blob with the new mark.
    for fname in ctx.files():
        if fname not in ctx:
            continue
        filectx = ctx.filectx(fname)
        filerev = hex(filectx.filenode())
        if filerev not in marks:
            mark = len(marks) + 1
            marks[filerev] = mark
            data = filectx.data()
            buf = [b"blob\n", b"mark :%d\n" % mark]
            write_data(buf, data, False)
            ui.write(*buf, keepprogressbar=True)
            del buf

    # Assign a mark for the current revision for references by
    # later merge commits.
    mark = len(marks) + 1
    marks[revid] = mark

    ref = convert_to_git_ref(ctx.branch())
    buf = [
        b"commit %s\n" % ref,
        b"mark :%d\n" % mark,
        b"committer %s %s\n"
        % (
            convert_to_git_user(authormap, ctx.user(), revid),
            convert_to_git_date(ctx.date()),
        ),
    ]
    write_data(buf, ctx.description(), True)
    if parents:
        buf.append(b"from :%d\n" % marks[parents[0].hex()])
    if len(parents) == 2:
        buf.append(b"merge :%d\n" % marks[parents[1].hex()])
        p0ctx = repo[parents[0]]
        files = ctx.manifest().diff(p0ctx.manifest())
    else:
        files = ctx.files()
    filebuf = []
    for fname in files:
        if fname not in ctx:
            filebuf.append((fname, b"D %s\n" % fname))
        else:
            filectx = ctx.filectx(fname)
            filerev = filectx.filenode()
            fileperm = b"755" if filectx.isexec() else b"644"
            changed = b"M %s :%d %s\n" % (fileperm, marks[hex(filerev)], fname)
            filebuf.append((fname, changed))
    filebuf.sort()
    buf.extend(changed for (fname, changed) in filebuf)
    del filebuf
    buf.append(b"\n")
    ui.write(*buf, keepprogressbar=True)
    del buf


isrev = re.compile(b"^[0-9a-f]{40}$")


@command(
    b"fastexport",
    [
        (b"r", b"rev", [], _(b"revisions to export"), _(b"REV")),
        (b"i", b"import-marks", b"", _(b"old marks file to read"), _(b"FILE")),
        (b"e", b"export-marks", b"", _(b"new marks file to write"), _(b"FILE")),
        (
            b"A",
            b"authormap",
            b"",
            _(b"remap usernames using this file"),
            _(b"FILE"),
        ),
    ],
    _(b"[OPTION]... [REV]..."),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def fastexport(ui, repo, *revs, **opts):
    """export repository as git fast-import stream

    This command lets you dump a repository as a human-readable text stream.
    It can be piped into corresponding import routines like "git fast-import".
    Incremental dumps can be created by using marks files.
    """
    opts = pycompat.byteskwargs(opts)
    revs += tuple(opts.get(b"rev", []))
    if not revs:
        revs = scmutil.revrange(repo, [b":"])
    else:
        revs = logcmdutil.revrange(repo, revs)
    if not revs:
        raise error.Abort(_(b"no revisions matched"))
    authorfile = opts.get(b"authormap")
    if authorfile:
        authormap = convcmd.readauthormap(ui, authorfile)
    else:
        authormap = {}

    import_marks = opts.get(b"import_marks")
    marks = {}
    if import_marks:
        with open(import_marks, "rb") as import_marks_file:
            for line in import_marks_file:
                line = line.strip()
                if not isrev.match(line) or line in marks:
                    raise error.Abort(_(b"Corrupted marks file"))
                marks[line] = len(marks) + 1
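    # The marks file read above (and written at the end of this function) is
    # this extension's own format, not git fast-import's ":idnum SHA" format:
    # one 40-digit hexadecimal Mercurial node per line, where the 1-based
    # line number is the mark assigned to that blob or commit.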

    revs.sort()
    with ui.makeprogress(
        _(b"exporting"), unit=_(b"revisions"), total=len(revs)
    ) as progress:
        for rev in revs:
            export_commit(ui, repo, rev, marks, authormap)
            progress.increment()

    export_marks = opts.get(b"export_marks")
    if export_marks:
        with open(export_marks, "wb") as export_marks_file:
            output_marks = [None] * len(marks)
            for k, v in marks.items():
                output_marks[v - 1] = k
            for k in output_marks:
                export_marks_file.write(k + b"\n")
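
# Illustrative usage sketch (not part of the extension; file and repository
# names are made up):
#
#   hg fastexport --export-marks hg.marks > repo.fi
#   git init --bare mirror.git
#   git -C mirror.git fast-import --export-marks=git.marks < repo.fi
#
# A later incremental run can pass --import-marks hg.marks so that
# already-exported revisions are skipped, while giving git fast-import
# --import-marks=git.marks so that the mark numbers reused across runs still
# resolve on its side.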