##// END OF EJS Templates
errors: stop passing non-strings to Abort's constructor...
Martin von Zweigbergk -
r46273:a736ab68 default
parent child Browse files
Show More
@@ -1,1070 +1,1070 b''
1 # Mercurial built-in replacement for cvsps.
1 # Mercurial built-in replacement for cvsps.
2 #
2 #
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import functools
9 import functools
10 import os
10 import os
11 import re
11 import re
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.pycompat import open
14 from mercurial.pycompat import open
15 from mercurial import (
15 from mercurial import (
16 encoding,
16 encoding,
17 error,
17 error,
18 hook,
18 hook,
19 pycompat,
19 pycompat,
20 util,
20 util,
21 )
21 )
22 from mercurial.utils import (
22 from mercurial.utils import (
23 dateutil,
23 dateutil,
24 procutil,
24 procutil,
25 stringutil,
25 stringutil,
26 )
26 )
27
27
# NB: `pickle` here is Mercurial's util.pickle wrapper, not the stdlib
# module imported directly.
pickle = util.pickle
29
29
30
30
class logentry(object):
    """A single CVS file revision as parsed from (r)log output.

    All attributes are supplied as keyword arguments; ``synthetic``
    defaults to False.  Attributes used by this module:

    .author       - author name as CVS knows it
    .branch       - name of branch this revision is on
    .branches     - revision tuple of branches starting at this revision
    .comment      - commit message
    .commitid     - CVS commitid or None
    .date         - the commit date as a (time, tz) tuple
    .dead         - true if file revision is dead
    .file         - name of file
    .lines        - a tuple (+lines, -lines) or None
    .parent       - previous revision of this entry
    .rcs          - name of file as returned from CVS
    .revision     - revision number as tuple
    .tags         - list of tags on the file
    .synthetic    - is this a synthetic "file ... added on ..." revision?
    .mergepoint   - the branch that has been merged from (if present in
                    rlog output) or None
    .branchpoints - the branches that start at the current entry or empty
    """

    def __init__(self, **entries):
        # default first; a caller-supplied 'synthetic' entry overrides it
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        fields = [
            "%s=%r" % (name, self.__dict__[name])
            for name in sorted(self.__dict__)
        ]
        return "%s(%s)" % (type(self).__name__, ", ".join(fields))
59
59
60
60
class logerror(Exception):
    """Raised when CVS (r)log output reports an error or cannot be read."""
63
63
64
64
def getrepopath(cvspath):
    """Return the repository path from a CVS path.

    >>> getrepopath(b'/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:10c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b':pserver:truc@foo.bar:c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(b'user@server/path/to/repository')
    '/path/to/repository'
    """
    # Per the CVS manual, paths have the form:
    #   [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
    # Everything up to the last ':' is access method / credentials, so
    # only the final colon-separated component matters.  Within it, skip
    # past any 'user@host' part and return everything from the first
    # subsequent '/' (inclusive) to the end.
    tail = cvspath.split(b':')[-1]
    searchfrom = tail.find(b'@')
    if searchfrom == -1:
        searchfrom = 0
    return tail[tail.find(b'/', searchfrom) :]
103
103
104
104
def createlog(ui, directory=None, root=b"", rlog=True, cache=None):
    '''Collect the CVS rlog.

    Runs ``cvs rlog`` (or ``cvs log`` when rlog is False) and parses its
    output with a line-oriented state machine, returning a list of
    logentry objects sorted by (rcs, revision).

    When ``cache`` is set, results are pickled under ~/.hg.cvsps; with
    ``cache == b'update'`` the previous cache is loaded first and only
    entries newer than its last date are fetched from CVS.

    Raises logerror when CVS reports an error or the sandbox is invalid.
    '''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}

    def scache(s):
        """return a shared version of a string"""
        return _scache.setdefault(s, s)

    ui.status(_(b'collecting CVS rlog\n'))

    log = []  # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile(b'RCS file: (.+)$')
    re_01 = re.compile(b'cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile(b'cvs (r?log|server): (.+)\n$')
    re_03 = re.compile(
        b"(Cannot access.+CVSROOT)|(can't create temporary directory.+)$"
    )
    re_10 = re.compile(b'Working file: (.+)$')
    re_20 = re.compile(b'symbolic names:')
    re_30 = re.compile(b'\t(.+): ([\\d.]+)$')
    re_31 = re.compile(b'----------------------------$')
    re_32 = re.compile(
        b'======================================='
        b'======================================$'
    )
    re_50 = re.compile(br'revision ([\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(
        br'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
        br'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
        br'(\s+commitid:\s+([^;]+);)?'
        br'(.*mergepoint:\s+([^;]+);)?'
    )
    re_70 = re.compile(b'branches: (.+);$')

    file_added_re = re.compile(br'file [^/]+ was (initially )?added on branch')

    prefix = b''  # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            with open(os.path.join(b'CVS', b'Repository'), b'rb') as f:
                prefix = f.read().strip()
            directory = prefix
            if prefix == b".":
                prefix = b""
        except IOError:
            raise logerror(_(b'not a CVS sandbox'))

        if prefix and not prefix.endswith(pycompat.ossep):
            prefix += pycompat.ossep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join(b'CVS', b'Root'), b'rb').read().strip()
        except IOError:
            pass

    if not root:
        root = encoding.environ.get(b'CVSROOT', b'')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser(b'~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumeric characters, concatenated in a way that does not
        # mix up the various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(b":") + [directory, b"cache"]
        cachefile = [b'-'.join(re.findall(br'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(
            cachedir, b'.'.join([s for s in cachefile if s])
        )

        if cache == b'update':
            try:
                ui.note(_(b'reading cvs log cache %s\n') % cachefile)
                oldlog = pickle.load(open(cachefile, b'rb'))
                for e in oldlog:
                    # discard caches written by older versions that lack
                    # these attributes rather than crashing later
                    if not (
                        util.safehasattr(e, b'branchpoints')
                        and util.safehasattr(e, b'commitid')
                        and util.safehasattr(e, b'mergepoint')
                    ):
                        ui.status(_(b'ignoring old cache\n'))
                        oldlog = []
                        break

                ui.note(_(b'cache has %d log entries\n') % len(oldlog))
            except Exception as e:
                # best-effort: a broken cache just means a full refetch
                ui.note(_(b'error reading cache: %r\n') % e)

            if oldlog:
                date = oldlog[-1].date  # last commit date as a (time,tz) tuple
                date = dateutil.datestr(date, b'%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = [b'cvs', b'-q']
    if root:
        cmd.append(b'-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith(b'/'):
            p += b'/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append([b'log', b'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append(b'-d>%s' % date)
    cmd.append(directory)

    # state machine begins here
    tags = {}  # dictionary of revisions on current file with their tags
    branchmap = {}  # mapping between branch names and revision numbers
    rcsmap = {}
    state = 0
    store = False  # set when a new record can be appended

    cmd = [procutil.shellquote(arg) for arg in cmd]
    ui.note(_(b"running %s\n") % (b' '.join(cmd)))
    ui.debug(b"prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = procutil.popen(b' '.join(cmd), b'rb')
    # one-line lookahead: state 7/8 needs to peek at the next line to
    # decide whether a separator ends the current revision
    peek = util.fromnativeeol(pfp.readline())
    while True:
        line = peek
        if line == b'':
            break
        peek = util.fromnativeeol(pfp.readline())
        if line.endswith(b'\n'):
            line = line[:-1]
        # ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    # strip the trailing ',v' and the repository prefix to
                    # recover the working-file name from the RCS path
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix) :]
                    if filename.startswith(b'/'):
                        filename = filename[1:]
                    if filename.startswith(b'Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace(b'/Attic/', b'/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _(b'RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split(b'.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _(
                    b'must have at least some revisions'
                )

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _(b'expected revision number')
            e = logentry(
                rcs=scache(rcs),
                file=scache(filename),
                revision=tuple([int(x) for x in match.group(1).split(b'.')]),
                branches=[],
                parent=None,
                commitid=None,
                mergepoint=None,
                branchpoints=set(),
            )

            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _(b'revision must be followed by date line')
            d = match.group(1)
            # NOTE(review): on Python 3, indexing a bytes object yields an
            # int, so `d[2] == b'/'` is always False and this Y2K prefix
            # path can never trigger; `d[2:3] == b'/'` looks intended —
            # confirm upstream.
            if d[2] == b'/':
                # Y2K
                d = b'19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + b' UTC'
            e.date = dateutil.parsedate(
                d,
                [
                    b'%y/%m/%d %H:%M:%S',
                    b'%Y/%m/%d %H:%M:%S',
                    b'%Y-%m-%d %H:%M:%S',
                ],
            )
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == b'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7):  # cvs 1.12 commitid
                e.commitid = match.group(8)

            if match.group(9):  # cvsnt mergepoint
                myrev = match.group(10).split(b'.')
                if len(myrev) == 2:  # head
                    e.mergepoint = b'HEAD'
                else:
                    # translate the revision to its magic branch number
                    # form so it can be looked up in branchmap
                    myrev = b'.'.join(myrev[:-2] + [b'0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    assert len(branches) == 1, (
                        b'unknown branch: %s' % e.mergepoint
                    )
                    e.mergepoint = branches[0]

            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [
                    tuple([int(y) for y in x.strip().split(b'.')])
                    for x in m.group(1).split(b';')
                ]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                cpeek = peek
                if cpeek.endswith(b'\n'):
                    cpeek = cpeek[:-1]
                if re_50.match(cpeek):
                    state = 5
                    store = True
                else:
                    e.comment.append(line)
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (
            store
            and e.dead
            and e.revision[-1] == 1  # 1.1 or 1.1.x.1
            and len(e.comment) == 1
            and file_added_re.match(e.comment[0])
        ):
            ui.debug(
                b'found synthetic revision in %s: %r\n' % (e.rcs, e.comment[0])
            )
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache(b'\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in pycompat.iteritems(branchmap):
                revparts = tuple([int(i) for i in revision.split(b'.')])
                if len(revparts) < 2:  # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1):  # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            # keep a Attic-normalized -> actual RCS path map so cached
            # entries can be matched up after a file moves to/from Attic
            rcsmap[e.rcs.replace(b'/Attic/', b'/')] = e.rcs

            if len(log) % 100 == 0:
                ui.status(
                    stringutil.ellipsis(b'%d %s' % (len(log), e.file), 80)
                    + b'\n'
                )

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in sorted(oldlog, key=lambda x: (x.rcs, x.revision)):
        rcs = e.rcs.replace(b'/Attic/', b'/')
        if rcs in rcsmap:
            e.rcs = rcsmap[rcs]
        branch = e.revision[:-1]
        versions[(e.rcs, branch)] = e.revision

    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(
                    _(
                        b'log cache overlaps with new log entries,'
                        b' re-run without cache.'
                    )
                )

            log = oldlog + log

            # write the new cachefile
            ui.note(_(b'writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, b'wb'))
        else:
            log = oldlog

    ui.status(_(b'%d log entries\n') % len(log))

    encodings = ui.configlist(b'convert', b'cvsps.logencoding')
    if encodings:

        def revstr(r):
            # this is needed, because logentry.revision is a tuple of "int"
            # (e.g. (1, 2) for "1.2")
            return b'.'.join(pycompat.maplist(pycompat.bytestr, r))

        # try each configured encoding in order; first one that decodes
        # the comment wins, and the comment is re-encoded as UTF-8
        for entry in log:
            comment = entry.comment
            for e in encodings:
                try:
                    entry.comment = comment.decode(pycompat.sysstr(e)).encode(
                        'utf-8'
                    )
                    if ui.debugflag:
                        ui.debug(
                            b"transcoding by %s: %s of %s\n"
                            % (e, revstr(entry.revision), entry.file)
                        )
                    break
                except UnicodeDecodeError:
                    pass  # try next encoding
                except LookupError as inst:  # unknown encoding, maybe
                    raise error.Abort(
                        pycompat.bytestr(inst),
                        hint=_(
                            b'check convert.cvsps.logencoding configuration'
                        ),
                    )
            else:
                raise error.Abort(
                    _(
                        b"no encoding can transcode"
                        b" CVS log message for %s of %s"
                    )
                    % (revstr(entry.revision), entry.file),
                    hint=_(b'check convert.cvsps.logencoding configuration'),
                )

    hook.hook(ui, None, b"cvslog", True, log=log)

    return log
580
580
581
581
582 class changeset(object):
582 class changeset(object):
583 '''Class changeset has the following attributes:
583 '''Class changeset has the following attributes:
584 .id - integer identifying this changeset (list index)
584 .id - integer identifying this changeset (list index)
585 .author - author name as CVS knows it
585 .author - author name as CVS knows it
586 .branch - name of branch this changeset is on, or None
586 .branch - name of branch this changeset is on, or None
587 .comment - commit message
587 .comment - commit message
588 .commitid - CVS commitid or None
588 .commitid - CVS commitid or None
589 .date - the commit date as a (time,tz) tuple
589 .date - the commit date as a (time,tz) tuple
590 .entries - list of logentry objects in this changeset
590 .entries - list of logentry objects in this changeset
591 .parents - list of one or two parent changesets
591 .parents - list of one or two parent changesets
592 .tags - list of tags on this changeset
592 .tags - list of tags on this changeset
593 .synthetic - from synthetic revision "file ... added on branch ..."
593 .synthetic - from synthetic revision "file ... added on branch ..."
594 .mergepoint- the branch that has been merged from or None
594 .mergepoint- the branch that has been merged from or None
595 .branchpoints- the branches that start at the current entry or empty
595 .branchpoints- the branches that start at the current entry or empty
596 '''
596 '''
597
597
598 def __init__(self, **entries):
598 def __init__(self, **entries):
599 self.id = None
599 self.id = None
600 self.synthetic = False
600 self.synthetic = False
601 self.__dict__.update(entries)
601 self.__dict__.update(entries)
602
602
603 def __repr__(self):
603 def __repr__(self):
604 items = (
604 items = (
605 b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
605 b"%s=%r" % (k, self.__dict__[k]) for k in sorted(self.__dict__)
606 )
606 )
607 return b"%s(%s)" % (type(self).__name__, b", ".join(items))
607 return b"%s(%s)" % (type(self).__name__, b", ".join(items))
608
608
609
609
610 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
610 def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
611 '''Convert log into changesets.'''
611 '''Convert log into changesets.'''
612
612
613 ui.status(_(b'creating changesets\n'))
613 ui.status(_(b'creating changesets\n'))
614
614
615 # try to order commitids by date
615 # try to order commitids by date
616 mindate = {}
616 mindate = {}
617 for e in log:
617 for e in log:
618 if e.commitid:
618 if e.commitid:
619 if e.commitid not in mindate:
619 if e.commitid not in mindate:
620 mindate[e.commitid] = e.date
620 mindate[e.commitid] = e.date
621 else:
621 else:
622 mindate[e.commitid] = min(e.date, mindate[e.commitid])
622 mindate[e.commitid] = min(e.date, mindate[e.commitid])
623
623
624 # Merge changesets
624 # Merge changesets
625 log.sort(
625 log.sort(
626 key=lambda x: (
626 key=lambda x: (
627 mindate.get(x.commitid, (-1, 0)),
627 mindate.get(x.commitid, (-1, 0)),
628 x.commitid or b'',
628 x.commitid or b'',
629 x.comment,
629 x.comment,
630 x.author,
630 x.author,
631 x.branch or b'',
631 x.branch or b'',
632 x.date,
632 x.date,
633 x.branchpoints,
633 x.branchpoints,
634 )
634 )
635 )
635 )
636
636
637 changesets = []
637 changesets = []
638 files = set()
638 files = set()
639 c = None
639 c = None
640 for i, e in enumerate(log):
640 for i, e in enumerate(log):
641
641
642 # Check if log entry belongs to the current changeset or not.
642 # Check if log entry belongs to the current changeset or not.
643
643
644 # Since CVS is file-centric, two different file revisions with
644 # Since CVS is file-centric, two different file revisions with
645 # different branchpoints should be treated as belonging to two
645 # different branchpoints should be treated as belonging to two
646 # different changesets (and the ordering is important and not
646 # different changesets (and the ordering is important and not
647 # honoured by cvsps at this point).
647 # honoured by cvsps at this point).
648 #
648 #
649 # Consider the following case:
649 # Consider the following case:
650 # foo 1.1 branchpoints: [MYBRANCH]
650 # foo 1.1 branchpoints: [MYBRANCH]
651 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
651 # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
652 #
652 #
653 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
653 # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
654 # later version of foo may be in MYBRANCH2, so foo should be the
654 # later version of foo may be in MYBRANCH2, so foo should be the
655 # first changeset and bar the next and MYBRANCH and MYBRANCH2
655 # first changeset and bar the next and MYBRANCH and MYBRANCH2
656 # should both start off of the bar changeset. No provisions are
656 # should both start off of the bar changeset. No provisions are
657 # made to ensure that this is, in fact, what happens.
657 # made to ensure that this is, in fact, what happens.
658 if not (
658 if not (
659 c
659 c
660 and e.branchpoints == c.branchpoints
660 and e.branchpoints == c.branchpoints
661 and ( # cvs commitids
661 and ( # cvs commitids
662 (e.commitid is not None and e.commitid == c.commitid)
662 (e.commitid is not None and e.commitid == c.commitid)
663 or ( # no commitids, use fuzzy commit detection
663 or ( # no commitids, use fuzzy commit detection
664 (e.commitid is None or c.commitid is None)
664 (e.commitid is None or c.commitid is None)
665 and e.comment == c.comment
665 and e.comment == c.comment
666 and e.author == c.author
666 and e.author == c.author
667 and e.branch == c.branch
667 and e.branch == c.branch
668 and (
668 and (
669 (c.date[0] + c.date[1])
669 (c.date[0] + c.date[1])
670 <= (e.date[0] + e.date[1])
670 <= (e.date[0] + e.date[1])
671 <= (c.date[0] + c.date[1]) + fuzz
671 <= (c.date[0] + c.date[1]) + fuzz
672 )
672 )
673 and e.file not in files
673 and e.file not in files
674 )
674 )
675 )
675 )
676 ):
676 ):
677 c = changeset(
677 c = changeset(
678 comment=e.comment,
678 comment=e.comment,
679 author=e.author,
679 author=e.author,
680 branch=e.branch,
680 branch=e.branch,
681 date=e.date,
681 date=e.date,
682 entries=[],
682 entries=[],
683 mergepoint=e.mergepoint,
683 mergepoint=e.mergepoint,
684 branchpoints=e.branchpoints,
684 branchpoints=e.branchpoints,
685 commitid=e.commitid,
685 commitid=e.commitid,
686 )
686 )
687 changesets.append(c)
687 changesets.append(c)
688
688
689 files = set()
689 files = set()
690 if len(changesets) % 100 == 0:
690 if len(changesets) % 100 == 0:
691 t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
691 t = b'%d %s' % (len(changesets), repr(e.comment)[1:-1])
692 ui.status(stringutil.ellipsis(t, 80) + b'\n')
692 ui.status(stringutil.ellipsis(t, 80) + b'\n')
693
693
694 c.entries.append(e)
694 c.entries.append(e)
695 files.add(e.file)
695 files.add(e.file)
696 c.date = e.date # changeset date is date of latest commit in it
696 c.date = e.date # changeset date is date of latest commit in it
697
697
698 # Mark synthetic changesets
698 # Mark synthetic changesets
699
699
700 for c in changesets:
700 for c in changesets:
701 # Synthetic revisions always get their own changeset, because
701 # Synthetic revisions always get their own changeset, because
702 # the log message includes the filename. E.g. if you add file3
702 # the log message includes the filename. E.g. if you add file3
703 # and file4 on a branch, you get four log entries and three
703 # and file4 on a branch, you get four log entries and three
704 # changesets:
704 # changesets:
705 # "File file3 was added on branch ..." (synthetic, 1 entry)
705 # "File file3 was added on branch ..." (synthetic, 1 entry)
706 # "File file4 was added on branch ..." (synthetic, 1 entry)
706 # "File file4 was added on branch ..." (synthetic, 1 entry)
707 # "Add file3 and file4 to fix ..." (real, 2 entries)
707 # "Add file3 and file4 to fix ..." (real, 2 entries)
708 # Hence the check for 1 entry here.
708 # Hence the check for 1 entry here.
709 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
709 c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic
710
710
711 # Sort files in each changeset
711 # Sort files in each changeset
712
712
713 def entitycompare(l, r):
713 def entitycompare(l, r):
714 """Mimic cvsps sorting order"""
714 """Mimic cvsps sorting order"""
715 l = l.file.split(b'/')
715 l = l.file.split(b'/')
716 r = r.file.split(b'/')
716 r = r.file.split(b'/')
717 nl = len(l)
717 nl = len(l)
718 nr = len(r)
718 nr = len(r)
719 n = min(nl, nr)
719 n = min(nl, nr)
720 for i in range(n):
720 for i in range(n):
721 if i + 1 == nl and nl < nr:
721 if i + 1 == nl and nl < nr:
722 return -1
722 return -1
723 elif i + 1 == nr and nl > nr:
723 elif i + 1 == nr and nl > nr:
724 return +1
724 return +1
725 elif l[i] < r[i]:
725 elif l[i] < r[i]:
726 return -1
726 return -1
727 elif l[i] > r[i]:
727 elif l[i] > r[i]:
728 return +1
728 return +1
729 return 0
729 return 0
730
730
731 for c in changesets:
731 for c in changesets:
732 c.entries.sort(key=functools.cmp_to_key(entitycompare))
732 c.entries.sort(key=functools.cmp_to_key(entitycompare))
733
733
734 # Sort changesets by date
734 # Sort changesets by date
735
735
736 odd = set()
736 odd = set()
737
737
738 def cscmp(l, r):
738 def cscmp(l, r):
739 d = sum(l.date) - sum(r.date)
739 d = sum(l.date) - sum(r.date)
740 if d:
740 if d:
741 return d
741 return d
742
742
743 # detect vendor branches and initial commits on a branch
743 # detect vendor branches and initial commits on a branch
744 le = {}
744 le = {}
745 for e in l.entries:
745 for e in l.entries:
746 le[e.rcs] = e.revision
746 le[e.rcs] = e.revision
747 re = {}
747 re = {}
748 for e in r.entries:
748 for e in r.entries:
749 re[e.rcs] = e.revision
749 re[e.rcs] = e.revision
750
750
751 d = 0
751 d = 0
752 for e in l.entries:
752 for e in l.entries:
753 if re.get(e.rcs, None) == e.parent:
753 if re.get(e.rcs, None) == e.parent:
754 assert not d
754 assert not d
755 d = 1
755 d = 1
756 break
756 break
757
757
758 for e in r.entries:
758 for e in r.entries:
759 if le.get(e.rcs, None) == e.parent:
759 if le.get(e.rcs, None) == e.parent:
760 if d:
760 if d:
761 odd.add((l, r))
761 odd.add((l, r))
762 d = -1
762 d = -1
763 break
763 break
764 # By this point, the changesets are sufficiently compared that
764 # By this point, the changesets are sufficiently compared that
765 # we don't really care about ordering. However, this leaves
765 # we don't really care about ordering. However, this leaves
766 # some race conditions in the tests, so we compare on the
766 # some race conditions in the tests, so we compare on the
767 # number of files modified, the files contained in each
767 # number of files modified, the files contained in each
768 # changeset, and the branchpoints in the change to ensure test
768 # changeset, and the branchpoints in the change to ensure test
769 # output remains stable.
769 # output remains stable.
770
770
771 # recommended replacement for cmp from
771 # recommended replacement for cmp from
772 # https://docs.python.org/3.0/whatsnew/3.0.html
772 # https://docs.python.org/3.0/whatsnew/3.0.html
773 c = lambda x, y: (x > y) - (x < y)
773 c = lambda x, y: (x > y) - (x < y)
774 # Sort bigger changes first.
774 # Sort bigger changes first.
775 if not d:
775 if not d:
776 d = c(len(l.entries), len(r.entries))
776 d = c(len(l.entries), len(r.entries))
777 # Try sorting by filename in the change.
777 # Try sorting by filename in the change.
778 if not d:
778 if not d:
779 d = c([e.file for e in l.entries], [e.file for e in r.entries])
779 d = c([e.file for e in l.entries], [e.file for e in r.entries])
780 # Try and put changes without a branch point before ones with
780 # Try and put changes without a branch point before ones with
781 # a branch point.
781 # a branch point.
782 if not d:
782 if not d:
783 d = c(len(l.branchpoints), len(r.branchpoints))
783 d = c(len(l.branchpoints), len(r.branchpoints))
784 return d
784 return d
785
785
786 changesets.sort(key=functools.cmp_to_key(cscmp))
786 changesets.sort(key=functools.cmp_to_key(cscmp))
787
787
788 # Collect tags
788 # Collect tags
789
789
790 globaltags = {}
790 globaltags = {}
791 for c in changesets:
791 for c in changesets:
792 for e in c.entries:
792 for e in c.entries:
793 for tag in e.tags:
793 for tag in e.tags:
794 # remember which is the latest changeset to have this tag
794 # remember which is the latest changeset to have this tag
795 globaltags[tag] = c
795 globaltags[tag] = c
796
796
797 for c in changesets:
797 for c in changesets:
798 tags = set()
798 tags = set()
799 for e in c.entries:
799 for e in c.entries:
800 tags.update(e.tags)
800 tags.update(e.tags)
801 # remember tags only if this is the latest changeset to have it
801 # remember tags only if this is the latest changeset to have it
802 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
802 c.tags = sorted(tag for tag in tags if globaltags[tag] is c)
803
803
804 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
804 # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
805 # by inserting dummy changesets with two parents, and handle
805 # by inserting dummy changesets with two parents, and handle
806 # {{mergefrombranch BRANCHNAME}} by setting two parents.
806 # {{mergefrombranch BRANCHNAME}} by setting two parents.
807
807
808 if mergeto is None:
808 if mergeto is None:
809 mergeto = br'{{mergetobranch ([-\w]+)}}'
809 mergeto = br'{{mergetobranch ([-\w]+)}}'
810 if mergeto:
810 if mergeto:
811 mergeto = re.compile(mergeto)
811 mergeto = re.compile(mergeto)
812
812
813 if mergefrom is None:
813 if mergefrom is None:
814 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
814 mergefrom = br'{{mergefrombranch ([-\w]+)}}'
815 if mergefrom:
815 if mergefrom:
816 mergefrom = re.compile(mergefrom)
816 mergefrom = re.compile(mergefrom)
817
817
818 versions = {} # changeset index where we saw any particular file version
818 versions = {} # changeset index where we saw any particular file version
819 branches = {} # changeset index where we saw a branch
819 branches = {} # changeset index where we saw a branch
820 n = len(changesets)
820 n = len(changesets)
821 i = 0
821 i = 0
822 while i < n:
822 while i < n:
823 c = changesets[i]
823 c = changesets[i]
824
824
825 for f in c.entries:
825 for f in c.entries:
826 versions[(f.rcs, f.revision)] = i
826 versions[(f.rcs, f.revision)] = i
827
827
828 p = None
828 p = None
829 if c.branch in branches:
829 if c.branch in branches:
830 p = branches[c.branch]
830 p = branches[c.branch]
831 else:
831 else:
832 # first changeset on a new branch
832 # first changeset on a new branch
833 # the parent is a changeset with the branch in its
833 # the parent is a changeset with the branch in its
834 # branchpoints such that it is the latest possible
834 # branchpoints such that it is the latest possible
835 # commit without any intervening, unrelated commits.
835 # commit without any intervening, unrelated commits.
836
836
837 for candidate in pycompat.xrange(i):
837 for candidate in pycompat.xrange(i):
838 if c.branch not in changesets[candidate].branchpoints:
838 if c.branch not in changesets[candidate].branchpoints:
839 if p is not None:
839 if p is not None:
840 break
840 break
841 continue
841 continue
842 p = candidate
842 p = candidate
843
843
844 c.parents = []
844 c.parents = []
845 if p is not None:
845 if p is not None:
846 p = changesets[p]
846 p = changesets[p]
847
847
848 # Ensure no changeset has a synthetic changeset as a parent.
848 # Ensure no changeset has a synthetic changeset as a parent.
849 while p.synthetic:
849 while p.synthetic:
850 assert len(p.parents) <= 1, _(
850 assert len(p.parents) <= 1, _(
851 b'synthetic changeset cannot have multiple parents'
851 b'synthetic changeset cannot have multiple parents'
852 )
852 )
853 if p.parents:
853 if p.parents:
854 p = p.parents[0]
854 p = p.parents[0]
855 else:
855 else:
856 p = None
856 p = None
857 break
857 break
858
858
859 if p is not None:
859 if p is not None:
860 c.parents.append(p)
860 c.parents.append(p)
861
861
862 if c.mergepoint:
862 if c.mergepoint:
863 if c.mergepoint == b'HEAD':
863 if c.mergepoint == b'HEAD':
864 c.mergepoint = None
864 c.mergepoint = None
865 c.parents.append(changesets[branches[c.mergepoint]])
865 c.parents.append(changesets[branches[c.mergepoint]])
866
866
867 if mergefrom:
867 if mergefrom:
868 m = mergefrom.search(c.comment)
868 m = mergefrom.search(c.comment)
869 if m:
869 if m:
870 m = m.group(1)
870 m = m.group(1)
871 if m == b'HEAD':
871 if m == b'HEAD':
872 m = None
872 m = None
873 try:
873 try:
874 candidate = changesets[branches[m]]
874 candidate = changesets[branches[m]]
875 except KeyError:
875 except KeyError:
876 ui.warn(
876 ui.warn(
877 _(
877 _(
878 b"warning: CVS commit message references "
878 b"warning: CVS commit message references "
879 b"non-existent branch %r:\n%s\n"
879 b"non-existent branch %r:\n%s\n"
880 )
880 )
881 % (pycompat.bytestr(m), c.comment)
881 % (pycompat.bytestr(m), c.comment)
882 )
882 )
883 if m in branches and c.branch != m and not candidate.synthetic:
883 if m in branches and c.branch != m and not candidate.synthetic:
884 c.parents.append(candidate)
884 c.parents.append(candidate)
885
885
886 if mergeto:
886 if mergeto:
887 m = mergeto.search(c.comment)
887 m = mergeto.search(c.comment)
888 if m:
888 if m:
889 if m.groups():
889 if m.groups():
890 m = m.group(1)
890 m = m.group(1)
891 if m == b'HEAD':
891 if m == b'HEAD':
892 m = None
892 m = None
893 else:
893 else:
894 m = None # if no group found then merge to HEAD
894 m = None # if no group found then merge to HEAD
895 if m in branches and c.branch != m:
895 if m in branches and c.branch != m:
896 # insert empty changeset for merge
896 # insert empty changeset for merge
897 cc = changeset(
897 cc = changeset(
898 author=c.author,
898 author=c.author,
899 branch=m,
899 branch=m,
900 date=c.date,
900 date=c.date,
901 comment=b'convert-repo: CVS merge from branch %s'
901 comment=b'convert-repo: CVS merge from branch %s'
902 % c.branch,
902 % c.branch,
903 entries=[],
903 entries=[],
904 tags=[],
904 tags=[],
905 parents=[changesets[branches[m]], c],
905 parents=[changesets[branches[m]], c],
906 )
906 )
907 changesets.insert(i + 1, cc)
907 changesets.insert(i + 1, cc)
908 branches[m] = i + 1
908 branches[m] = i + 1
909
909
910 # adjust our loop counters now we have inserted a new entry
910 # adjust our loop counters now we have inserted a new entry
911 n += 1
911 n += 1
912 i += 2
912 i += 2
913 continue
913 continue
914
914
915 branches[c.branch] = i
915 branches[c.branch] = i
916 i += 1
916 i += 1
917
917
918 # Drop synthetic changesets (safe now that we have ensured no other
918 # Drop synthetic changesets (safe now that we have ensured no other
919 # changesets can have them as parents).
919 # changesets can have them as parents).
920 i = 0
920 i = 0
921 while i < len(changesets):
921 while i < len(changesets):
922 if changesets[i].synthetic:
922 if changesets[i].synthetic:
923 del changesets[i]
923 del changesets[i]
924 else:
924 else:
925 i += 1
925 i += 1
926
926
927 # Number changesets
927 # Number changesets
928
928
929 for i, c in enumerate(changesets):
929 for i, c in enumerate(changesets):
930 c.id = i + 1
930 c.id = i + 1
931
931
932 if odd:
932 if odd:
933 for l, r in odd:
933 for l, r in odd:
934 if l.id is not None and r.id is not None:
934 if l.id is not None and r.id is not None:
935 ui.warn(
935 ui.warn(
936 _(b'changeset %d is both before and after %d\n')
936 _(b'changeset %d is both before and after %d\n')
937 % (l.id, r.id)
937 % (l.id, r.id)
938 )
938 )
939
939
940 ui.status(_(b'%d changeset entries\n') % len(changesets))
940 ui.status(_(b'%d changeset entries\n') % len(changesets))
941
941
942 hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)
942 hook.hook(ui, None, b"cvschangesets", True, changesets=changesets)
943
943
944 return changesets
944 return changesets
945
945
946
946
947 def debugcvsps(ui, *args, **opts):
947 def debugcvsps(ui, *args, **opts):
948 '''Read CVS rlog for current directory or named path in
948 '''Read CVS rlog for current directory or named path in
949 repository, and convert the log to changesets based on matching
949 repository, and convert the log to changesets based on matching
950 commit log entries and dates.
950 commit log entries and dates.
951 '''
951 '''
952 opts = pycompat.byteskwargs(opts)
952 opts = pycompat.byteskwargs(opts)
953 if opts[b"new_cache"]:
953 if opts[b"new_cache"]:
954 cache = b"write"
954 cache = b"write"
955 elif opts[b"update_cache"]:
955 elif opts[b"update_cache"]:
956 cache = b"update"
956 cache = b"update"
957 else:
957 else:
958 cache = None
958 cache = None
959
959
960 revisions = opts[b"revisions"]
960 revisions = opts[b"revisions"]
961
961
962 try:
962 try:
963 if args:
963 if args:
964 log = []
964 log = []
965 for d in args:
965 for d in args:
966 log += createlog(ui, d, root=opts[b"root"], cache=cache)
966 log += createlog(ui, d, root=opts[b"root"], cache=cache)
967 else:
967 else:
968 log = createlog(ui, root=opts[b"root"], cache=cache)
968 log = createlog(ui, root=opts[b"root"], cache=cache)
969 except logerror as e:
969 except logerror as e:
970 ui.write(b"%r\n" % e)
970 ui.write(b"%r\n" % e)
971 return
971 return
972
972
973 changesets = createchangeset(ui, log, opts[b"fuzz"])
973 changesets = createchangeset(ui, log, opts[b"fuzz"])
974 del log
974 del log
975
975
976 # Print changesets (optionally filtered)
976 # Print changesets (optionally filtered)
977
977
978 off = len(revisions)
978 off = len(revisions)
979 branches = {} # latest version number in each branch
979 branches = {} # latest version number in each branch
980 ancestors = {} # parent branch
980 ancestors = {} # parent branch
981 for cs in changesets:
981 for cs in changesets:
982
982
983 if opts[b"ancestors"]:
983 if opts[b"ancestors"]:
984 if cs.branch not in branches and cs.parents and cs.parents[0].id:
984 if cs.branch not in branches and cs.parents and cs.parents[0].id:
985 ancestors[cs.branch] = (
985 ancestors[cs.branch] = (
986 changesets[cs.parents[0].id - 1].branch,
986 changesets[cs.parents[0].id - 1].branch,
987 cs.parents[0].id,
987 cs.parents[0].id,
988 )
988 )
989 branches[cs.branch] = cs.id
989 branches[cs.branch] = cs.id
990
990
991 # limit by branches
991 # limit by branches
992 if (
992 if (
993 opts[b"branches"]
993 opts[b"branches"]
994 and (cs.branch or b'HEAD') not in opts[b"branches"]
994 and (cs.branch or b'HEAD') not in opts[b"branches"]
995 ):
995 ):
996 continue
996 continue
997
997
998 if not off:
998 if not off:
999 # Note: trailing spaces on several lines here are needed to have
999 # Note: trailing spaces on several lines here are needed to have
1000 # bug-for-bug compatibility with cvsps.
1000 # bug-for-bug compatibility with cvsps.
1001 ui.write(b'---------------------\n')
1001 ui.write(b'---------------------\n')
1002 ui.write((b'PatchSet %d \n' % cs.id))
1002 ui.write((b'PatchSet %d \n' % cs.id))
1003 ui.write(
1003 ui.write(
1004 (
1004 (
1005 b'Date: %s\n'
1005 b'Date: %s\n'
1006 % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
1006 % dateutil.datestr(cs.date, b'%Y/%m/%d %H:%M:%S %1%2')
1007 )
1007 )
1008 )
1008 )
1009 ui.write((b'Author: %s\n' % cs.author))
1009 ui.write((b'Author: %s\n' % cs.author))
1010 ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
1010 ui.write((b'Branch: %s\n' % (cs.branch or b'HEAD')))
1011 ui.write(
1011 ui.write(
1012 (
1012 (
1013 b'Tag%s: %s \n'
1013 b'Tag%s: %s \n'
1014 % (
1014 % (
1015 [b'', b's'][len(cs.tags) > 1],
1015 [b'', b's'][len(cs.tags) > 1],
1016 b','.join(cs.tags) or b'(none)',
1016 b','.join(cs.tags) or b'(none)',
1017 )
1017 )
1018 )
1018 )
1019 )
1019 )
1020 if cs.branchpoints:
1020 if cs.branchpoints:
1021 ui.writenoi18n(
1021 ui.writenoi18n(
1022 b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
1022 b'Branchpoints: %s \n' % b', '.join(sorted(cs.branchpoints))
1023 )
1023 )
1024 if opts[b"parents"] and cs.parents:
1024 if opts[b"parents"] and cs.parents:
1025 if len(cs.parents) > 1:
1025 if len(cs.parents) > 1:
1026 ui.write(
1026 ui.write(
1027 (
1027 (
1028 b'Parents: %s\n'
1028 b'Parents: %s\n'
1029 % (b','.join([(b"%d" % p.id) for p in cs.parents]))
1029 % (b','.join([(b"%d" % p.id) for p in cs.parents]))
1030 )
1030 )
1031 )
1031 )
1032 else:
1032 else:
1033 ui.write((b'Parent: %d\n' % cs.parents[0].id))
1033 ui.write((b'Parent: %d\n' % cs.parents[0].id))
1034
1034
1035 if opts[b"ancestors"]:
1035 if opts[b"ancestors"]:
1036 b = cs.branch
1036 b = cs.branch
1037 r = []
1037 r = []
1038 while b:
1038 while b:
1039 b, c = ancestors[b]
1039 b, c = ancestors[b]
1040 r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
1040 r.append(b'%s:%d:%d' % (b or b"HEAD", c, branches[b]))
1041 if r:
1041 if r:
1042 ui.write((b'Ancestors: %s\n' % (b','.join(r))))
1042 ui.write((b'Ancestors: %s\n' % (b','.join(r))))
1043
1043
1044 ui.writenoi18n(b'Log:\n')
1044 ui.writenoi18n(b'Log:\n')
1045 ui.write(b'%s\n\n' % cs.comment)
1045 ui.write(b'%s\n\n' % cs.comment)
1046 ui.writenoi18n(b'Members: \n')
1046 ui.writenoi18n(b'Members: \n')
1047 for f in cs.entries:
1047 for f in cs.entries:
1048 fn = f.file
1048 fn = f.file
1049 if fn.startswith(opts[b"prefix"]):
1049 if fn.startswith(opts[b"prefix"]):
1050 fn = fn[len(opts[b"prefix"]) :]
1050 fn = fn[len(opts[b"prefix"]) :]
1051 ui.write(
1051 ui.write(
1052 b'\t%s:%s->%s%s \n'
1052 b'\t%s:%s->%s%s \n'
1053 % (
1053 % (
1054 fn,
1054 fn,
1055 b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
1055 b'.'.join([b"%d" % x for x in f.parent]) or b'INITIAL',
1056 b'.'.join([(b"%d" % x) for x in f.revision]),
1056 b'.'.join([(b"%d" % x) for x in f.revision]),
1057 [b'', b'(DEAD)'][f.dead],
1057 [b'', b'(DEAD)'][f.dead],
1058 )
1058 )
1059 )
1059 )
1060 ui.write(b'\n')
1060 ui.write(b'\n')
1061
1061
1062 # have we seen the start tag?
1062 # have we seen the start tag?
1063 if revisions and off:
1063 if revisions and off:
1064 if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
1064 if revisions[0] == (b"%d" % cs.id) or revisions[0] in cs.tags:
1065 off = False
1065 off = False
1066
1066
1067 # see if we reached the end tag
1067 # see if we reached the end tag
1068 if len(revisions) > 1 and not off:
1068 if len(revisions) > 1 and not off:
1069 if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
1069 if revisions[1] == (b"%d" % cs.id) or revisions[1] in cs.tags:
1070 break
1070 break
@@ -1,702 +1,704 b''
1 # encoding.py - character transcoding support for Mercurial
1 # encoding.py - character transcoding support for Mercurial
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import, print_function
8 from __future__ import absolute_import, print_function
9
9
10 import locale
10 import locale
11 import os
11 import os
12 import unicodedata
12 import unicodedata
13
13
14 from .pycompat import getattr
14 from .pycompat import getattr
15 from . import (
15 from . import (
16 error,
16 error,
17 policy,
17 policy,
18 pycompat,
18 pycompat,
19 )
19 )
20
20
21 from .pure import charencode as charencodepure
21 from .pure import charencode as charencodepure
22
22
23 if pycompat.TYPE_CHECKING:
23 if pycompat.TYPE_CHECKING:
24 from typing import (
24 from typing import (
25 Any,
25 Any,
26 Callable,
26 Callable,
27 List,
27 List,
28 Text,
28 Text,
29 Type,
29 Type,
30 TypeVar,
30 TypeVar,
31 Union,
31 Union,
32 )
32 )
33
33
34 # keep pyflakes happy
34 # keep pyflakes happy
35 for t in (Any, Callable, List, Text, Type, Union):
35 for t in (Any, Callable, List, Text, Type, Union):
36 assert t
36 assert t
37
37
38 _Tlocalstr = TypeVar('_Tlocalstr', bound='localstr')
38 _Tlocalstr = TypeVar('_Tlocalstr', bound='localstr')
39
39
40 charencode = policy.importmod('charencode')
40 charencode = policy.importmod('charencode')
41
41
42 isasciistr = charencode.isasciistr
42 isasciistr = charencode.isasciistr
43 asciilower = charencode.asciilower
43 asciilower = charencode.asciilower
44 asciiupper = charencode.asciiupper
44 asciiupper = charencode.asciiupper
45 _jsonescapeu8fast = charencode.jsonescapeu8fast
45 _jsonescapeu8fast = charencode.jsonescapeu8fast
46
46
47 _sysstr = pycompat.sysstr
47 _sysstr = pycompat.sysstr
48
48
49 if pycompat.ispy3:
49 if pycompat.ispy3:
50 unichr = chr
50 unichr = chr
51
51
52 # These unicode characters are ignored by HFS+ (Apple Technote 1150,
52 # These unicode characters are ignored by HFS+ (Apple Technote 1150,
53 # "Unicode Subtleties"), so we need to ignore them in some places for
53 # "Unicode Subtleties"), so we need to ignore them in some places for
54 # sanity.
54 # sanity.
55 _ignore = [
55 _ignore = [
56 unichr(int(x, 16)).encode("utf-8")
56 unichr(int(x, 16)).encode("utf-8")
57 for x in b"200c 200d 200e 200f 202a 202b 202c 202d 202e "
57 for x in b"200c 200d 200e 200f 202a 202b 202c 202d 202e "
58 b"206a 206b 206c 206d 206e 206f feff".split()
58 b"206a 206b 206c 206d 206e 206f feff".split()
59 ]
59 ]
60 # verify the next function will work
60 # verify the next function will work
61 assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)
61 assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)
62
62
63
63
64 def hfsignoreclean(s):
64 def hfsignoreclean(s):
65 # type: (bytes) -> bytes
65 # type: (bytes) -> bytes
66 """Remove codepoints ignored by HFS+ from s.
66 """Remove codepoints ignored by HFS+ from s.
67
67
68 >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
68 >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
69 '.hg'
69 '.hg'
70 >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
70 >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8'))
71 '.hg'
71 '.hg'
72 """
72 """
73 if b"\xe2" in s or b"\xef" in s:
73 if b"\xe2" in s or b"\xef" in s:
74 for c in _ignore:
74 for c in _ignore:
75 s = s.replace(c, b'')
75 s = s.replace(c, b'')
76 return s
76 return s
77
77
78
78
79 # encoding.environ is provided read-only, which may not be used to modify
79 # encoding.environ is provided read-only, which may not be used to modify
80 # the process environment
80 # the process environment
81 _nativeenviron = not pycompat.ispy3 or os.supports_bytes_environ
81 _nativeenviron = not pycompat.ispy3 or os.supports_bytes_environ
82 if not pycompat.ispy3:
82 if not pycompat.ispy3:
83 environ = os.environ # re-exports
83 environ = os.environ # re-exports
84 elif _nativeenviron:
84 elif _nativeenviron:
85 environ = os.environb # re-exports
85 environ = os.environb # re-exports
86 else:
86 else:
87 # preferred encoding isn't known yet; use utf-8 to avoid unicode error
87 # preferred encoding isn't known yet; use utf-8 to avoid unicode error
88 # and recreate it once encoding is settled
88 # and recreate it once encoding is settled
89 environ = {
89 environ = {
90 k.encode('utf-8'): v.encode('utf-8')
90 k.encode('utf-8'): v.encode('utf-8')
91 for k, v in os.environ.items() # re-exports
91 for k, v in os.environ.items() # re-exports
92 }
92 }
93
93
94 _encodingrewrites = {
94 _encodingrewrites = {
95 b'646': b'ascii',
95 b'646': b'ascii',
96 b'ANSI_X3.4-1968': b'ascii',
96 b'ANSI_X3.4-1968': b'ascii',
97 }
97 }
98 # cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
98 # cp65001 is a Windows variant of utf-8, which isn't supported on Python 2.
99 # No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
99 # No idea if it should be rewritten to the canonical name 'utf-8' on Python 3.
100 # https://bugs.python.org/issue13216
100 # https://bugs.python.org/issue13216
101 if pycompat.iswindows and not pycompat.ispy3:
101 if pycompat.iswindows and not pycompat.ispy3:
102 _encodingrewrites[b'cp65001'] = b'utf-8'
102 _encodingrewrites[b'cp65001'] = b'utf-8'
103
103
104 try:
104 try:
105 encoding = environ.get(b"HGENCODING")
105 encoding = environ.get(b"HGENCODING")
106 if not encoding:
106 if not encoding:
107 encoding = locale.getpreferredencoding().encode('ascii') or b'ascii'
107 encoding = locale.getpreferredencoding().encode('ascii') or b'ascii'
108 encoding = _encodingrewrites.get(encoding, encoding)
108 encoding = _encodingrewrites.get(encoding, encoding)
109 except locale.Error:
109 except locale.Error:
110 encoding = b'ascii'
110 encoding = b'ascii'
111 encodingmode = environ.get(b"HGENCODINGMODE", b"strict")
111 encodingmode = environ.get(b"HGENCODINGMODE", b"strict")
112 fallbackencoding = b'ISO-8859-1'
112 fallbackencoding = b'ISO-8859-1'
113
113
114
114
115 class localstr(bytes):
115 class localstr(bytes):
116 '''This class allows strings that are unmodified to be
116 '''This class allows strings that are unmodified to be
117 round-tripped to the local encoding and back'''
117 round-tripped to the local encoding and back'''
118
118
119 def __new__(cls, u, l):
119 def __new__(cls, u, l):
120 s = bytes.__new__(cls, l)
120 s = bytes.__new__(cls, l)
121 s._utf8 = u
121 s._utf8 = u
122 return s
122 return s
123
123
124 if pycompat.TYPE_CHECKING:
124 if pycompat.TYPE_CHECKING:
125 # pseudo implementation to help pytype see localstr() constructor
125 # pseudo implementation to help pytype see localstr() constructor
126 def __init__(self, u, l):
126 def __init__(self, u, l):
127 # type: (bytes, bytes) -> None
127 # type: (bytes, bytes) -> None
128 super(localstr, self).__init__(l)
128 super(localstr, self).__init__(l)
129 self._utf8 = u
129 self._utf8 = u
130
130
131 def __hash__(self):
131 def __hash__(self):
132 return hash(self._utf8) # avoid collisions in local string space
132 return hash(self._utf8) # avoid collisions in local string space
133
133
134
134
135 class safelocalstr(bytes):
135 class safelocalstr(bytes):
136 """Tagged string denoting it was previously an internal UTF-8 string,
136 """Tagged string denoting it was previously an internal UTF-8 string,
137 and can be converted back to UTF-8 losslessly
137 and can be converted back to UTF-8 losslessly
138
138
139 >>> assert safelocalstr(b'\\xc3') == b'\\xc3'
139 >>> assert safelocalstr(b'\\xc3') == b'\\xc3'
140 >>> assert b'\\xc3' == safelocalstr(b'\\xc3')
140 >>> assert b'\\xc3' == safelocalstr(b'\\xc3')
141 >>> assert b'\\xc3' in {safelocalstr(b'\\xc3'): 0}
141 >>> assert b'\\xc3' in {safelocalstr(b'\\xc3'): 0}
142 >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
142 >>> assert safelocalstr(b'\\xc3') in {b'\\xc3': 0}
143 """
143 """
144
144
145
145
146 def tolocal(s):
146 def tolocal(s):
147 # type: (bytes) -> bytes
147 # type: (bytes) -> bytes
148 """
148 """
149 Convert a string from internal UTF-8 to local encoding
149 Convert a string from internal UTF-8 to local encoding
150
150
151 All internal strings should be UTF-8 but some repos before the
151 All internal strings should be UTF-8 but some repos before the
152 implementation of locale support may contain latin1 or possibly
152 implementation of locale support may contain latin1 or possibly
153 other character sets. We attempt to decode everything strictly
153 other character sets. We attempt to decode everything strictly
154 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
154 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
155 replace unknown characters.
155 replace unknown characters.
156
156
157 The localstr class is used to cache the known UTF-8 encoding of
157 The localstr class is used to cache the known UTF-8 encoding of
158 strings next to their local representation to allow lossless
158 strings next to their local representation to allow lossless
159 round-trip conversion back to UTF-8.
159 round-trip conversion back to UTF-8.
160
160
161 >>> u = b'foo: \\xc3\\xa4' # utf-8
161 >>> u = b'foo: \\xc3\\xa4' # utf-8
162 >>> l = tolocal(u)
162 >>> l = tolocal(u)
163 >>> l
163 >>> l
164 'foo: ?'
164 'foo: ?'
165 >>> fromlocal(l)
165 >>> fromlocal(l)
166 'foo: \\xc3\\xa4'
166 'foo: \\xc3\\xa4'
167 >>> u2 = b'foo: \\xc3\\xa1'
167 >>> u2 = b'foo: \\xc3\\xa1'
168 >>> d = { l: 1, tolocal(u2): 2 }
168 >>> d = { l: 1, tolocal(u2): 2 }
169 >>> len(d) # no collision
169 >>> len(d) # no collision
170 2
170 2
171 >>> b'foo: ?' in d
171 >>> b'foo: ?' in d
172 False
172 False
173 >>> l1 = b'foo: \\xe4' # historical latin1 fallback
173 >>> l1 = b'foo: \\xe4' # historical latin1 fallback
174 >>> l = tolocal(l1)
174 >>> l = tolocal(l1)
175 >>> l
175 >>> l
176 'foo: ?'
176 'foo: ?'
177 >>> fromlocal(l) # magically in utf-8
177 >>> fromlocal(l) # magically in utf-8
178 'foo: \\xc3\\xa4'
178 'foo: \\xc3\\xa4'
179 """
179 """
180
180
181 if isasciistr(s):
181 if isasciistr(s):
182 return s
182 return s
183
183
184 try:
184 try:
185 try:
185 try:
186 # make sure string is actually stored in UTF-8
186 # make sure string is actually stored in UTF-8
187 u = s.decode('UTF-8')
187 u = s.decode('UTF-8')
188 if encoding == b'UTF-8':
188 if encoding == b'UTF-8':
189 # fast path
189 # fast path
190 return s
190 return s
191 r = u.encode(_sysstr(encoding), "replace")
191 r = u.encode(_sysstr(encoding), "replace")
192 if u == r.decode(_sysstr(encoding)):
192 if u == r.decode(_sysstr(encoding)):
193 # r is a safe, non-lossy encoding of s
193 # r is a safe, non-lossy encoding of s
194 return safelocalstr(r)
194 return safelocalstr(r)
195 return localstr(s, r)
195 return localstr(s, r)
196 except UnicodeDecodeError:
196 except UnicodeDecodeError:
197 # we should only get here if we're looking at an ancient changeset
197 # we should only get here if we're looking at an ancient changeset
198 try:
198 try:
199 u = s.decode(_sysstr(fallbackencoding))
199 u = s.decode(_sysstr(fallbackencoding))
200 r = u.encode(_sysstr(encoding), "replace")
200 r = u.encode(_sysstr(encoding), "replace")
201 if u == r.decode(_sysstr(encoding)):
201 if u == r.decode(_sysstr(encoding)):
202 # r is a safe, non-lossy encoding of s
202 # r is a safe, non-lossy encoding of s
203 return safelocalstr(r)
203 return safelocalstr(r)
204 return localstr(u.encode('UTF-8'), r)
204 return localstr(u.encode('UTF-8'), r)
205 except UnicodeDecodeError:
205 except UnicodeDecodeError:
206 u = s.decode("utf-8", "replace") # last ditch
206 u = s.decode("utf-8", "replace") # last ditch
207 # can't round-trip
207 # can't round-trip
208 return u.encode(_sysstr(encoding), "replace")
208 return u.encode(_sysstr(encoding), "replace")
209 except LookupError as k:
209 except LookupError as k:
210 raise error.Abort(k, hint=b"please check your locale settings")
210 raise error.Abort(
211 pycompat.bytestr(k), hint=b"please check your locale settings"
212 )
211
213
212
214
213 def fromlocal(s):
215 def fromlocal(s):
214 # type: (bytes) -> bytes
216 # type: (bytes) -> bytes
215 """
217 """
216 Convert a string from the local character encoding to UTF-8
218 Convert a string from the local character encoding to UTF-8
217
219
218 We attempt to decode strings using the encoding mode set by
220 We attempt to decode strings using the encoding mode set by
219 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
221 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
220 characters will cause an error message. Other modes include
222 characters will cause an error message. Other modes include
221 'replace', which replaces unknown characters with a special
223 'replace', which replaces unknown characters with a special
222 Unicode character, and 'ignore', which drops the character.
224 Unicode character, and 'ignore', which drops the character.
223 """
225 """
224
226
225 # can we do a lossless round-trip?
227 # can we do a lossless round-trip?
226 if isinstance(s, localstr):
228 if isinstance(s, localstr):
227 return s._utf8
229 return s._utf8
228 if isasciistr(s):
230 if isasciistr(s):
229 return s
231 return s
230
232
231 try:
233 try:
232 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
234 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
233 return u.encode("utf-8")
235 return u.encode("utf-8")
234 except UnicodeDecodeError as inst:
236 except UnicodeDecodeError as inst:
235 sub = s[max(0, inst.start - 10) : inst.start + 10]
237 sub = s[max(0, inst.start - 10) : inst.start + 10]
236 raise error.Abort(
238 raise error.Abort(
237 b"decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
239 b"decoding near '%s': %s!" % (sub, pycompat.bytestr(inst))
238 )
240 )
239 except LookupError as k:
241 except LookupError as k:
240 raise error.Abort(k, hint=b"please check your locale settings")
242 raise error.Abort(k, hint=b"please check your locale settings")
241
243
242
244
243 def unitolocal(u):
245 def unitolocal(u):
244 # type: (Text) -> bytes
246 # type: (Text) -> bytes
245 """Convert a unicode string to a byte string of local encoding"""
247 """Convert a unicode string to a byte string of local encoding"""
246 return tolocal(u.encode('utf-8'))
248 return tolocal(u.encode('utf-8'))
247
249
248
250
249 def unifromlocal(s):
251 def unifromlocal(s):
250 # type: (bytes) -> Text
252 # type: (bytes) -> Text
251 """Convert a byte string of local encoding to a unicode string"""
253 """Convert a byte string of local encoding to a unicode string"""
252 return fromlocal(s).decode('utf-8')
254 return fromlocal(s).decode('utf-8')
253
255
254
256
255 def unimethod(bytesfunc):
257 def unimethod(bytesfunc):
256 # type: (Callable[[Any], bytes]) -> Callable[[Any], Text]
258 # type: (Callable[[Any], bytes]) -> Callable[[Any], Text]
257 """Create a proxy method that forwards __unicode__() and __str__() of
259 """Create a proxy method that forwards __unicode__() and __str__() of
258 Python 3 to __bytes__()"""
260 Python 3 to __bytes__()"""
259
261
260 def unifunc(obj):
262 def unifunc(obj):
261 return unifromlocal(bytesfunc(obj))
263 return unifromlocal(bytesfunc(obj))
262
264
263 return unifunc
265 return unifunc
264
266
265
267
266 # converter functions between native str and byte string. use these if the
268 # converter functions between native str and byte string. use these if the
267 # character encoding is not aware (e.g. exception message) or is known to
269 # character encoding is not aware (e.g. exception message) or is known to
268 # be locale dependent (e.g. date formatting.)
270 # be locale dependent (e.g. date formatting.)
269 if pycompat.ispy3:
271 if pycompat.ispy3:
270 strtolocal = unitolocal
272 strtolocal = unitolocal
271 strfromlocal = unifromlocal
273 strfromlocal = unifromlocal
272 strmethod = unimethod
274 strmethod = unimethod
273 else:
275 else:
274
276
275 def strtolocal(s):
277 def strtolocal(s):
276 # type: (str) -> bytes
278 # type: (str) -> bytes
277 return s # pytype: disable=bad-return-type
279 return s # pytype: disable=bad-return-type
278
280
279 def strfromlocal(s):
281 def strfromlocal(s):
280 # type: (bytes) -> str
282 # type: (bytes) -> str
281 return s # pytype: disable=bad-return-type
283 return s # pytype: disable=bad-return-type
282
284
283 strmethod = pycompat.identity
285 strmethod = pycompat.identity
284
286
285 if not _nativeenviron:
287 if not _nativeenviron:
286 # now encoding and helper functions are available, recreate the environ
288 # now encoding and helper functions are available, recreate the environ
287 # dict to be exported to other modules
289 # dict to be exported to other modules
288 environ = {
290 environ = {
289 tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
291 tolocal(k.encode('utf-8')): tolocal(v.encode('utf-8'))
290 for k, v in os.environ.items() # re-exports
292 for k, v in os.environ.items() # re-exports
291 }
293 }
292
294
293 if pycompat.ispy3:
295 if pycompat.ispy3:
294 # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
296 # os.getcwd() on Python 3 returns string, but it has os.getcwdb() which
295 # returns bytes.
297 # returns bytes.
296 if pycompat.iswindows:
298 if pycompat.iswindows:
297 # Python 3 on Windows issues a DeprecationWarning about using the bytes
299 # Python 3 on Windows issues a DeprecationWarning about using the bytes
298 # API when os.getcwdb() is called.
300 # API when os.getcwdb() is called.
299 getcwd = lambda: strtolocal(os.getcwd()) # re-exports
301 getcwd = lambda: strtolocal(os.getcwd()) # re-exports
300 else:
302 else:
301 getcwd = os.getcwdb # re-exports
303 getcwd = os.getcwdb # re-exports
302 else:
304 else:
303 getcwd = os.getcwd # re-exports
305 getcwd = os.getcwd # re-exports
304
306
305 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
307 # How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
306 _wide = _sysstr(
308 _wide = _sysstr(
307 environ.get(b"HGENCODINGAMBIGUOUS", b"narrow") == b"wide"
309 environ.get(b"HGENCODINGAMBIGUOUS", b"narrow") == b"wide"
308 and b"WFA"
310 and b"WFA"
309 or b"WF"
311 or b"WF"
310 )
312 )
311
313
312
314
313 def colwidth(s):
315 def colwidth(s):
314 # type: (bytes) -> int
316 # type: (bytes) -> int
315 """Find the column width of a string for display in the local encoding"""
317 """Find the column width of a string for display in the local encoding"""
316 return ucolwidth(s.decode(_sysstr(encoding), 'replace'))
318 return ucolwidth(s.decode(_sysstr(encoding), 'replace'))
317
319
318
320
319 def ucolwidth(d):
321 def ucolwidth(d):
320 # type: (Text) -> int
322 # type: (Text) -> int
321 """Find the column width of a Unicode string for display"""
323 """Find the column width of a Unicode string for display"""
322 eaw = getattr(unicodedata, 'east_asian_width', None)
324 eaw = getattr(unicodedata, 'east_asian_width', None)
323 if eaw is not None:
325 if eaw is not None:
324 return sum([eaw(c) in _wide and 2 or 1 for c in d])
326 return sum([eaw(c) in _wide and 2 or 1 for c in d])
325 return len(d)
327 return len(d)
326
328
327
329
328 def getcols(s, start, c):
330 def getcols(s, start, c):
329 # type: (bytes, int, int) -> bytes
331 # type: (bytes, int, int) -> bytes
330 '''Use colwidth to find a c-column substring of s starting at byte
332 '''Use colwidth to find a c-column substring of s starting at byte
331 index start'''
333 index start'''
332 for x in pycompat.xrange(start + c, len(s)):
334 for x in pycompat.xrange(start + c, len(s)):
333 t = s[start:x]
335 t = s[start:x]
334 if colwidth(t) == c:
336 if colwidth(t) == c:
335 return t
337 return t
336 raise ValueError('substring not found')
338 raise ValueError('substring not found')
337
339
338
340
339 def trim(s, width, ellipsis=b'', leftside=False):
341 def trim(s, width, ellipsis=b'', leftside=False):
340 # type: (bytes, int, bytes, bool) -> bytes
342 # type: (bytes, int, bytes, bool) -> bytes
341 """Trim string 's' to at most 'width' columns (including 'ellipsis').
343 """Trim string 's' to at most 'width' columns (including 'ellipsis').
342
344
343 If 'leftside' is True, left side of string 's' is trimmed.
345 If 'leftside' is True, left side of string 's' is trimmed.
344 'ellipsis' is always placed at trimmed side.
346 'ellipsis' is always placed at trimmed side.
345
347
346 >>> from .node import bin
348 >>> from .node import bin
347 >>> def bprint(s):
349 >>> def bprint(s):
348 ... print(pycompat.sysstr(s))
350 ... print(pycompat.sysstr(s))
349 >>> ellipsis = b'+++'
351 >>> ellipsis = b'+++'
350 >>> from . import encoding
352 >>> from . import encoding
351 >>> encoding.encoding = b'utf-8'
353 >>> encoding.encoding = b'utf-8'
352 >>> t = b'1234567890'
354 >>> t = b'1234567890'
353 >>> bprint(trim(t, 12, ellipsis=ellipsis))
355 >>> bprint(trim(t, 12, ellipsis=ellipsis))
354 1234567890
356 1234567890
355 >>> bprint(trim(t, 10, ellipsis=ellipsis))
357 >>> bprint(trim(t, 10, ellipsis=ellipsis))
356 1234567890
358 1234567890
357 >>> bprint(trim(t, 8, ellipsis=ellipsis))
359 >>> bprint(trim(t, 8, ellipsis=ellipsis))
358 12345+++
360 12345+++
359 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
361 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
360 +++67890
362 +++67890
361 >>> bprint(trim(t, 8))
363 >>> bprint(trim(t, 8))
362 12345678
364 12345678
363 >>> bprint(trim(t, 8, leftside=True))
365 >>> bprint(trim(t, 8, leftside=True))
364 34567890
366 34567890
365 >>> bprint(trim(t, 3, ellipsis=ellipsis))
367 >>> bprint(trim(t, 3, ellipsis=ellipsis))
366 +++
368 +++
367 >>> bprint(trim(t, 1, ellipsis=ellipsis))
369 >>> bprint(trim(t, 1, ellipsis=ellipsis))
368 +
370 +
369 >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
371 >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns
370 >>> t = u.encode(pycompat.sysstr(encoding.encoding))
372 >>> t = u.encode(pycompat.sysstr(encoding.encoding))
371 >>> bprint(trim(t, 12, ellipsis=ellipsis))
373 >>> bprint(trim(t, 12, ellipsis=ellipsis))
372 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
374 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
373 >>> bprint(trim(t, 10, ellipsis=ellipsis))
375 >>> bprint(trim(t, 10, ellipsis=ellipsis))
374 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
376 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a
375 >>> bprint(trim(t, 8, ellipsis=ellipsis))
377 >>> bprint(trim(t, 8, ellipsis=ellipsis))
376 \xe3\x81\x82\xe3\x81\x84+++
378 \xe3\x81\x82\xe3\x81\x84+++
377 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
379 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
378 +++\xe3\x81\x88\xe3\x81\x8a
380 +++\xe3\x81\x88\xe3\x81\x8a
379 >>> bprint(trim(t, 5))
381 >>> bprint(trim(t, 5))
380 \xe3\x81\x82\xe3\x81\x84
382 \xe3\x81\x82\xe3\x81\x84
381 >>> bprint(trim(t, 5, leftside=True))
383 >>> bprint(trim(t, 5, leftside=True))
382 \xe3\x81\x88\xe3\x81\x8a
384 \xe3\x81\x88\xe3\x81\x8a
383 >>> bprint(trim(t, 4, ellipsis=ellipsis))
385 >>> bprint(trim(t, 4, ellipsis=ellipsis))
384 +++
386 +++
385 >>> bprint(trim(t, 4, ellipsis=ellipsis, leftside=True))
387 >>> bprint(trim(t, 4, ellipsis=ellipsis, leftside=True))
386 +++
388 +++
387 >>> t = bin(b'112233445566778899aa') # invalid byte sequence
389 >>> t = bin(b'112233445566778899aa') # invalid byte sequence
388 >>> bprint(trim(t, 12, ellipsis=ellipsis))
390 >>> bprint(trim(t, 12, ellipsis=ellipsis))
389 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
391 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
390 >>> bprint(trim(t, 10, ellipsis=ellipsis))
392 >>> bprint(trim(t, 10, ellipsis=ellipsis))
391 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
393 \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa
392 >>> bprint(trim(t, 8, ellipsis=ellipsis))
394 >>> bprint(trim(t, 8, ellipsis=ellipsis))
393 \x11\x22\x33\x44\x55+++
395 \x11\x22\x33\x44\x55+++
394 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
396 >>> bprint(trim(t, 8, ellipsis=ellipsis, leftside=True))
395 +++\x66\x77\x88\x99\xaa
397 +++\x66\x77\x88\x99\xaa
396 >>> bprint(trim(t, 8))
398 >>> bprint(trim(t, 8))
397 \x11\x22\x33\x44\x55\x66\x77\x88
399 \x11\x22\x33\x44\x55\x66\x77\x88
398 >>> bprint(trim(t, 8, leftside=True))
400 >>> bprint(trim(t, 8, leftside=True))
399 \x33\x44\x55\x66\x77\x88\x99\xaa
401 \x33\x44\x55\x66\x77\x88\x99\xaa
400 >>> bprint(trim(t, 3, ellipsis=ellipsis))
402 >>> bprint(trim(t, 3, ellipsis=ellipsis))
401 +++
403 +++
402 >>> bprint(trim(t, 1, ellipsis=ellipsis))
404 >>> bprint(trim(t, 1, ellipsis=ellipsis))
403 +
405 +
404 """
406 """
405 try:
407 try:
406 u = s.decode(_sysstr(encoding))
408 u = s.decode(_sysstr(encoding))
407 except UnicodeDecodeError:
409 except UnicodeDecodeError:
408 if len(s) <= width: # trimming is not needed
410 if len(s) <= width: # trimming is not needed
409 return s
411 return s
410 width -= len(ellipsis)
412 width -= len(ellipsis)
411 if width <= 0: # no enough room even for ellipsis
413 if width <= 0: # no enough room even for ellipsis
412 return ellipsis[: width + len(ellipsis)]
414 return ellipsis[: width + len(ellipsis)]
413 if leftside:
415 if leftside:
414 return ellipsis + s[-width:]
416 return ellipsis + s[-width:]
415 return s[:width] + ellipsis
417 return s[:width] + ellipsis
416
418
417 if ucolwidth(u) <= width: # trimming is not needed
419 if ucolwidth(u) <= width: # trimming is not needed
418 return s
420 return s
419
421
420 width -= len(ellipsis)
422 width -= len(ellipsis)
421 if width <= 0: # no enough room even for ellipsis
423 if width <= 0: # no enough room even for ellipsis
422 return ellipsis[: width + len(ellipsis)]
424 return ellipsis[: width + len(ellipsis)]
423
425
424 if leftside:
426 if leftside:
425 uslice = lambda i: u[i:]
427 uslice = lambda i: u[i:]
426 concat = lambda s: ellipsis + s
428 concat = lambda s: ellipsis + s
427 else:
429 else:
428 uslice = lambda i: u[:-i]
430 uslice = lambda i: u[:-i]
429 concat = lambda s: s + ellipsis
431 concat = lambda s: s + ellipsis
430 for i in pycompat.xrange(1, len(u)):
432 for i in pycompat.xrange(1, len(u)):
431 usub = uslice(i)
433 usub = uslice(i)
432 if ucolwidth(usub) <= width:
434 if ucolwidth(usub) <= width:
433 return concat(usub.encode(_sysstr(encoding)))
435 return concat(usub.encode(_sysstr(encoding)))
434 return ellipsis # no enough room for multi-column characters
436 return ellipsis # no enough room for multi-column characters
435
437
436
438
437 def lower(s):
439 def lower(s):
438 # type: (bytes) -> bytes
440 # type: (bytes) -> bytes
439 """best-effort encoding-aware case-folding of local string s"""
441 """best-effort encoding-aware case-folding of local string s"""
440 try:
442 try:
441 return asciilower(s)
443 return asciilower(s)
442 except UnicodeDecodeError:
444 except UnicodeDecodeError:
443 pass
445 pass
444 try:
446 try:
445 if isinstance(s, localstr):
447 if isinstance(s, localstr):
446 u = s._utf8.decode("utf-8")
448 u = s._utf8.decode("utf-8")
447 else:
449 else:
448 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
450 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
449
451
450 lu = u.lower()
452 lu = u.lower()
451 if u == lu:
453 if u == lu:
452 return s # preserve localstring
454 return s # preserve localstring
453 return lu.encode(_sysstr(encoding))
455 return lu.encode(_sysstr(encoding))
454 except UnicodeError:
456 except UnicodeError:
455 return s.lower() # we don't know how to fold this except in ASCII
457 return s.lower() # we don't know how to fold this except in ASCII
456 except LookupError as k:
458 except LookupError as k:
457 raise error.Abort(k, hint=b"please check your locale settings")
459 raise error.Abort(k, hint=b"please check your locale settings")
458
460
459
461
460 def upper(s):
462 def upper(s):
461 # type: (bytes) -> bytes
463 # type: (bytes) -> bytes
462 """best-effort encoding-aware case-folding of local string s"""
464 """best-effort encoding-aware case-folding of local string s"""
463 try:
465 try:
464 return asciiupper(s)
466 return asciiupper(s)
465 except UnicodeDecodeError:
467 except UnicodeDecodeError:
466 return upperfallback(s)
468 return upperfallback(s)
467
469
468
470
469 def upperfallback(s):
471 def upperfallback(s):
470 # type: (Any) -> Any
472 # type: (Any) -> Any
471 try:
473 try:
472 if isinstance(s, localstr):
474 if isinstance(s, localstr):
473 u = s._utf8.decode("utf-8")
475 u = s._utf8.decode("utf-8")
474 else:
476 else:
475 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
477 u = s.decode(_sysstr(encoding), _sysstr(encodingmode))
476
478
477 uu = u.upper()
479 uu = u.upper()
478 if u == uu:
480 if u == uu:
479 return s # preserve localstring
481 return s # preserve localstring
480 return uu.encode(_sysstr(encoding))
482 return uu.encode(_sysstr(encoding))
481 except UnicodeError:
483 except UnicodeError:
482 return s.upper() # we don't know how to fold this except in ASCII
484 return s.upper() # we don't know how to fold this except in ASCII
483 except LookupError as k:
485 except LookupError as k:
484 raise error.Abort(k, hint=b"please check your locale settings")
486 raise error.Abort(k, hint=b"please check your locale settings")
485
487
486
488
487 class normcasespecs(object):
489 class normcasespecs(object):
488 '''what a platform's normcase does to ASCII strings
490 '''what a platform's normcase does to ASCII strings
489
491
490 This is specified per platform, and should be consistent with what normcase
492 This is specified per platform, and should be consistent with what normcase
491 on that platform actually does.
493 on that platform actually does.
492
494
493 lower: normcase lowercases ASCII strings
495 lower: normcase lowercases ASCII strings
494 upper: normcase uppercases ASCII strings
496 upper: normcase uppercases ASCII strings
495 other: the fallback function should always be called
497 other: the fallback function should always be called
496
498
497 This should be kept in sync with normcase_spec in util.h.'''
499 This should be kept in sync with normcase_spec in util.h.'''
498
500
499 lower = -1
501 lower = -1
500 upper = 1
502 upper = 1
501 other = 0
503 other = 0
502
504
503
505
504 def jsonescape(s, paranoid=False):
506 def jsonescape(s, paranoid=False):
505 # type: (Any, Any) -> Any
507 # type: (Any, Any) -> Any
506 '''returns a string suitable for JSON
508 '''returns a string suitable for JSON
507
509
508 JSON is problematic for us because it doesn't support non-Unicode
510 JSON is problematic for us because it doesn't support non-Unicode
509 bytes. To deal with this, we take the following approach:
511 bytes. To deal with this, we take the following approach:
510
512
511 - localstr/safelocalstr objects are converted back to UTF-8
513 - localstr/safelocalstr objects are converted back to UTF-8
512 - valid UTF-8/ASCII strings are passed as-is
514 - valid UTF-8/ASCII strings are passed as-is
513 - other strings are converted to UTF-8b surrogate encoding
515 - other strings are converted to UTF-8b surrogate encoding
514 - apply JSON-specified string escaping
516 - apply JSON-specified string escaping
515
517
516 (escapes are doubled in these tests)
518 (escapes are doubled in these tests)
517
519
518 >>> jsonescape(b'this is a test')
520 >>> jsonescape(b'this is a test')
519 'this is a test'
521 'this is a test'
520 >>> jsonescape(b'escape characters: \\0 \\x0b \\x7f')
522 >>> jsonescape(b'escape characters: \\0 \\x0b \\x7f')
521 'escape characters: \\\\u0000 \\\\u000b \\\\u007f'
523 'escape characters: \\\\u0000 \\\\u000b \\\\u007f'
522 >>> jsonescape(b'escape characters: \\b \\t \\n \\f \\r \\" \\\\')
524 >>> jsonescape(b'escape characters: \\b \\t \\n \\f \\r \\" \\\\')
523 'escape characters: \\\\b \\\\t \\\\n \\\\f \\\\r \\\\" \\\\\\\\'
525 'escape characters: \\\\b \\\\t \\\\n \\\\f \\\\r \\\\" \\\\\\\\'
524 >>> jsonescape(b'a weird byte: \\xdd')
526 >>> jsonescape(b'a weird byte: \\xdd')
525 'a weird byte: \\xed\\xb3\\x9d'
527 'a weird byte: \\xed\\xb3\\x9d'
526 >>> jsonescape(b'utf-8: caf\\xc3\\xa9')
528 >>> jsonescape(b'utf-8: caf\\xc3\\xa9')
527 'utf-8: caf\\xc3\\xa9'
529 'utf-8: caf\\xc3\\xa9'
528 >>> jsonescape(b'')
530 >>> jsonescape(b'')
529 ''
531 ''
530
532
531 If paranoid, non-ascii and common troublesome characters are also escaped.
533 If paranoid, non-ascii and common troublesome characters are also escaped.
532 This is suitable for web output.
534 This is suitable for web output.
533
535
534 >>> s = b'escape characters: \\0 \\x0b \\x7f'
536 >>> s = b'escape characters: \\0 \\x0b \\x7f'
535 >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
537 >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
536 >>> s = b'escape characters: \\b \\t \\n \\f \\r \\" \\\\'
538 >>> s = b'escape characters: \\b \\t \\n \\f \\r \\" \\\\'
537 >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
539 >>> assert jsonescape(s) == jsonescape(s, paranoid=True)
538 >>> jsonescape(b'escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
540 >>> jsonescape(b'escape boundary: \\x7e \\x7f \\xc2\\x80', paranoid=True)
539 'escape boundary: ~ \\\\u007f \\\\u0080'
541 'escape boundary: ~ \\\\u007f \\\\u0080'
540 >>> jsonescape(b'a weird byte: \\xdd', paranoid=True)
542 >>> jsonescape(b'a weird byte: \\xdd', paranoid=True)
541 'a weird byte: \\\\udcdd'
543 'a weird byte: \\\\udcdd'
542 >>> jsonescape(b'utf-8: caf\\xc3\\xa9', paranoid=True)
544 >>> jsonescape(b'utf-8: caf\\xc3\\xa9', paranoid=True)
543 'utf-8: caf\\\\u00e9'
545 'utf-8: caf\\\\u00e9'
544 >>> jsonescape(b'non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
546 >>> jsonescape(b'non-BMP: \\xf0\\x9d\\x84\\x9e', paranoid=True)
545 'non-BMP: \\\\ud834\\\\udd1e'
547 'non-BMP: \\\\ud834\\\\udd1e'
546 >>> jsonescape(b'<foo@example.org>', paranoid=True)
548 >>> jsonescape(b'<foo@example.org>', paranoid=True)
547 '\\\\u003cfoo@example.org\\\\u003e'
549 '\\\\u003cfoo@example.org\\\\u003e'
548 '''
550 '''
549
551
550 u8chars = toutf8b(s)
552 u8chars = toutf8b(s)
551 try:
553 try:
552 return _jsonescapeu8fast(u8chars, paranoid)
554 return _jsonescapeu8fast(u8chars, paranoid)
553 except ValueError:
555 except ValueError:
554 pass
556 pass
555 return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
557 return charencodepure.jsonescapeu8fallback(u8chars, paranoid)
556
558
557
559
558 # We need to decode/encode U+DCxx codes transparently since invalid UTF-8
560 # We need to decode/encode U+DCxx codes transparently since invalid UTF-8
559 # bytes are mapped to that range.
561 # bytes are mapped to that range.
560 if pycompat.ispy3:
562 if pycompat.ispy3:
561 _utf8strict = r'surrogatepass'
563 _utf8strict = r'surrogatepass'
562 else:
564 else:
563 _utf8strict = r'strict'
565 _utf8strict = r'strict'
564
566
565 _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
567 _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
566
568
567
569
568 def getutf8char(s, pos):
570 def getutf8char(s, pos):
569 # type: (bytes, int) -> bytes
571 # type: (bytes, int) -> bytes
570 '''get the next full utf-8 character in the given string, starting at pos
572 '''get the next full utf-8 character in the given string, starting at pos
571
573
572 Raises a UnicodeError if the given location does not start a valid
574 Raises a UnicodeError if the given location does not start a valid
573 utf-8 character.
575 utf-8 character.
574 '''
576 '''
575
577
576 # find how many bytes to attempt decoding from first nibble
578 # find how many bytes to attempt decoding from first nibble
577 l = _utf8len[ord(s[pos : pos + 1]) >> 4]
579 l = _utf8len[ord(s[pos : pos + 1]) >> 4]
578 if not l: # ascii
580 if not l: # ascii
579 return s[pos : pos + 1]
581 return s[pos : pos + 1]
580
582
581 c = s[pos : pos + l]
583 c = s[pos : pos + l]
582 # validate with attempted decode
584 # validate with attempted decode
583 c.decode("utf-8", _utf8strict)
585 c.decode("utf-8", _utf8strict)
584 return c
586 return c
585
587
586
588
587 def toutf8b(s):
589 def toutf8b(s):
588 # type: (bytes) -> bytes
590 # type: (bytes) -> bytes
589 '''convert a local, possibly-binary string into UTF-8b
591 '''convert a local, possibly-binary string into UTF-8b
590
592
591 This is intended as a generic method to preserve data when working
593 This is intended as a generic method to preserve data when working
592 with schemes like JSON and XML that have no provision for
594 with schemes like JSON and XML that have no provision for
593 arbitrary byte strings. As Mercurial often doesn't know
595 arbitrary byte strings. As Mercurial often doesn't know
594 what encoding data is in, we use so-called UTF-8b.
596 what encoding data is in, we use so-called UTF-8b.
595
597
596 If a string is already valid UTF-8 (or ASCII), it passes unmodified.
598 If a string is already valid UTF-8 (or ASCII), it passes unmodified.
597 Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
599 Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
598 uDC00-uDCFF.
600 uDC00-uDCFF.
599
601
600 Principles of operation:
602 Principles of operation:
601
603
602 - ASCII and UTF-8 data successfully round-trips and is understood
604 - ASCII and UTF-8 data successfully round-trips and is understood
603 by Unicode-oriented clients
605 by Unicode-oriented clients
604 - filenames and file contents in arbitrary other encodings can have
606 - filenames and file contents in arbitrary other encodings can have
605 be round-tripped or recovered by clueful clients
607 be round-tripped or recovered by clueful clients
606 - local strings that have a cached known UTF-8 encoding (aka
608 - local strings that have a cached known UTF-8 encoding (aka
607 localstr) get sent as UTF-8 so Unicode-oriented clients get the
609 localstr) get sent as UTF-8 so Unicode-oriented clients get the
608 Unicode data they want
610 Unicode data they want
609 - non-lossy local strings (aka safelocalstr) get sent as UTF-8 as well
611 - non-lossy local strings (aka safelocalstr) get sent as UTF-8 as well
610 - because we must preserve UTF-8 bytestring in places such as
612 - because we must preserve UTF-8 bytestring in places such as
611 filenames, metadata can't be roundtripped without help
613 filenames, metadata can't be roundtripped without help
612
614
613 (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
615 (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
614 arbitrary bytes into an internal Unicode format that can be
616 arbitrary bytes into an internal Unicode format that can be
615 re-encoded back into the original. Here we are exposing the
617 re-encoded back into the original. Here we are exposing the
616 internal surrogate encoding as a UTF-8 string.)
618 internal surrogate encoding as a UTF-8 string.)
617 '''
619 '''
618
620
619 if isinstance(s, localstr):
621 if isinstance(s, localstr):
620 # assume that the original UTF-8 sequence would never contain
622 # assume that the original UTF-8 sequence would never contain
621 # invalid characters in U+DCxx range
623 # invalid characters in U+DCxx range
622 return s._utf8
624 return s._utf8
623 elif isinstance(s, safelocalstr):
625 elif isinstance(s, safelocalstr):
624 # already verified that s is non-lossy in legacy encoding, which
626 # already verified that s is non-lossy in legacy encoding, which
625 # shouldn't contain characters in U+DCxx range
627 # shouldn't contain characters in U+DCxx range
626 return fromlocal(s)
628 return fromlocal(s)
627 elif isasciistr(s):
629 elif isasciistr(s):
628 return s
630 return s
629 if b"\xed" not in s:
631 if b"\xed" not in s:
630 try:
632 try:
631 s.decode('utf-8', _utf8strict)
633 s.decode('utf-8', _utf8strict)
632 return s
634 return s
633 except UnicodeDecodeError:
635 except UnicodeDecodeError:
634 pass
636 pass
635
637
636 s = pycompat.bytestr(s)
638 s = pycompat.bytestr(s)
637 r = b""
639 r = b""
638 pos = 0
640 pos = 0
639 l = len(s)
641 l = len(s)
640 while pos < l:
642 while pos < l:
641 try:
643 try:
642 c = getutf8char(s, pos)
644 c = getutf8char(s, pos)
643 if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
645 if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
644 # have to re-escape existing U+DCxx characters
646 # have to re-escape existing U+DCxx characters
645 c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
647 c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
646 pos += 1
648 pos += 1
647 else:
649 else:
648 pos += len(c)
650 pos += len(c)
649 except UnicodeDecodeError:
651 except UnicodeDecodeError:
650 c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
652 c = unichr(0xDC00 + ord(s[pos])).encode('utf-8', _utf8strict)
651 pos += 1
653 pos += 1
652 r += c
654 r += c
653 return r
655 return r
654
656
655
657
656 def fromutf8b(s):
658 def fromutf8b(s):
657 # type: (bytes) -> bytes
659 # type: (bytes) -> bytes
658 '''Given a UTF-8b string, return a local, possibly-binary string.
660 '''Given a UTF-8b string, return a local, possibly-binary string.
659
661
660 return the original binary string. This
662 return the original binary string. This
661 is a round-trip process for strings like filenames, but metadata
663 is a round-trip process for strings like filenames, but metadata
662 that's was passed through tolocal will remain in UTF-8.
664 that's was passed through tolocal will remain in UTF-8.
663
665
664 >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x
666 >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x
665 >>> m = b"\\xc3\\xa9\\x99abcd"
667 >>> m = b"\\xc3\\xa9\\x99abcd"
666 >>> toutf8b(m)
668 >>> toutf8b(m)
667 '\\xc3\\xa9\\xed\\xb2\\x99abcd'
669 '\\xc3\\xa9\\xed\\xb2\\x99abcd'
668 >>> roundtrip(m)
670 >>> roundtrip(m)
669 True
671 True
670 >>> roundtrip(b"\\xc2\\xc2\\x80")
672 >>> roundtrip(b"\\xc2\\xc2\\x80")
671 True
673 True
672 >>> roundtrip(b"\\xef\\xbf\\xbd")
674 >>> roundtrip(b"\\xef\\xbf\\xbd")
673 True
675 True
674 >>> roundtrip(b"\\xef\\xef\\xbf\\xbd")
676 >>> roundtrip(b"\\xef\\xef\\xbf\\xbd")
675 True
677 True
676 >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
678 >>> roundtrip(b"\\xf1\\x80\\x80\\x80\\x80")
677 True
679 True
678 '''
680 '''
679
681
680 if isasciistr(s):
682 if isasciistr(s):
681 return s
683 return s
682 # fast path - look for uDxxx prefixes in s
684 # fast path - look for uDxxx prefixes in s
683 if b"\xed" not in s:
685 if b"\xed" not in s:
684 return s
686 return s
685
687
686 # We could do this with the unicode type but some Python builds
688 # We could do this with the unicode type but some Python builds
687 # use UTF-16 internally (issue5031) which causes non-BMP code
689 # use UTF-16 internally (issue5031) which causes non-BMP code
688 # points to be escaped. Instead, we use our handy getutf8char
690 # points to be escaped. Instead, we use our handy getutf8char
689 # helper again to walk the string without "decoding" it.
691 # helper again to walk the string without "decoding" it.
690
692
691 s = pycompat.bytestr(s)
693 s = pycompat.bytestr(s)
692 r = b""
694 r = b""
693 pos = 0
695 pos = 0
694 l = len(s)
696 l = len(s)
695 while pos < l:
697 while pos < l:
696 c = getutf8char(s, pos)
698 c = getutf8char(s, pos)
697 pos += len(c)
699 pos += len(c)
698 # unescape U+DCxx characters
700 # unescape U+DCxx characters
699 if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
701 if b"\xed\xb0\x80" <= c <= b"\xed\xb3\xbf":
700 c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
702 c = pycompat.bytechr(ord(c.decode("utf-8", _utf8strict)) & 0xFF)
701 r += c
703 r += c
702 return r
704 return r
@@ -1,3157 +1,3157 b''
1 # exchange.py - utility to exchange data between repos.
1 # exchange.py - utility to exchange data between repos.
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from __future__ import absolute_import
8 from __future__ import absolute_import
9
9
10 import collections
10 import collections
11 import weakref
11 import weakref
12
12
13 from .i18n import _
13 from .i18n import _
14 from .node import (
14 from .node import (
15 hex,
15 hex,
16 nullid,
16 nullid,
17 nullrev,
17 nullrev,
18 )
18 )
19 from .thirdparty import attr
19 from .thirdparty import attr
20 from . import (
20 from . import (
21 bookmarks as bookmod,
21 bookmarks as bookmod,
22 bundle2,
22 bundle2,
23 changegroup,
23 changegroup,
24 discovery,
24 discovery,
25 error,
25 error,
26 exchangev2,
26 exchangev2,
27 lock as lockmod,
27 lock as lockmod,
28 logexchange,
28 logexchange,
29 narrowspec,
29 narrowspec,
30 obsolete,
30 obsolete,
31 obsutil,
31 obsutil,
32 phases,
32 phases,
33 pushkey,
33 pushkey,
34 pycompat,
34 pycompat,
35 requirements,
35 requirements,
36 scmutil,
36 scmutil,
37 sslutil,
37 sslutil,
38 streamclone,
38 streamclone,
39 url as urlmod,
39 url as urlmod,
40 util,
40 util,
41 wireprototypes,
41 wireprototypes,
42 )
42 )
43 from .utils import (
43 from .utils import (
44 hashutil,
44 hashutil,
45 stringutil,
45 stringutil,
46 )
46 )
47
47
48 urlerr = util.urlerr
48 urlerr = util.urlerr
49 urlreq = util.urlreq
49 urlreq = util.urlreq
50
50
51 _NARROWACL_SECTION = b'narrowacl'
51 _NARROWACL_SECTION = b'narrowacl'
52
52
53 # Maps bundle version human names to changegroup versions.
53 # Maps bundle version human names to changegroup versions.
54 _bundlespeccgversions = {
54 _bundlespeccgversions = {
55 b'v1': b'01',
55 b'v1': b'01',
56 b'v2': b'02',
56 b'v2': b'02',
57 b'packed1': b's1',
57 b'packed1': b's1',
58 b'bundle2': b'02', # legacy
58 b'bundle2': b'02', # legacy
59 }
59 }
60
60
61 # Maps bundle version with content opts to choose which part to bundle
61 # Maps bundle version with content opts to choose which part to bundle
62 _bundlespeccontentopts = {
62 _bundlespeccontentopts = {
63 b'v1': {
63 b'v1': {
64 b'changegroup': True,
64 b'changegroup': True,
65 b'cg.version': b'01',
65 b'cg.version': b'01',
66 b'obsolescence': False,
66 b'obsolescence': False,
67 b'phases': False,
67 b'phases': False,
68 b'tagsfnodescache': False,
68 b'tagsfnodescache': False,
69 b'revbranchcache': False,
69 b'revbranchcache': False,
70 },
70 },
71 b'v2': {
71 b'v2': {
72 b'changegroup': True,
72 b'changegroup': True,
73 b'cg.version': b'02',
73 b'cg.version': b'02',
74 b'obsolescence': False,
74 b'obsolescence': False,
75 b'phases': False,
75 b'phases': False,
76 b'tagsfnodescache': True,
76 b'tagsfnodescache': True,
77 b'revbranchcache': True,
77 b'revbranchcache': True,
78 },
78 },
79 b'packed1': {b'cg.version': b's1'},
79 b'packed1': {b'cg.version': b's1'},
80 }
80 }
81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
81 _bundlespeccontentopts[b'bundle2'] = _bundlespeccontentopts[b'v2']
82
82
83 _bundlespecvariants = {
83 _bundlespecvariants = {
84 b"streamv2": {
84 b"streamv2": {
85 b"changegroup": False,
85 b"changegroup": False,
86 b"streamv2": True,
86 b"streamv2": True,
87 b"tagsfnodescache": False,
87 b"tagsfnodescache": False,
88 b"revbranchcache": False,
88 b"revbranchcache": False,
89 }
89 }
90 }
90 }
91
91
92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
92 # Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
93 _bundlespecv1compengines = {b'gzip', b'bzip2', b'none'}
94
94
95
95
96 @attr.s
96 @attr.s
97 class bundlespec(object):
97 class bundlespec(object):
98 compression = attr.ib()
98 compression = attr.ib()
99 wirecompression = attr.ib()
99 wirecompression = attr.ib()
100 version = attr.ib()
100 version = attr.ib()
101 wireversion = attr.ib()
101 wireversion = attr.ib()
102 params = attr.ib()
102 params = attr.ib()
103 contentopts = attr.ib()
103 contentopts = attr.ib()
104
104
105
105
106 def parsebundlespec(repo, spec, strict=True):
106 def parsebundlespec(repo, spec, strict=True):
107 """Parse a bundle string specification into parts.
107 """Parse a bundle string specification into parts.
108
108
109 Bundle specifications denote a well-defined bundle/exchange format.
109 Bundle specifications denote a well-defined bundle/exchange format.
110 The content of a given specification should not change over time in
110 The content of a given specification should not change over time in
111 order to ensure that bundles produced by a newer version of Mercurial are
111 order to ensure that bundles produced by a newer version of Mercurial are
112 readable from an older version.
112 readable from an older version.
113
113
114 The string currently has the form:
114 The string currently has the form:
115
115
116 <compression>-<type>[;<parameter0>[;<parameter1>]]
116 <compression>-<type>[;<parameter0>[;<parameter1>]]
117
117
118 Where <compression> is one of the supported compression formats
118 Where <compression> is one of the supported compression formats
119 and <type> is (currently) a version string. A ";" can follow the type and
119 and <type> is (currently) a version string. A ";" can follow the type and
120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
120 all text afterwards is interpreted as URI encoded, ";" delimited key=value
121 pairs.
121 pairs.
122
122
123 If ``strict`` is True (the default) <compression> is required. Otherwise,
123 If ``strict`` is True (the default) <compression> is required. Otherwise,
124 it is optional.
124 it is optional.
125
125
126 Returns a bundlespec object of (compression, version, parameters).
126 Returns a bundlespec object of (compression, version, parameters).
127 Compression will be ``None`` if not in strict mode and a compression isn't
127 Compression will be ``None`` if not in strict mode and a compression isn't
128 defined.
128 defined.
129
129
130 An ``InvalidBundleSpecification`` is raised when the specification is
130 An ``InvalidBundleSpecification`` is raised when the specification is
131 not syntactically well formed.
131 not syntactically well formed.
132
132
133 An ``UnsupportedBundleSpecification`` is raised when the compression or
133 An ``UnsupportedBundleSpecification`` is raised when the compression or
134 bundle type/version is not recognized.
134 bundle type/version is not recognized.
135
135
136 Note: this function will likely eventually return a more complex data
136 Note: this function will likely eventually return a more complex data
137 structure, including bundle2 part information.
137 structure, including bundle2 part information.
138 """
138 """
139
139
140 def parseparams(s):
140 def parseparams(s):
141 if b';' not in s:
141 if b';' not in s:
142 return s, {}
142 return s, {}
143
143
144 params = {}
144 params = {}
145 version, paramstr = s.split(b';', 1)
145 version, paramstr = s.split(b';', 1)
146
146
147 for p in paramstr.split(b';'):
147 for p in paramstr.split(b';'):
148 if b'=' not in p:
148 if b'=' not in p:
149 raise error.InvalidBundleSpecification(
149 raise error.InvalidBundleSpecification(
150 _(
150 _(
151 b'invalid bundle specification: '
151 b'invalid bundle specification: '
152 b'missing "=" in parameter: %s'
152 b'missing "=" in parameter: %s'
153 )
153 )
154 % p
154 % p
155 )
155 )
156
156
157 key, value = p.split(b'=', 1)
157 key, value = p.split(b'=', 1)
158 key = urlreq.unquote(key)
158 key = urlreq.unquote(key)
159 value = urlreq.unquote(value)
159 value = urlreq.unquote(value)
160 params[key] = value
160 params[key] = value
161
161
162 return version, params
162 return version, params
163
163
164 if strict and b'-' not in spec:
164 if strict and b'-' not in spec:
165 raise error.InvalidBundleSpecification(
165 raise error.InvalidBundleSpecification(
166 _(
166 _(
167 b'invalid bundle specification; '
167 b'invalid bundle specification; '
168 b'must be prefixed with compression: %s'
168 b'must be prefixed with compression: %s'
169 )
169 )
170 % spec
170 % spec
171 )
171 )
172
172
173 if b'-' in spec:
173 if b'-' in spec:
174 compression, version = spec.split(b'-', 1)
174 compression, version = spec.split(b'-', 1)
175
175
176 if compression not in util.compengines.supportedbundlenames:
176 if compression not in util.compengines.supportedbundlenames:
177 raise error.UnsupportedBundleSpecification(
177 raise error.UnsupportedBundleSpecification(
178 _(b'%s compression is not supported') % compression
178 _(b'%s compression is not supported') % compression
179 )
179 )
180
180
181 version, params = parseparams(version)
181 version, params = parseparams(version)
182
182
183 if version not in _bundlespeccgversions:
183 if version not in _bundlespeccgversions:
184 raise error.UnsupportedBundleSpecification(
184 raise error.UnsupportedBundleSpecification(
185 _(b'%s is not a recognized bundle version') % version
185 _(b'%s is not a recognized bundle version') % version
186 )
186 )
187 else:
187 else:
188 # Value could be just the compression or just the version, in which
188 # Value could be just the compression or just the version, in which
189 # case some defaults are assumed (but only when not in strict mode).
189 # case some defaults are assumed (but only when not in strict mode).
190 assert not strict
190 assert not strict
191
191
192 spec, params = parseparams(spec)
192 spec, params = parseparams(spec)
193
193
194 if spec in util.compengines.supportedbundlenames:
194 if spec in util.compengines.supportedbundlenames:
195 compression = spec
195 compression = spec
196 version = b'v1'
196 version = b'v1'
197 # Generaldelta repos require v2.
197 # Generaldelta repos require v2.
198 if b'generaldelta' in repo.requirements:
198 if b'generaldelta' in repo.requirements:
199 version = b'v2'
199 version = b'v2'
200 # Modern compression engines require v2.
200 # Modern compression engines require v2.
201 if compression not in _bundlespecv1compengines:
201 if compression not in _bundlespecv1compengines:
202 version = b'v2'
202 version = b'v2'
203 elif spec in _bundlespeccgversions:
203 elif spec in _bundlespeccgversions:
204 if spec == b'packed1':
204 if spec == b'packed1':
205 compression = b'none'
205 compression = b'none'
206 else:
206 else:
207 compression = b'bzip2'
207 compression = b'bzip2'
208 version = spec
208 version = spec
209 else:
209 else:
210 raise error.UnsupportedBundleSpecification(
210 raise error.UnsupportedBundleSpecification(
211 _(b'%s is not a recognized bundle specification') % spec
211 _(b'%s is not a recognized bundle specification') % spec
212 )
212 )
213
213
214 # Bundle version 1 only supports a known set of compression engines.
214 # Bundle version 1 only supports a known set of compression engines.
215 if version == b'v1' and compression not in _bundlespecv1compengines:
215 if version == b'v1' and compression not in _bundlespecv1compengines:
216 raise error.UnsupportedBundleSpecification(
216 raise error.UnsupportedBundleSpecification(
217 _(b'compression engine %s is not supported on v1 bundles')
217 _(b'compression engine %s is not supported on v1 bundles')
218 % compression
218 % compression
219 )
219 )
220
220
221 # The specification for packed1 can optionally declare the data formats
221 # The specification for packed1 can optionally declare the data formats
222 # required to apply it. If we see this metadata, compare against what the
222 # required to apply it. If we see this metadata, compare against what the
223 # repo supports and error if the bundle isn't compatible.
223 # repo supports and error if the bundle isn't compatible.
224 if version == b'packed1' and b'requirements' in params:
224 if version == b'packed1' and b'requirements' in params:
225 requirements = set(params[b'requirements'].split(b','))
225 requirements = set(params[b'requirements'].split(b','))
226 missingreqs = requirements - repo.supportedformats
226 missingreqs = requirements - repo.supportedformats
227 if missingreqs:
227 if missingreqs:
228 raise error.UnsupportedBundleSpecification(
228 raise error.UnsupportedBundleSpecification(
229 _(b'missing support for repository features: %s')
229 _(b'missing support for repository features: %s')
230 % b', '.join(sorted(missingreqs))
230 % b', '.join(sorted(missingreqs))
231 )
231 )
232
232
233 # Compute contentopts based on the version
233 # Compute contentopts based on the version
234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
234 contentopts = _bundlespeccontentopts.get(version, {}).copy()
235
235
236 # Process the variants
236 # Process the variants
237 if b"stream" in params and params[b"stream"] == b"v2":
237 if b"stream" in params and params[b"stream"] == b"v2":
238 variant = _bundlespecvariants[b"streamv2"]
238 variant = _bundlespecvariants[b"streamv2"]
239 contentopts.update(variant)
239 contentopts.update(variant)
240
240
241 engine = util.compengines.forbundlename(compression)
241 engine = util.compengines.forbundlename(compression)
242 compression, wirecompression = engine.bundletype()
242 compression, wirecompression = engine.bundletype()
243 wireversion = _bundlespeccgversions[version]
243 wireversion = _bundlespeccgversions[version]
244
244
245 return bundlespec(
245 return bundlespec(
246 compression, wirecompression, version, wireversion, params, contentopts
246 compression, wirecompression, version, wireversion, params, contentopts
247 )
247 )
248
248
249
249
250 def readbundle(ui, fh, fname, vfs=None):
250 def readbundle(ui, fh, fname, vfs=None):
251 header = changegroup.readexactly(fh, 4)
251 header = changegroup.readexactly(fh, 4)
252
252
253 alg = None
253 alg = None
254 if not fname:
254 if not fname:
255 fname = b"stream"
255 fname = b"stream"
256 if not header.startswith(b'HG') and header.startswith(b'\0'):
256 if not header.startswith(b'HG') and header.startswith(b'\0'):
257 fh = changegroup.headerlessfixup(fh, header)
257 fh = changegroup.headerlessfixup(fh, header)
258 header = b"HG10"
258 header = b"HG10"
259 alg = b'UN'
259 alg = b'UN'
260 elif vfs:
260 elif vfs:
261 fname = vfs.join(fname)
261 fname = vfs.join(fname)
262
262
263 magic, version = header[0:2], header[2:4]
263 magic, version = header[0:2], header[2:4]
264
264
265 if magic != b'HG':
265 if magic != b'HG':
266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
266 raise error.Abort(_(b'%s: not a Mercurial bundle') % fname)
267 if version == b'10':
267 if version == b'10':
268 if alg is None:
268 if alg is None:
269 alg = changegroup.readexactly(fh, 2)
269 alg = changegroup.readexactly(fh, 2)
270 return changegroup.cg1unpacker(fh, alg)
270 return changegroup.cg1unpacker(fh, alg)
271 elif version.startswith(b'2'):
271 elif version.startswith(b'2'):
272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
272 return bundle2.getunbundler(ui, fh, magicstring=magic + version)
273 elif version == b'S1':
273 elif version == b'S1':
274 return streamclone.streamcloneapplier(fh)
274 return streamclone.streamcloneapplier(fh)
275 else:
275 else:
276 raise error.Abort(
276 raise error.Abort(
277 _(b'%s: unknown bundle version %s') % (fname, version)
277 _(b'%s: unknown bundle version %s') % (fname, version)
278 )
278 )
279
279
280
280
281 def getbundlespec(ui, fh):
281 def getbundlespec(ui, fh):
282 """Infer the bundlespec from a bundle file handle.
282 """Infer the bundlespec from a bundle file handle.
283
283
284 The input file handle is seeked and the original seek position is not
284 The input file handle is seeked and the original seek position is not
285 restored.
285 restored.
286 """
286 """
287
287
288 def speccompression(alg):
288 def speccompression(alg):
289 try:
289 try:
290 return util.compengines.forbundletype(alg).bundletype()[0]
290 return util.compengines.forbundletype(alg).bundletype()[0]
291 except KeyError:
291 except KeyError:
292 return None
292 return None
293
293
294 b = readbundle(ui, fh, None)
294 b = readbundle(ui, fh, None)
295 if isinstance(b, changegroup.cg1unpacker):
295 if isinstance(b, changegroup.cg1unpacker):
296 alg = b._type
296 alg = b._type
297 if alg == b'_truncatedBZ':
297 if alg == b'_truncatedBZ':
298 alg = b'BZ'
298 alg = b'BZ'
299 comp = speccompression(alg)
299 comp = speccompression(alg)
300 if not comp:
300 if not comp:
301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
301 raise error.Abort(_(b'unknown compression algorithm: %s') % alg)
302 return b'%s-v1' % comp
302 return b'%s-v1' % comp
303 elif isinstance(b, bundle2.unbundle20):
303 elif isinstance(b, bundle2.unbundle20):
304 if b'Compression' in b.params:
304 if b'Compression' in b.params:
305 comp = speccompression(b.params[b'Compression'])
305 comp = speccompression(b.params[b'Compression'])
306 if not comp:
306 if not comp:
307 raise error.Abort(
307 raise error.Abort(
308 _(b'unknown compression algorithm: %s') % comp
308 _(b'unknown compression algorithm: %s') % comp
309 )
309 )
310 else:
310 else:
311 comp = b'none'
311 comp = b'none'
312
312
313 version = None
313 version = None
314 for part in b.iterparts():
314 for part in b.iterparts():
315 if part.type == b'changegroup':
315 if part.type == b'changegroup':
316 version = part.params[b'version']
316 version = part.params[b'version']
317 if version in (b'01', b'02'):
317 if version in (b'01', b'02'):
318 version = b'v2'
318 version = b'v2'
319 else:
319 else:
320 raise error.Abort(
320 raise error.Abort(
321 _(
321 _(
322 b'changegroup version %s does not have '
322 b'changegroup version %s does not have '
323 b'a known bundlespec'
323 b'a known bundlespec'
324 )
324 )
325 % version,
325 % version,
326 hint=_(b'try upgrading your Mercurial client'),
326 hint=_(b'try upgrading your Mercurial client'),
327 )
327 )
328 elif part.type == b'stream2' and version is None:
328 elif part.type == b'stream2' and version is None:
329 # A stream2 part requires to be part of a v2 bundle
329 # A stream2 part requires to be part of a v2 bundle
330 requirements = urlreq.unquote(part.params[b'requirements'])
330 requirements = urlreq.unquote(part.params[b'requirements'])
331 splitted = requirements.split()
331 splitted = requirements.split()
332 params = bundle2._formatrequirementsparams(splitted)
332 params = bundle2._formatrequirementsparams(splitted)
333 return b'none-v2;stream=v2;%s' % params
333 return b'none-v2;stream=v2;%s' % params
334
334
335 if not version:
335 if not version:
336 raise error.Abort(
336 raise error.Abort(
337 _(b'could not identify changegroup version in bundle')
337 _(b'could not identify changegroup version in bundle')
338 )
338 )
339
339
340 return b'%s-%s' % (comp, version)
340 return b'%s-%s' % (comp, version)
341 elif isinstance(b, streamclone.streamcloneapplier):
341 elif isinstance(b, streamclone.streamcloneapplier):
342 requirements = streamclone.readbundle1header(fh)[2]
342 requirements = streamclone.readbundle1header(fh)[2]
343 formatted = bundle2._formatrequirementsparams(requirements)
343 formatted = bundle2._formatrequirementsparams(requirements)
344 return b'none-packed1;%s' % formatted
344 return b'none-packed1;%s' % formatted
345 else:
345 else:
346 raise error.Abort(_(b'unknown bundle type: %s') % b)
346 raise error.Abort(_(b'unknown bundle type: %s') % b)
347
347
348
348
def _computeoutgoing(repo, heads, common):
    """Return a discovery.outgoing object for the given heads/common sets.

    Kept as a standalone function so that extensions can reach this
    logic.  Unknown nodes are dropped from ``common``; an empty common
    set falls back to the null revision, and empty ``heads`` defaults to
    all changelog heads.
    """
    changelog = repo.changelog
    if not common:
        common = [nullid]
    else:
        known = changelog.hasnode
        common = [node for node in common if known(node)]
    if not heads:
        heads = changelog.heads()
    return discovery.outgoing(repo, common, heads)
367
367
368
368
def _checkpublish(pushop):
    """Warn, prompt or abort when a push would publish draft changesets.

    Behavior is driven by the experimental.auto-publish config option and
    only kicks in when pushing without --publish to a publishing server.
    """
    repo = pushop.repo
    ui = repo.ui
    mode = ui.config(b'experimental', b'auto-publish')
    if pushop.publish or mode not in (b'warn', b'confirm', b'abort'):
        return
    remotephases = listkeys(pushop.remote, b'phases')
    if not remotephases.get(b'publishing', False):
        # not a publishing server: the push cannot publish anything
        return

    if pushop.revs is None:
        published = repo.filtered(b'served').revs(b'not public()')
    else:
        published = repo.revs(b'::%ln - public()', pushop.revs)
    if not published:
        return
    if mode == b'warn':
        ui.warn(
            _(b'%i changesets about to be published\n') % len(published)
        )
    elif mode == b'confirm':
        prompt = _(
            b'push and publish %i changesets (yn)?$$ &Yes $$ &No'
        ) % len(published)
        if ui.promptchoice(prompt):
            raise error.Abort(_(b'user quit'))
    elif mode == b'abort':
        msg = _(b'push would publish %i changesets') % len(published)
        hint = _(
            b"use --publish or adjust 'experimental.auto-publish'"
            b" config"
        )
        raise error.Abort(msg, hint=hint)
401
401
402
402
403 def _forcebundle1(op):
403 def _forcebundle1(op):
404 """return true if a pull/push must use bundle1
404 """return true if a pull/push must use bundle1
405
405
406 This function is used to allow testing of the older bundle version"""
406 This function is used to allow testing of the older bundle version"""
407 ui = op.repo.ui
407 ui = op.repo.ui
408 # The goal is this config is to allow developer to choose the bundle
408 # The goal is this config is to allow developer to choose the bundle
409 # version used during exchanged. This is especially handy during test.
409 # version used during exchanged. This is especially handy during test.
410 # Value is a list of bundle version to be picked from, highest version
410 # Value is a list of bundle version to be picked from, highest version
411 # should be used.
411 # should be used.
412 #
412 #
413 # developer config: devel.legacy.exchange
413 # developer config: devel.legacy.exchange
414 exchange = ui.configlist(b'devel', b'legacy.exchange')
414 exchange = ui.configlist(b'devel', b'legacy.exchange')
415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
415 forcebundle1 = b'bundle2' not in exchange and b'bundle1' in exchange
416 return forcebundle1 or not op.remote.capable(b'bundle2')
416 return forcebundle1 or not op.remote.capable(b'bundle2')
417
417
418
418
class pushoperation(object):
    """An object that represents a single push operation.

    Its purpose is to carry push related state and very common operations.

    A new pushoperation should be created at the beginning of each push and
    discarded afterward.
    """

    def __init__(
        self,
        repo,
        remote,
        force=False,
        revs=None,
        newbranch=False,
        bookmarks=(),
        publish=False,
        pushvars=None,
    ):
        # repo we push from
        self.repo = repo
        self.ui = repo.ui
        # repo we push to
        self.remote = remote
        # force option provided
        self.force = force
        # revs to be pushed (None is "all")
        self.revs = revs
        # bookmarks explicitly pushed (names, expanded later during discovery)
        self.bookmarks = bookmarks
        # allow push of new branch
        self.newbranch = newbranch
        # steps already performed
        # (used to check what steps have been already performed through bundle2)
        self.stepsdone = set()
        # Integer version of the changegroup push result
        # - None means nothing to push
        # - 0 means HTTP error
        # - 1 means we pushed and remote head count is unchanged *or*
        #   we have outgoing changesets but refused to push
        # - other values as described by addchangegroup()
        self.cgresult = None
        # Boolean value for the bookmark push
        self.bkresult = None
        # discovery.outgoing object (contains common and outgoing data),
        # filled in by the 'changeset' discovery step
        self.outgoing = None
        # all remote topological heads before the push
        self.remoteheads = None
        # Details of the remote branch pre and post push
        #
        # mapping: {'branch': ([remoteheads],
        #                      [newheads],
        #                      [unsyncedheads],
        #                      [discardedheads])}
        # - branch: the branch name
        # - remoteheads: the list of remote heads known locally
        #                None if the branch is new
        # - newheads: the new remote heads (known locally) with outgoing pushed
        # - unsyncedheads: the list of remote heads unknown locally.
        # - discardedheads: the list of remote heads made obsolete by the push
        self.pushbranchmap = None
        # testable as a boolean indicating if any nodes are missing locally.
        self.incoming = None
        # summary of the remote phase situation
        self.remotephases = None
        # phases changes that must be pushed along side the changesets
        self.outdatedphases = None
        # phases changes that must be pushed if changeset push fails
        self.fallbackoutdatedphases = None
        # outgoing obsmarkers
        self.outobsmarkers = set()
        # outgoing bookmarks, list of (bm, oldnode | '', newnode | '')
        self.outbookmarks = []
        # transaction manager
        self.trmanager = None
        # map { pushkey partid -> callback handling failure}
        # used to handle exception from mandatory pushkey part failure
        self.pkfailcb = {}
        # an iterable of pushvars or None
        self.pushvars = pushvars
        # publish pushed changesets
        self.publish = publish

    @util.propertycache
    def futureheads(self):
        """future remote heads if the changeset push succeeds"""
        return self.outgoing.ancestorsof

    @util.propertycache
    def fallbackheads(self):
        """future remote heads if the changeset push fails"""
        if self.revs is None:
            # no target to push, all common are relevant
            return self.outgoing.commonheads
        unfi = self.repo.unfiltered()
        # I want cheads = heads(::ancestorsof and ::commonheads)
        # (ancestorsof is revs with secret changeset filtered out)
        #
        # This can be expressed as:
        #     cheads = ( (ancestorsof and ::commonheads)
        #              + (commonheads and ::ancestorsof))"
        #              )
        #
        # while trying to push we already computed the following:
        #     common = (::commonheads)
        #     missing = ((commonheads::ancestorsof) - commonheads)
        #
        # We can pick:
        # * ancestorsof part of common (::commonheads)
        common = self.outgoing.common
        rev = self.repo.changelog.index.rev
        cheads = [node for node in self.revs if rev(node) in common]
        # and
        # * commonheads parents on missing
        revset = unfi.set(
            b'%ln and parents(roots(%ln))',
            self.outgoing.commonheads,
            self.outgoing.missing,
        )
        cheads.extend(c.node() for c in revset)
        return cheads

    @property
    def commonheads(self):
        """set of all common heads after changeset bundle push"""
        # cgresult is set by the changegroup push; truthy means it succeeded
        if self.cgresult:
            return self.futureheads
        else:
            return self.fallbackheads
549
549
550
550
# mapping of messages used when pushing a bookmark:
# action name -> (success message, failure message); each message takes
# the bookmark name as its single %s argument
bookmsgmap = {
    b'update': (
        _(b"updating bookmark %s\n"),
        _(b'updating bookmark %s failed!\n'),
    ),
    b'export': (
        _(b"exporting bookmark %s\n"),
        _(b'exporting bookmark %s failed!\n'),
    ),
    b'delete': (
        _(b"deleting remote bookmark %s\n"),
        _(b'deleting remote bookmark %s failed!\n'),
    ),
}
566
566
567
567
def push(
    repo,
    remote,
    force=False,
    revs=None,
    newbranch=False,
    bookmarks=(),
    publish=False,
    opargs=None,
):
    '''Push outgoing changesets (limited by revs) from a local
    repository to remote.

    Returns the pushoperation object; its ``cgresult`` attribute holds
    the integer changegroup result:
      - None means nothing to push
      - 0 means HTTP error
      - 1 means we pushed and remote head count is unchanged *or*
        we have outgoing changesets but refused to push
      - other values as described by addchangegroup()
    '''
    if opargs is None:
        opargs = {}
    pushop = pushoperation(
        repo,
        remote,
        force,
        revs,
        newbranch,
        bookmarks,
        publish,
        **pycompat.strkwargs(opargs)
    )
    # for a local destination, refuse to push if it cannot read our repo
    if pushop.remote.local():
        missing = (
            set(pushop.repo.requirements) - pushop.remote.local().supported
        )
        if missing:
            msg = _(
                b"required features are not"
                b" supported in the destination:"
                b" %s"
            ) % (b', '.join(sorted(missing)))
            raise error.Abort(msg)

    if not pushop.remote.canpush():
        raise error.Abort(_(b"destination does not support push"))

    if not pushop.remote.capable(b'unbundle'):
        raise error.Abort(
            _(
                b'cannot push: destination does not support the '
                b'unbundle wire protocol command'
            )
        )

    # get lock as we might write phase data
    wlock = lock = None
    try:
        # bundle2 push may receive a reply bundle touching bookmarks
        # requiring the wlock. Take it now to ensure proper ordering.
        maypushback = pushop.ui.configbool(b'experimental', b'bundle2.pushback')
        if (
            (not _forcebundle1(pushop))
            and maypushback
            and not bookmod.bookmarksinstore(repo)
        ):
            wlock = pushop.repo.wlock()
        lock = pushop.repo.lock()
        pushop.trmanager = transactionmanager(
            pushop.repo, b'push-response', pushop.remote.url()
        )
    except error.LockUnavailable as err:
        # source repo cannot be locked.
        # We do not abort the push, but just disable the local phase
        # synchronisation.
        msg = b'cannot lock source repository: %s\n' % stringutil.forcebytestr(
            err
        )
        pushop.ui.debug(msg)

    # the locks/transaction may be None (see above): the push then simply
    # proceeds without local phase synchronisation
    with wlock or util.nullcontextmanager():
        with lock or util.nullcontextmanager():
            with pushop.trmanager or util.nullcontextmanager():
                pushop.repo.checkpush(pushop)
                _checkpublish(pushop)
                _pushdiscovery(pushop)
                if not pushop.force:
                    _checksubrepostate(pushop)
                if not _forcebundle1(pushop):
                    _pushbundle2(pushop)
                _pushchangeset(pushop)
                _pushsyncphase(pushop)
                _pushobsolete(pushop)
                _pushbookmark(pushop)

    if repo.ui.configbool(b'experimental', b'remotenames'):
        logexchange.pullremotenames(repo, remote)

    return pushop
665
665
666
666
# ordered list of step names: discovery steps run in this order before push
pushdiscoveryorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
pushdiscoverymapping = {}
674
674
675
675
def pushdiscovery(stepname):
    """Decorator registering a discovery step to run before push.

    The decorated function is recorded in the step -> function mapping
    and its name is appended to the ordered step list, so decoration
    order matters.

    Only usable for brand new steps; to wrap an existing step from an
    extension, mutate the pushdiscovery dictionary directly.
    """

    def register(func):
        assert stepname not in pushdiscoverymapping
        pushdiscoveryorder.append(stepname)
        pushdiscoverymapping[stepname] = func
        return func

    return register
693
693
694
694
def _pushdiscovery(pushop):
    """Run every registered discovery step, in registration order."""
    for name in pushdiscoveryorder:
        pushdiscoverymapping[name](pushop)
700
700
701
701
702 def _checksubrepostate(pushop):
702 def _checksubrepostate(pushop):
703 """Ensure all outgoing referenced subrepo revisions are present locally"""
703 """Ensure all outgoing referenced subrepo revisions are present locally"""
704 for n in pushop.outgoing.missing:
704 for n in pushop.outgoing.missing:
705 ctx = pushop.repo[n]
705 ctx = pushop.repo[n]
706
706
707 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
707 if b'.hgsub' in ctx.manifest() and b'.hgsubstate' in ctx.files():
708 for subpath in sorted(ctx.substate):
708 for subpath in sorted(ctx.substate):
709 sub = ctx.sub(subpath)
709 sub = ctx.sub(subpath)
710 sub.verify(onpush=True)
710 sub.verify(onpush=True)
711
711
712
712
@pushdiscovery(b'changeset')
def _pushdiscoverychangeset(pushop):
    """Discover the changesets that need to be pushed.

    Fills in pushop.outgoing, pushop.remoteheads and pushop.incoming
    from the common/incoming/outgoing discovery results.
    """
    repo, remote = pushop.repo, pushop.remote
    if pushop.revs:
        commoninc = discovery.findcommonincoming(
            repo, remote, force=pushop.force, ancestorsof=pushop.revs
        )
    else:
        commoninc = discovery.findcommonincoming(
            repo, remote, force=pushop.force
        )
    common, inc, remoteheads = commoninc
    pushop.outgoing = discovery.findcommonoutgoing(
        repo,
        remote,
        onlyheads=pushop.revs,
        commoninc=commoninc,
        force=pushop.force,
    )
    pushop.remoteheads = remoteheads
    pushop.incoming = inc
738
738
739
739
@pushdiscovery(b'phase')
def _pushdiscoveryphase(pushop):
    """discover the phase that needs to be pushed

    (computed for both success and failure case for changesets push)

    Fills in pushop.outdatedphases (success case) and
    pushop.fallbackoutdatedphases (failure case)."""
    outgoing = pushop.outgoing
    unfi = pushop.repo.unfiltered()
    remotephases = listkeys(pushop.remote, b'phases')

    if (
        pushop.ui.configbool(b'ui', b'_usedassubrepo')
        and remotephases  # server supports phases
        and not pushop.outgoing.missing  # no changesets to be pushed
        and remotephases.get(b'publishing', False)
    ):
        # When:
        # - this is a subrepo push
        # - and remote support phase
        # - and no changeset are to be pushed
        # - and remote is publishing
        # We may be in issue 3781 case!
        # We drop the possible phase synchronisation done by
        # courtesy to publish changesets possibly locally draft
        # on the remote.
        pushop.outdatedphases = []
        pushop.fallbackoutdatedphases = []
        return

    pushop.remotephases = phases.remotephasessummary(
        pushop.repo, pushop.fallbackheads, remotephases
    )
    droots = pushop.remotephases.draftroots

    extracond = b''
    if not pushop.remotephases.publishing:
        # non-publishing server: only already-public changesets are outdated
        extracond = b' and public()'
    revset = b'heads((%%ln::%%ln) %s)' % extracond
    # Get the list of all revs draft on remote but public here.
    # XXX Beware that revset break if droots is not strictly
    # XXX root we may want to ensure it is but it is costly
    fallback = list(unfi.set(revset, droots, pushop.fallbackheads))
    if not pushop.remotephases.publishing and pushop.publish:
        # --publish on a non-publishing server: everything pushed (and the
        # remote draft roots' descendants) must become public
        future = list(
            unfi.set(
                b'%ln and (not public() or %ln::)', pushop.futureheads, droots
            )
        )
    elif not outgoing.missing:
        # nothing new to push: success and failure cases are identical
        future = fallback
    else:
        # adds changeset we are going to push as draft
        #
        # should not be necessary for publishing server, but because of an
        # issue fixed in xxxxx we have to do it anyway.
        fdroots = list(
            unfi.set(b'roots(%ln + %ln::)', outgoing.missing, droots)
        )
        fdroots = [f.node() for f in fdroots]
        future = list(unfi.set(revset, fdroots, pushop.futureheads))
    pushop.outdatedphases = future
    pushop.fallbackoutdatedphases = fallback
801
801
802
802
@pushdiscovery(b'obsmarker')
def _pushdiscoveryobsmarkers(pushop):
    """Select the obsolescence markers relevant to the pushed changesets.

    A no-op unless obsmarker exchange is enabled locally, the local
    obsstore is non-empty, and the remote advertises the 'obsolete'
    pushkey namespace.
    """
    repo = pushop.repo
    if (
        not obsolete.isenabled(repo, obsolete.exchangeopt)
        or not repo.obsstore
        or b'obsolete' not in listkeys(pushop.remote, b'namespaces')
    ):
        return

    # very naive computation, that can be quite expensive on big repo.
    # However: evolution is currently slow on them anyway.
    nodes = (ctx.node() for ctx in repo.set(b'::%ln', pushop.futureheads))
    pushop.outobsmarkers = repo.obsstore.relevantmarkers(nodes)
819
819
820
820
@pushdiscovery(b'bookmarks')
def _pushdiscoverybookmarks(pushop):
    """Compare local and remote bookmarks and decide which ones to push."""
    repo = pushop.repo.unfiltered()
    pushop.ui.debug(b"checking for updated bookmarks\n")

    # when pushing a subset of revs, only bookmarks on their ancestors move
    ancestors = ()
    if pushop.revs:
        revnums = pycompat.maplist(repo.changelog.rev, pushop.revs)
        ancestors = repo.changelog.ancestors(revnums, inclusive=True)

    remotebookmark = bookmod.unhexlifybookmarks(
        listkeys(pushop.remote, b'bookmarks')
    )
    explicit = {
        repo._bookmarks.expandname(name) for name in pushop.bookmarks
    }
    comp = bookmod.comparebookmarks(repo, repo._bookmarks, remotebookmark)
    return _processcompared(pushop, ancestors, explicit, remotebookmark, comp)
840
840
841
841
def _processcompared(pushop, pushed, explicit, remotebms, comp):
    """take decision on bookmarks to push to the remote repo

    Exists to help extensions alter this behavior.

    ``pushed`` is the set of pushed revnums (empty means "everything"),
    ``explicit`` the set of bookmark names the user named on the command
    line, and ``comp`` the 8-tuple from bookmod.comparebookmarks.
    Appends (name, old-node, new-node) entries to pushop.outbookmarks.
    """
    addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp

    repo = pushop.repo

    # bookmarks that moved forward locally: push if their target is pushed
    for b, scid, dcid in advsrc:
        if b in explicit:
            explicit.remove(b)
        if not pushed or repo[scid].rev() in pushed:
            pushop.outbookmarks.append((b, dcid, scid))
    # search added bookmark
    for b, scid, dcid in addsrc:
        if b in explicit:
            explicit.remove(b)
        if bookmod.isdivergent(b):
            pushop.ui.warn(_(b'cannot push divergent bookmark %s!\n') % b)
            pushop.bkresult = 2
        else:
            pushop.outbookmarks.append((b, b'', scid))
    # search for overwritten bookmark (only overwritten when explicitly named)
    for b, scid, dcid in list(advdst) + list(diverge) + list(differ):
        if b in explicit:
            explicit.remove(b)
            pushop.outbookmarks.append((b, dcid, scid))
    # search for bookmark to delete (only when explicitly named)
    for b, scid, dcid in adddst:
        if b in explicit:
            explicit.remove(b)
            # treat as "deleted locally"
            pushop.outbookmarks.append((b, dcid, b''))
    # identical bookmarks shouldn't get reported
    for b, scid, dcid in same:
        if b in explicit:
            explicit.remove(b)

    # anything left in explicit was never matched on either side
    if explicit:
        explicit = sorted(explicit)
        # we should probably list all of them
        pushop.ui.warn(
            _(
                b'bookmark %s does not exist on the local '
                b'or remote repository!\n'
            )
            % explicit[0]
        )
        pushop.bkresult = 2

    pushop.outbookmarks.sort()
894
894
895
895
def _pushcheckoutgoing(pushop):
    """validate the set of outgoing changesets before pushing

    Returns False when there is nothing to push (after telling the user so).
    Otherwise, unless the push is forced, aborts if any outgoing head is
    obsolete or unstable, runs the remote-head checks, and returns True.
    """
    outgoing = pushop.outgoing
    # work on the unfiltered repo so hidden (e.g. obsolete) changesets are
    # visible to the checks below
    unfi = pushop.repo.unfiltered()
    if not outgoing.missing:
        # nothing to push
        scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded)
        return False
    # something to push
    if not pushop.force:
        # if repo.obsstore == False --> no obsolete
        # then, save the iteration
        if unfi.obsstore:
            # this message are here for 80 char limit reason
            mso = _(b"push includes obsolete changeset: %s!")
            mspd = _(b"push includes phase-divergent changeset: %s!")
            mscd = _(b"push includes content-divergent changeset: %s!")
            mst = {
                b"orphan": _(b"push includes orphan changeset: %s!"),
                b"phase-divergent": mspd,
                b"content-divergent": mscd,
            }
            # If we are to push if there is at least one
            # obsolete or unstable changeset in missing, at
            # least one of the missinghead will be obsolete or
            # unstable. So checking heads only is ok
            for node in outgoing.ancestorsof:
                ctx = unfi[node]
                if ctx.obsolete():
                    raise error.Abort(mso % ctx)
                elif ctx.isunstable():
                    # TODO print more than one instability in the abort
                    # message
                    raise error.Abort(mst[ctx.instabilities()[0]] % ctx)

    # may abort if the push would create problematic new remote heads
    discovery.checkheads(pushop)
    return True
932
932
933
933
# List of names of steps to perform for an outgoing bundle2, order matters.
b2partsgenorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
b2partsgenmapping = {}


def b2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part

    The function is added to the step -> function mapping and appended to the
    list of steps. Beware that decorated functions will be added in order
    (this may matter).

    You can only use this decorator for new steps, if you want to wrap a step
    from an extension, attack the b2partsgenmapping dictionary directly."""

    def dec(partgen):
        # each step name may only be registered once
        assert stepname not in b2partsgenmapping
        b2partsgenmapping[stepname] = partgen
        if idx is not None:
            b2partsgenorder.insert(idx, stepname)
        else:
            b2partsgenorder.append(stepname)
        return partgen

    return dec
963
963
964
964
965 def _pushb2ctxcheckheads(pushop, bundler):
965 def _pushb2ctxcheckheads(pushop, bundler):
966 """Generate race condition checking parts
966 """Generate race condition checking parts
967
967
968 Exists as an independent function to aid extensions
968 Exists as an independent function to aid extensions
969 """
969 """
970 # * 'force' do not check for push race,
970 # * 'force' do not check for push race,
971 # * if we don't push anything, there are nothing to check.
971 # * if we don't push anything, there are nothing to check.
972 if not pushop.force and pushop.outgoing.ancestorsof:
972 if not pushop.force and pushop.outgoing.ancestorsof:
973 allowunrelated = b'related' in bundler.capabilities.get(
973 allowunrelated = b'related' in bundler.capabilities.get(
974 b'checkheads', ()
974 b'checkheads', ()
975 )
975 )
976 emptyremote = pushop.pushbranchmap is None
976 emptyremote = pushop.pushbranchmap is None
977 if not allowunrelated or emptyremote:
977 if not allowunrelated or emptyremote:
978 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
978 bundler.newpart(b'check:heads', data=iter(pushop.remoteheads))
979 else:
979 else:
980 affected = set()
980 affected = set()
981 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
981 for branch, heads in pycompat.iteritems(pushop.pushbranchmap):
982 remoteheads, newheads, unsyncedheads, discardedheads = heads
982 remoteheads, newheads, unsyncedheads, discardedheads = heads
983 if remoteheads is not None:
983 if remoteheads is not None:
984 remote = set(remoteheads)
984 remote = set(remoteheads)
985 affected |= set(discardedheads) & remote
985 affected |= set(discardedheads) & remote
986 affected |= remote - set(newheads)
986 affected |= remote - set(newheads)
987 if affected:
987 if affected:
988 data = iter(sorted(affected))
988 data = iter(sorted(affected))
989 bundler.newpart(b'check:updated-heads', data=data)
989 bundler.newpart(b'check:updated-heads', data=data)
990
990
991
991
992 def _pushing(pushop):
992 def _pushing(pushop):
993 """return True if we are pushing anything"""
993 """return True if we are pushing anything"""
994 return bool(
994 return bool(
995 pushop.outgoing.missing
995 pushop.outgoing.missing
996 or pushop.outdatedphases
996 or pushop.outdatedphases
997 or pushop.outobsmarkers
997 or pushop.outobsmarkers
998 or pushop.outbookmarks
998 or pushop.outbookmarks
999 )
999 )
1000
1000
1001
1001
@b2partsgenerator(b'check-bookmarks')
def _pushb2checkbookmarks(pushop, bundler):
    """insert bookmark move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    if b'bookmarks' not in b2caps or not pushop.outbookmarks:
        # remote cannot check bookmarks, or there is nothing to check
        return
    # record the expected old value of every bookmark we are moving
    data = [(book, old) for book, old, new in pushop.outbookmarks]
    bundler.newpart(b'check:bookmarks', data=bookmod.binaryencode(data))
1016
1016
1017
1017
@b2partsgenerator(b'check-phases')
def _pushb2checkphases(pushop, bundler):
    """insert phase move checking"""
    if not _pushing(pushop) or pushop.force:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())
    if pushop.remotephases is None or not hasphaseheads:
        return
    # check that the remote phase has not changed
    checks = {p: [] for p in phases.allphases}
    checks[phases.public].extend(pushop.remotephases.publicheads)
    checks[phases.draft].extend(pushop.remotephases.draftroots)
    if any(pycompat.itervalues(checks)):
        for phase in checks:
            checks[phase].sort()
        bundler.newpart(b'check:phases', data=phases.binaryencode(checks))
1035
1035
1036
1036
@b2partsgenerator(b'changeset')
def _pushb2ctx(pushop, bundler):
    """handle changegroup push through bundle2

    addchangegroup result is stored in the ``pushop.cgresult`` attribute.
    """
    # run at most once per push
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    # Send known heads to the server for race detection.
    if not _pushcheckoutgoing(pushop):
        return
    pushop.repo.prepushoutgoinghooks(pushop)

    _pushb2ctxcheckheads(pushop, bundler)

    # negotiate the changegroup version: keep only versions supported by
    # both sides and pick the highest; default to '01' otherwise
    b2caps = bundle2.bundle2caps(pushop.remote)
    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(pushop.repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)
    cgstream = changegroup.makestream(
        pushop.repo, pushop.outgoing, version, b'push'
    )
    cgpart = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        cgpart.addparam(b'version', version)
    if scmutil.istreemanifest(pushop.repo):
        cgpart.addparam(b'treemanifest', b'1')
    if b'exp-sidedata-flag' in pushop.repo.requirements:
        cgpart.addparam(b'exp-sidedata', b'1')

    def handlereply(op):
        """extract addchangegroup returns from server reply"""
        cgreplies = op.records.getreplies(cgpart.id)
        assert len(cgreplies[b'changegroup']) == 1
        pushop.cgresult = cgreplies[b'changegroup'][0][b'return']

    # returned to _pushbundle2, which calls it once the reply bundle has
    # been processed
    return handlereply
1083
1083
1084
1084
@b2partsgenerator(b'phase')
def _pushb2phases(pushop, bundler):
    """handle phase push through bundle2"""
    if b'phases' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    ui = pushop.repo.ui

    legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
    hasphaseheads = b'heads' in b2caps.get(b'phases', ())

    # prefer the binary phase-heads part unless legacy mode is forced
    if hasphaseheads and not legacyphase:
        return _pushb2phaseheads(pushop, bundler)
    if b'pushkey' in b2caps:
        return _pushb2phasespushkey(pushop, bundler)
1101
1101
1102
1102
1103 def _pushb2phaseheads(pushop, bundler):
1103 def _pushb2phaseheads(pushop, bundler):
1104 """push phase information through a bundle2 - binary part"""
1104 """push phase information through a bundle2 - binary part"""
1105 pushop.stepsdone.add(b'phases')
1105 pushop.stepsdone.add(b'phases')
1106 if pushop.outdatedphases:
1106 if pushop.outdatedphases:
1107 updates = {p: [] for p in phases.allphases}
1107 updates = {p: [] for p in phases.allphases}
1108 updates[0].extend(h.node() for h in pushop.outdatedphases)
1108 updates[0].extend(h.node() for h in pushop.outdatedphases)
1109 phasedata = phases.binaryencode(updates)
1109 phasedata = phases.binaryencode(updates)
1110 bundler.newpart(b'phase-heads', data=phasedata)
1110 bundler.newpart(b'phase-heads', data=phasedata)
1111
1111
1112
1112
def _pushb2phasespushkey(pushop, bundler):
    """push phase information through a bundle2 - pushkey part"""
    pushop.stepsdone.add(b'phases')
    # (part id, node) pairs, used by both callbacks below to map a part
    # back to the head it was publishing
    part2node = []

    def handlefailure(pushop, exc):
        # registered in pushop.pkfailcb; turns a pushkey part failure into
        # a user-facing abort naming the affected node
        targetid = int(exc.partid)
        for partid, node in part2node:
            if partid == targetid:
                raise error.Abort(_(b'updating %s to public failed') % node)

    enc = pushkey.encode
    # one pushkey part per head being moved from draft to public
    for newremotehead in pushop.outdatedphases:
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'phases'))
        part.addparam(b'key', enc(newremotehead.hex()))
        part.addparam(b'old', enc(b'%d' % phases.draft))
        part.addparam(b'new', enc(b'%d' % phases.public))
        part2node.append((part.id, newremotehead))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # inspect the server's reply for each part and warn on any phase
        # update that was ignored or rejected
        for partid, node in part2node:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            msg = None
            if not results:
                msg = _(b'server ignored update of %s to public!\n') % node
            elif not int(results[0][b'return']):
                msg = _(b'updating %s to public failed!\n') % node
            if msg is not None:
                pushop.ui.warn(msg)

    return handlereply
1148
1148
1149
1149
@b2partsgenerator(b'obsmarkers')
def _pushb2obsmarkers(pushop, bundler):
    """add outgoing obsolescence markers to the bundle, if supported"""
    if b'obsmarkers' in pushop.stepsdone:
        return
    remoteversions = bundle2.obsmarkersversion(bundler.capabilities)
    if obsolete.commonversion(remoteversions) is None:
        # no obsmarker format shared with the remote
        return
    pushop.stepsdone.add(b'obsmarkers')
    if pushop.outobsmarkers:
        bundle2.buildobsmarkerspart(
            bundler, obsutil.sortedmarkers(pushop.outobsmarkers)
        )
1161
1161
1162
1162
@b2partsgenerator(b'bookmarks')
def _pushb2bookmarks(pushop, bundler):
    """handle bookmark push through bundle2

    Uses the binary 'bookmarks' part when the remote supports it (and the
    legacy exchange mode is not forced), falling back to pushkey otherwise.
    """
    if b'bookmarks' in pushop.stepsdone:
        return
    b2caps = bundle2.bundle2caps(pushop.remote)
    forcelegacy = b'bookmarks' in pushop.repo.ui.configlist(
        b'devel', b'legacy.exchange'
    )
    if b'bookmarks' in b2caps and not forcelegacy:
        return _pushb2bookmarkspart(pushop, bundler)
    if b'pushkey' in b2caps:
        return _pushb2bookmarkspushkey(pushop, bundler)
1177
1177
1178
1178
1179 def _bmaction(old, new):
1179 def _bmaction(old, new):
1180 """small utility for bookmark pushing"""
1180 """small utility for bookmark pushing"""
1181 if not old:
1181 if not old:
1182 return b'export'
1182 return b'export'
1183 elif not new:
1183 elif not new:
1184 return b'delete'
1184 return b'delete'
1185 return b'update'
1185 return b'update'
1186
1186
1187
1187
1188 def _abortonsecretctx(pushop, node, b):
1188 def _abortonsecretctx(pushop, node, b):
1189 """abort if a given bookmark points to a secret changeset"""
1189 """abort if a given bookmark points to a secret changeset"""
1190 if node and pushop.repo[node].phase() == phases.secret:
1190 if node and pushop.repo[node].phase() == phases.secret:
1191 raise error.Abort(
1191 raise error.Abort(
1192 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1192 _(b'cannot push bookmark %s as it points to a secret changeset') % b
1193 )
1193 )
1194
1194
1195
1195
1196 def _pushb2bookmarkspart(pushop, bundler):
1196 def _pushb2bookmarkspart(pushop, bundler):
1197 pushop.stepsdone.add(b'bookmarks')
1197 pushop.stepsdone.add(b'bookmarks')
1198 if not pushop.outbookmarks:
1198 if not pushop.outbookmarks:
1199 return
1199 return
1200
1200
1201 allactions = []
1201 allactions = []
1202 data = []
1202 data = []
1203 for book, old, new in pushop.outbookmarks:
1203 for book, old, new in pushop.outbookmarks:
1204 _abortonsecretctx(pushop, new, book)
1204 _abortonsecretctx(pushop, new, book)
1205 data.append((book, new))
1205 data.append((book, new))
1206 allactions.append((book, _bmaction(old, new)))
1206 allactions.append((book, _bmaction(old, new)))
1207 checkdata = bookmod.binaryencode(data)
1207 checkdata = bookmod.binaryencode(data)
1208 bundler.newpart(b'bookmarks', data=checkdata)
1208 bundler.newpart(b'bookmarks', data=checkdata)
1209
1209
1210 def handlereply(op):
1210 def handlereply(op):
1211 ui = pushop.ui
1211 ui = pushop.ui
1212 # if success
1212 # if success
1213 for book, action in allactions:
1213 for book, action in allactions:
1214 ui.status(bookmsgmap[action][0] % book)
1214 ui.status(bookmsgmap[action][0] % book)
1215
1215
1216 return handlereply
1216 return handlereply
1217
1217
1218
1218
def _pushb2bookmarkspushkey(pushop, bundler):
    """push bookmark updates through one pushkey part per bookmark"""
    pushop.stepsdone.add(b'bookmarks')
    # (part id, bookmark name, action) triples shared by both callbacks
    part2book = []
    enc = pushkey.encode

    def handlefailure(pushop, exc):
        # registered in pushop.pkfailcb; maps a failing part back to its
        # bookmark and aborts with that bookmark's failure message
        targetid = int(exc.partid)
        for partid, book, action in part2book:
            if partid == targetid:
                raise error.Abort(bookmsgmap[action][1].rstrip() % book)
        # we should not be called for part we did not generated
        assert False

    for book, old, new in pushop.outbookmarks:
        # a bookmark must never reveal a secret changeset
        _abortonsecretctx(pushop, new, book)
        part = bundler.newpart(b'pushkey')
        part.addparam(b'namespace', enc(b'bookmarks'))
        part.addparam(b'key', enc(book))
        part.addparam(b'old', enc(hex(old)))
        part.addparam(b'new', enc(hex(new)))
        # classify the change for user-facing messages
        action = b'update'
        if not old:
            action = b'export'
        elif not new:
            action = b'delete'
        part2book.append((part.id, book, action))
        pushop.pkfailcb[part.id] = handlefailure

    def handlereply(op):
        # report per-bookmark success/failure based on the server replies
        ui = pushop.ui
        for partid, book, action in part2book:
            partrep = op.records.getreplies(partid)
            results = partrep[b'pushkey']
            assert len(results) <= 1
            if not results:
                pushop.ui.warn(_(b'server ignored bookmark %s update\n') % book)
            else:
                ret = int(results[0][b'return'])
                if ret:
                    ui.status(bookmsgmap[action][0] % book)
                else:
                    ui.warn(bookmsgmap[action][1] % book)
        if pushop.bkresult is not None:
            pushop.bkresult = 1

    return handlereply
1265
1265
1266
1266
@b2partsgenerator(b'pushvars', idx=0)
def _getbundlesendvars(pushop, bundler):
    '''send shellvars via bundle2'''
    pushvars = pushop.pushvars
    if not pushvars:
        return

    shellvars = {}
    for raw in pushvars:
        if b'=' not in raw:
            msg = (
                b"unable to parse variable '%s', should follow "
                b"'KEY=VALUE' or 'KEY=' format"
            )
            raise error.Abort(msg % raw)
        # split on the first '=' only, so values may themselves contain '='
        key, value = raw.split(b'=', 1)
        shellvars[key] = value

    part = bundler.newpart(b'pushvars')
    for key, value in pycompat.iteritems(shellvars):
        part.addparam(key, value, mandatory=False)
1287
1287
1288
1288
def _pushbundle2(pushop):
    """push data to the remote using bundle2

    The only currently supported type of data is changegroup but this will
    evolve in the future."""
    bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote))
    # should the remote be allowed to send a bundle back (e.g. for hooks)?
    pushback = pushop.trmanager and pushop.ui.configbool(
        b'experimental', b'bundle2.pushback'
    )

    # create reply capability
    capsblob = bundle2.encodecaps(
        bundle2.getrepocaps(pushop.repo, allowpushback=pushback, role=b'client')
    )
    bundler.newpart(b'replycaps', data=capsblob)
    # run every registered part generator; those that need to inspect the
    # server reply return a handler, collected here and run at the end
    replyhandlers = []
    for partgenname in b2partsgenorder:
        partgen = b2partsgenmapping[partgenname]
        ret = partgen(pushop, bundler)
        if callable(ret):
            replyhandlers.append(ret)
    # do not push if nothing to push
    if bundler.nbparts <= 1:
        return
    stream = util.chunkbuffer(bundler.getchunks())
    try:
        try:
            with pushop.remote.commandexecutor() as e:
                reply = e.callcommand(
                    b'unbundle',
                    {
                        b'bundle': stream,
                        b'heads': [b'force'],
                        b'url': pushop.remote.url(),
                    },
                ).result()
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        try:
            trgetter = None
            if pushback:
                trgetter = pushop.trmanager.transaction
            # process the reply bundle the server sent back
            op = bundle2.processbundle(pushop.repo, reply, trgetter)
        except error.BundleValueError as exc:
            raise error.Abort(_(b'missing support for %s') % exc)
        except bundle2.AbortFromPart as exc:
            # the remote aborted mid-bundle; relay its message and hint
            pushop.ui.status(_(b'remote: %s\n') % exc)
            if exc.hint is not None:
                pushop.ui.status(_(b'remote: %s\n') % (b'(%s)' % exc.hint))
            raise error.Abort(_(b'push failed on remote'))
    except error.PushkeyFailed as exc:
        # dispatch pushkey failures to the callback registered by the part
        # generator that created the failing part (outer try: the failure
        # may surface while processing the reply bundle)
        partid = int(exc.partid)
        if partid not in pushop.pkfailcb:
            raise
        pushop.pkfailcb[partid](pushop, exc)
    for rephand in replyhandlers:
        rephand(op)
1346
1346
1347
1347
def _pushchangeset(pushop):
    """Make the actual push of changeset bundle to remote repo

    Legacy (non-bundle2) path: builds a version '01' changegroup and sends
    it through the remote's ``unbundle`` command, storing the result in
    ``pushop.cgresult``.
    """
    if b'changesets' in pushop.stepsdone:
        return
    pushop.stepsdone.add(b'changesets')
    if not _pushcheckoutgoing(pushop):
        return

    # Should have verified this in push().
    assert pushop.remote.capable(b'unbundle')

    pushop.repo.prepushoutgoinghooks(pushop)
    outgoing = pushop.outgoing
    # TODO: get bundlecaps from remote
    bundlecaps = None
    # create a changegroup from local
    if pushop.revs is None and not (
        outgoing.excluded or pushop.repo.changelog.filteredrevs
    ):
        # push everything,
        # use the fast path, no race possible on push
        cg = changegroup.makechangegroup(
            pushop.repo,
            outgoing,
            b'01',
            b'push',
            fastpath=True,
            bundlecaps=bundlecaps,
        )
    else:
        cg = changegroup.makechangegroup(
            pushop.repo, outgoing, b'01', b'push', bundlecaps=bundlecaps
        )

    # apply changegroup to remote
    # local repo finds heads on server, finds out what
    # revs it must push. once revs transferred, if server
    # finds it has different heads (someone else won
    # commit/push race), server aborts.
    if pushop.force:
        remoteheads = [b'force']
    else:
        remoteheads = pushop.remoteheads
    # ssh: return remote's addchangegroup()
    # http: return remote's addchangegroup() or 0 for error
    pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url())
1394
1394
1395
1395
1396 def _pushsyncphase(pushop):
1396 def _pushsyncphase(pushop):
1397 """synchronise phase information locally and remotely"""
1397 """synchronise phase information locally and remotely"""
1398 cheads = pushop.commonheads
1398 cheads = pushop.commonheads
1399 # even when we don't push, exchanging phase data is useful
1399 # even when we don't push, exchanging phase data is useful
1400 remotephases = listkeys(pushop.remote, b'phases')
1400 remotephases = listkeys(pushop.remote, b'phases')
1401 if (
1401 if (
1402 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1402 pushop.ui.configbool(b'ui', b'_usedassubrepo')
1403 and remotephases # server supports phases
1403 and remotephases # server supports phases
1404 and pushop.cgresult is None # nothing was pushed
1404 and pushop.cgresult is None # nothing was pushed
1405 and remotephases.get(b'publishing', False)
1405 and remotephases.get(b'publishing', False)
1406 ):
1406 ):
1407 # When:
1407 # When:
1408 # - this is a subrepo push
1408 # - this is a subrepo push
1409 # - and remote support phase
1409 # - and remote support phase
1410 # - and no changeset was pushed
1410 # - and no changeset was pushed
1411 # - and remote is publishing
1411 # - and remote is publishing
1412 # We may be in issue 3871 case!
1412 # We may be in issue 3871 case!
1413 # We drop the possible phase synchronisation done by
1413 # We drop the possible phase synchronisation done by
1414 # courtesy to publish changesets possibly locally draft
1414 # courtesy to publish changesets possibly locally draft
1415 # on the remote.
1415 # on the remote.
1416 remotephases = {b'publishing': b'True'}
1416 remotephases = {b'publishing': b'True'}
1417 if not remotephases: # old server or public only reply from non-publishing
1417 if not remotephases: # old server or public only reply from non-publishing
1418 _localphasemove(pushop, cheads)
1418 _localphasemove(pushop, cheads)
1419 # don't push any phase data as there is nothing to push
1419 # don't push any phase data as there is nothing to push
1420 else:
1420 else:
1421 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1421 ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases)
1422 pheads, droots = ana
1422 pheads, droots = ana
1423 ### Apply remote phase on local
1423 ### Apply remote phase on local
1424 if remotephases.get(b'publishing', False):
1424 if remotephases.get(b'publishing', False):
1425 _localphasemove(pushop, cheads)
1425 _localphasemove(pushop, cheads)
1426 else: # publish = False
1426 else: # publish = False
1427 _localphasemove(pushop, pheads)
1427 _localphasemove(pushop, pheads)
1428 _localphasemove(pushop, cheads, phases.draft)
1428 _localphasemove(pushop, cheads, phases.draft)
1429 ### Apply local phase on remote
1429 ### Apply local phase on remote
1430
1430
1431 if pushop.cgresult:
1431 if pushop.cgresult:
1432 if b'phases' in pushop.stepsdone:
1432 if b'phases' in pushop.stepsdone:
1433 # phases already pushed though bundle2
1433 # phases already pushed though bundle2
1434 return
1434 return
1435 outdated = pushop.outdatedphases
1435 outdated = pushop.outdatedphases
1436 else:
1436 else:
1437 outdated = pushop.fallbackoutdatedphases
1437 outdated = pushop.fallbackoutdatedphases
1438
1438
1439 pushop.stepsdone.add(b'phases')
1439 pushop.stepsdone.add(b'phases')
1440
1440
1441 # filter heads already turned public by the push
1441 # filter heads already turned public by the push
1442 outdated = [c for c in outdated if c.node() not in pheads]
1442 outdated = [c for c in outdated if c.node() not in pheads]
1443 # fallback to independent pushkey command
1443 # fallback to independent pushkey command
1444 for newremotehead in outdated:
1444 for newremotehead in outdated:
1445 with pushop.remote.commandexecutor() as e:
1445 with pushop.remote.commandexecutor() as e:
1446 r = e.callcommand(
1446 r = e.callcommand(
1447 b'pushkey',
1447 b'pushkey',
1448 {
1448 {
1449 b'namespace': b'phases',
1449 b'namespace': b'phases',
1450 b'key': newremotehead.hex(),
1450 b'key': newremotehead.hex(),
1451 b'old': b'%d' % phases.draft,
1451 b'old': b'%d' % phases.draft,
1452 b'new': b'%d' % phases.public,
1452 b'new': b'%d' % phases.public,
1453 },
1453 },
1454 ).result()
1454 ).result()
1455
1455
1456 if not r:
1456 if not r:
1457 pushop.ui.warn(
1457 pushop.ui.warn(
1458 _(b'updating %s to public failed!\n') % newremotehead
1458 _(b'updating %s to public failed!\n') % newremotehead
1459 )
1459 )
1460
1460
1461
1461
1462 def _localphasemove(pushop, nodes, phase=phases.public):
1462 def _localphasemove(pushop, nodes, phase=phases.public):
1463 """move <nodes> to <phase> in the local source repo"""
1463 """move <nodes> to <phase> in the local source repo"""
1464 if pushop.trmanager:
1464 if pushop.trmanager:
1465 phases.advanceboundary(
1465 phases.advanceboundary(
1466 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1466 pushop.repo, pushop.trmanager.transaction(), phase, nodes
1467 )
1467 )
1468 else:
1468 else:
1469 # repo is not locked, do not change any phases!
1469 # repo is not locked, do not change any phases!
1470 # Informs the user that phases should have been moved when
1470 # Informs the user that phases should have been moved when
1471 # applicable.
1471 # applicable.
1472 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1472 actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
1473 phasestr = phases.phasenames[phase]
1473 phasestr = phases.phasenames[phase]
1474 if actualmoves:
1474 if actualmoves:
1475 pushop.ui.status(
1475 pushop.ui.status(
1476 _(
1476 _(
1477 b'cannot lock source repo, skipping '
1477 b'cannot lock source repo, skipping '
1478 b'local %s phase update\n'
1478 b'local %s phase update\n'
1479 )
1479 )
1480 % phasestr
1480 % phasestr
1481 )
1481 )
1482
1482
1483
1483
1484 def _pushobsolete(pushop):
1484 def _pushobsolete(pushop):
1485 """utility function to push obsolete markers to a remote"""
1485 """utility function to push obsolete markers to a remote"""
1486 if b'obsmarkers' in pushop.stepsdone:
1486 if b'obsmarkers' in pushop.stepsdone:
1487 return
1487 return
1488 repo = pushop.repo
1488 repo = pushop.repo
1489 remote = pushop.remote
1489 remote = pushop.remote
1490 pushop.stepsdone.add(b'obsmarkers')
1490 pushop.stepsdone.add(b'obsmarkers')
1491 if pushop.outobsmarkers:
1491 if pushop.outobsmarkers:
1492 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1492 pushop.ui.debug(b'try to push obsolete markers to remote\n')
1493 rslts = []
1493 rslts = []
1494 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1494 markers = obsutil.sortedmarkers(pushop.outobsmarkers)
1495 remotedata = obsolete._pushkeyescape(markers)
1495 remotedata = obsolete._pushkeyescape(markers)
1496 for key in sorted(remotedata, reverse=True):
1496 for key in sorted(remotedata, reverse=True):
1497 # reverse sort to ensure we end with dump0
1497 # reverse sort to ensure we end with dump0
1498 data = remotedata[key]
1498 data = remotedata[key]
1499 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1499 rslts.append(remote.pushkey(b'obsolete', key, b'', data))
1500 if [r for r in rslts if not r]:
1500 if [r for r in rslts if not r]:
1501 msg = _(b'failed to push some obsolete markers!\n')
1501 msg = _(b'failed to push some obsolete markers!\n')
1502 repo.ui.warn(msg)
1502 repo.ui.warn(msg)
1503
1503
1504
1504
1505 def _pushbookmark(pushop):
1505 def _pushbookmark(pushop):
1506 """Update bookmark position on remote"""
1506 """Update bookmark position on remote"""
1507 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1507 if pushop.cgresult == 0 or b'bookmarks' in pushop.stepsdone:
1508 return
1508 return
1509 pushop.stepsdone.add(b'bookmarks')
1509 pushop.stepsdone.add(b'bookmarks')
1510 ui = pushop.ui
1510 ui = pushop.ui
1511 remote = pushop.remote
1511 remote = pushop.remote
1512
1512
1513 for b, old, new in pushop.outbookmarks:
1513 for b, old, new in pushop.outbookmarks:
1514 action = b'update'
1514 action = b'update'
1515 if not old:
1515 if not old:
1516 action = b'export'
1516 action = b'export'
1517 elif not new:
1517 elif not new:
1518 action = b'delete'
1518 action = b'delete'
1519
1519
1520 with remote.commandexecutor() as e:
1520 with remote.commandexecutor() as e:
1521 r = e.callcommand(
1521 r = e.callcommand(
1522 b'pushkey',
1522 b'pushkey',
1523 {
1523 {
1524 b'namespace': b'bookmarks',
1524 b'namespace': b'bookmarks',
1525 b'key': b,
1525 b'key': b,
1526 b'old': hex(old),
1526 b'old': hex(old),
1527 b'new': hex(new),
1527 b'new': hex(new),
1528 },
1528 },
1529 ).result()
1529 ).result()
1530
1530
1531 if r:
1531 if r:
1532 ui.status(bookmsgmap[action][0] % b)
1532 ui.status(bookmsgmap[action][0] % b)
1533 else:
1533 else:
1534 ui.warn(bookmsgmap[action][1] % b)
1534 ui.warn(bookmsgmap[action][1] % b)
1535 # discovery can have set the value form invalid entry
1535 # discovery can have set the value form invalid entry
1536 if pushop.bkresult is not None:
1536 if pushop.bkresult is not None:
1537 pushop.bkresult = 1
1537 pushop.bkresult = 1
1538
1538
1539
1539
1540 class pulloperation(object):
1540 class pulloperation(object):
1541 """A object that represent a single pull operation
1541 """A object that represent a single pull operation
1542
1542
1543 It purpose is to carry pull related state and very common operation.
1543 It purpose is to carry pull related state and very common operation.
1544
1544
1545 A new should be created at the beginning of each pull and discarded
1545 A new should be created at the beginning of each pull and discarded
1546 afterward.
1546 afterward.
1547 """
1547 """
1548
1548
1549 def __init__(
1549 def __init__(
1550 self,
1550 self,
1551 repo,
1551 repo,
1552 remote,
1552 remote,
1553 heads=None,
1553 heads=None,
1554 force=False,
1554 force=False,
1555 bookmarks=(),
1555 bookmarks=(),
1556 remotebookmarks=None,
1556 remotebookmarks=None,
1557 streamclonerequested=None,
1557 streamclonerequested=None,
1558 includepats=None,
1558 includepats=None,
1559 excludepats=None,
1559 excludepats=None,
1560 depth=None,
1560 depth=None,
1561 ):
1561 ):
1562 # repo we pull into
1562 # repo we pull into
1563 self.repo = repo
1563 self.repo = repo
1564 # repo we pull from
1564 # repo we pull from
1565 self.remote = remote
1565 self.remote = remote
1566 # revision we try to pull (None is "all")
1566 # revision we try to pull (None is "all")
1567 self.heads = heads
1567 self.heads = heads
1568 # bookmark pulled explicitly
1568 # bookmark pulled explicitly
1569 self.explicitbookmarks = [
1569 self.explicitbookmarks = [
1570 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1570 repo._bookmarks.expandname(bookmark) for bookmark in bookmarks
1571 ]
1571 ]
1572 # do we force pull?
1572 # do we force pull?
1573 self.force = force
1573 self.force = force
1574 # whether a streaming clone was requested
1574 # whether a streaming clone was requested
1575 self.streamclonerequested = streamclonerequested
1575 self.streamclonerequested = streamclonerequested
1576 # transaction manager
1576 # transaction manager
1577 self.trmanager = None
1577 self.trmanager = None
1578 # set of common changeset between local and remote before pull
1578 # set of common changeset between local and remote before pull
1579 self.common = None
1579 self.common = None
1580 # set of pulled head
1580 # set of pulled head
1581 self.rheads = None
1581 self.rheads = None
1582 # list of missing changeset to fetch remotely
1582 # list of missing changeset to fetch remotely
1583 self.fetch = None
1583 self.fetch = None
1584 # remote bookmarks data
1584 # remote bookmarks data
1585 self.remotebookmarks = remotebookmarks
1585 self.remotebookmarks = remotebookmarks
1586 # result of changegroup pulling (used as return code by pull)
1586 # result of changegroup pulling (used as return code by pull)
1587 self.cgresult = None
1587 self.cgresult = None
1588 # list of step already done
1588 # list of step already done
1589 self.stepsdone = set()
1589 self.stepsdone = set()
1590 # Whether we attempted a clone from pre-generated bundles.
1590 # Whether we attempted a clone from pre-generated bundles.
1591 self.clonebundleattempted = False
1591 self.clonebundleattempted = False
1592 # Set of file patterns to include.
1592 # Set of file patterns to include.
1593 self.includepats = includepats
1593 self.includepats = includepats
1594 # Set of file patterns to exclude.
1594 # Set of file patterns to exclude.
1595 self.excludepats = excludepats
1595 self.excludepats = excludepats
1596 # Number of ancestor changesets to pull from each pulled head.
1596 # Number of ancestor changesets to pull from each pulled head.
1597 self.depth = depth
1597 self.depth = depth
1598
1598
1599 @util.propertycache
1599 @util.propertycache
1600 def pulledsubset(self):
1600 def pulledsubset(self):
1601 """heads of the set of changeset target by the pull"""
1601 """heads of the set of changeset target by the pull"""
1602 # compute target subset
1602 # compute target subset
1603 if self.heads is None:
1603 if self.heads is None:
1604 # We pulled every thing possible
1604 # We pulled every thing possible
1605 # sync on everything common
1605 # sync on everything common
1606 c = set(self.common)
1606 c = set(self.common)
1607 ret = list(self.common)
1607 ret = list(self.common)
1608 for n in self.rheads:
1608 for n in self.rheads:
1609 if n not in c:
1609 if n not in c:
1610 ret.append(n)
1610 ret.append(n)
1611 return ret
1611 return ret
1612 else:
1612 else:
1613 # We pulled a specific subset
1613 # We pulled a specific subset
1614 # sync on this subset
1614 # sync on this subset
1615 return self.heads
1615 return self.heads
1616
1616
1617 @util.propertycache
1617 @util.propertycache
1618 def canusebundle2(self):
1618 def canusebundle2(self):
1619 return not _forcebundle1(self)
1619 return not _forcebundle1(self)
1620
1620
1621 @util.propertycache
1621 @util.propertycache
1622 def remotebundle2caps(self):
1622 def remotebundle2caps(self):
1623 return bundle2.bundle2caps(self.remote)
1623 return bundle2.bundle2caps(self.remote)
1624
1624
1625 def gettransaction(self):
1625 def gettransaction(self):
1626 # deprecated; talk to trmanager directly
1626 # deprecated; talk to trmanager directly
1627 return self.trmanager.transaction()
1627 return self.trmanager.transaction()
1628
1628
1629
1629
1630 class transactionmanager(util.transactional):
1630 class transactionmanager(util.transactional):
1631 """An object to manage the life cycle of a transaction
1631 """An object to manage the life cycle of a transaction
1632
1632
1633 It creates the transaction on demand and calls the appropriate hooks when
1633 It creates the transaction on demand and calls the appropriate hooks when
1634 closing the transaction."""
1634 closing the transaction."""
1635
1635
1636 def __init__(self, repo, source, url):
1636 def __init__(self, repo, source, url):
1637 self.repo = repo
1637 self.repo = repo
1638 self.source = source
1638 self.source = source
1639 self.url = url
1639 self.url = url
1640 self._tr = None
1640 self._tr = None
1641
1641
1642 def transaction(self):
1642 def transaction(self):
1643 """Return an open transaction object, constructing if necessary"""
1643 """Return an open transaction object, constructing if necessary"""
1644 if not self._tr:
1644 if not self._tr:
1645 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1645 trname = b'%s\n%s' % (self.source, util.hidepassword(self.url))
1646 self._tr = self.repo.transaction(trname)
1646 self._tr = self.repo.transaction(trname)
1647 self._tr.hookargs[b'source'] = self.source
1647 self._tr.hookargs[b'source'] = self.source
1648 self._tr.hookargs[b'url'] = self.url
1648 self._tr.hookargs[b'url'] = self.url
1649 return self._tr
1649 return self._tr
1650
1650
1651 def close(self):
1651 def close(self):
1652 """close transaction if created"""
1652 """close transaction if created"""
1653 if self._tr is not None:
1653 if self._tr is not None:
1654 self._tr.close()
1654 self._tr.close()
1655
1655
1656 def release(self):
1656 def release(self):
1657 """release transaction if created"""
1657 """release transaction if created"""
1658 if self._tr is not None:
1658 if self._tr is not None:
1659 self._tr.release()
1659 self._tr.release()
1660
1660
1661
1661
1662 def listkeys(remote, namespace):
1662 def listkeys(remote, namespace):
1663 with remote.commandexecutor() as e:
1663 with remote.commandexecutor() as e:
1664 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1664 return e.callcommand(b'listkeys', {b'namespace': namespace}).result()
1665
1665
1666
1666
1667 def _fullpullbundle2(repo, pullop):
1667 def _fullpullbundle2(repo, pullop):
1668 # The server may send a partial reply, i.e. when inlining
1668 # The server may send a partial reply, i.e. when inlining
1669 # pre-computed bundles. In that case, update the common
1669 # pre-computed bundles. In that case, update the common
1670 # set based on the results and pull another bundle.
1670 # set based on the results and pull another bundle.
1671 #
1671 #
1672 # There are two indicators that the process is finished:
1672 # There are two indicators that the process is finished:
1673 # - no changeset has been added, or
1673 # - no changeset has been added, or
1674 # - all remote heads are known locally.
1674 # - all remote heads are known locally.
1675 # The head check must use the unfiltered view as obsoletion
1675 # The head check must use the unfiltered view as obsoletion
1676 # markers can hide heads.
1676 # markers can hide heads.
1677 unfi = repo.unfiltered()
1677 unfi = repo.unfiltered()
1678 unficl = unfi.changelog
1678 unficl = unfi.changelog
1679
1679
1680 def headsofdiff(h1, h2):
1680 def headsofdiff(h1, h2):
1681 """Returns heads(h1 % h2)"""
1681 """Returns heads(h1 % h2)"""
1682 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1682 res = unfi.set(b'heads(%ln %% %ln)', h1, h2)
1683 return {ctx.node() for ctx in res}
1683 return {ctx.node() for ctx in res}
1684
1684
1685 def headsofunion(h1, h2):
1685 def headsofunion(h1, h2):
1686 """Returns heads((h1 + h2) - null)"""
1686 """Returns heads((h1 + h2) - null)"""
1687 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1687 res = unfi.set(b'heads((%ln + %ln - null))', h1, h2)
1688 return {ctx.node() for ctx in res}
1688 return {ctx.node() for ctx in res}
1689
1689
1690 while True:
1690 while True:
1691 old_heads = unficl.heads()
1691 old_heads = unficl.heads()
1692 clstart = len(unficl)
1692 clstart = len(unficl)
1693 _pullbundle2(pullop)
1693 _pullbundle2(pullop)
1694 if requirements.NARROW_REQUIREMENT in repo.requirements:
1694 if requirements.NARROW_REQUIREMENT in repo.requirements:
1695 # XXX narrow clones filter the heads on the server side during
1695 # XXX narrow clones filter the heads on the server side during
1696 # XXX getbundle and result in partial replies as well.
1696 # XXX getbundle and result in partial replies as well.
1697 # XXX Disable pull bundles in this case as band aid to avoid
1697 # XXX Disable pull bundles in this case as band aid to avoid
1698 # XXX extra round trips.
1698 # XXX extra round trips.
1699 break
1699 break
1700 if clstart == len(unficl):
1700 if clstart == len(unficl):
1701 break
1701 break
1702 if all(unficl.hasnode(n) for n in pullop.rheads):
1702 if all(unficl.hasnode(n) for n in pullop.rheads):
1703 break
1703 break
1704 new_heads = headsofdiff(unficl.heads(), old_heads)
1704 new_heads = headsofdiff(unficl.heads(), old_heads)
1705 pullop.common = headsofunion(new_heads, pullop.common)
1705 pullop.common = headsofunion(new_heads, pullop.common)
1706 pullop.rheads = set(pullop.rheads) - pullop.common
1706 pullop.rheads = set(pullop.rheads) - pullop.common
1707
1707
1708
1708
1709 def add_confirm_callback(repo, pullop):
1709 def add_confirm_callback(repo, pullop):
1710 """ adds a finalize callback to transaction which can be used to show stats
1710 """ adds a finalize callback to transaction which can be used to show stats
1711 to user and confirm the pull before committing transaction """
1711 to user and confirm the pull before committing transaction """
1712
1712
1713 tr = pullop.trmanager.transaction()
1713 tr = pullop.trmanager.transaction()
1714 scmutil.registersummarycallback(
1714 scmutil.registersummarycallback(
1715 repo, tr, txnname=b'pull', as_validator=True
1715 repo, tr, txnname=b'pull', as_validator=True
1716 )
1716 )
1717 reporef = weakref.ref(repo.unfiltered())
1717 reporef = weakref.ref(repo.unfiltered())
1718
1718
1719 def prompt(tr):
1719 def prompt(tr):
1720 repo = reporef()
1720 repo = reporef()
1721 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1721 cm = _(b'accept incoming changes (yn)?$$ &Yes $$ &No')
1722 if repo.ui.promptchoice(cm):
1722 if repo.ui.promptchoice(cm):
1723 raise error.Abort("user aborted")
1723 raise error.Abort(b"user aborted")
1724
1724
1725 tr.addvalidator(b'900-pull-prompt', prompt)
1725 tr.addvalidator(b'900-pull-prompt', prompt)
1726
1726
1727
1727
1728 def pull(
1728 def pull(
1729 repo,
1729 repo,
1730 remote,
1730 remote,
1731 heads=None,
1731 heads=None,
1732 force=False,
1732 force=False,
1733 bookmarks=(),
1733 bookmarks=(),
1734 opargs=None,
1734 opargs=None,
1735 streamclonerequested=None,
1735 streamclonerequested=None,
1736 includepats=None,
1736 includepats=None,
1737 excludepats=None,
1737 excludepats=None,
1738 depth=None,
1738 depth=None,
1739 confirm=None,
1739 confirm=None,
1740 ):
1740 ):
1741 """Fetch repository data from a remote.
1741 """Fetch repository data from a remote.
1742
1742
1743 This is the main function used to retrieve data from a remote repository.
1743 This is the main function used to retrieve data from a remote repository.
1744
1744
1745 ``repo`` is the local repository to clone into.
1745 ``repo`` is the local repository to clone into.
1746 ``remote`` is a peer instance.
1746 ``remote`` is a peer instance.
1747 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1747 ``heads`` is an iterable of revisions we want to pull. ``None`` (the
1748 default) means to pull everything from the remote.
1748 default) means to pull everything from the remote.
1749 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1749 ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
1750 default, all remote bookmarks are pulled.
1750 default, all remote bookmarks are pulled.
1751 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1751 ``opargs`` are additional keyword arguments to pass to ``pulloperation``
1752 initialization.
1752 initialization.
1753 ``streamclonerequested`` is a boolean indicating whether a "streaming
1753 ``streamclonerequested`` is a boolean indicating whether a "streaming
1754 clone" is requested. A "streaming clone" is essentially a raw file copy
1754 clone" is requested. A "streaming clone" is essentially a raw file copy
1755 of revlogs from the server. This only works when the local repository is
1755 of revlogs from the server. This only works when the local repository is
1756 empty. The default value of ``None`` means to respect the server
1756 empty. The default value of ``None`` means to respect the server
1757 configuration for preferring stream clones.
1757 configuration for preferring stream clones.
1758 ``includepats`` and ``excludepats`` define explicit file patterns to
1758 ``includepats`` and ``excludepats`` define explicit file patterns to
1759 include and exclude in storage, respectively. If not defined, narrow
1759 include and exclude in storage, respectively. If not defined, narrow
1760 patterns from the repo instance are used, if available.
1760 patterns from the repo instance are used, if available.
1761 ``depth`` is an integer indicating the DAG depth of history we're
1761 ``depth`` is an integer indicating the DAG depth of history we're
1762 interested in. If defined, for each revision specified in ``heads``, we
1762 interested in. If defined, for each revision specified in ``heads``, we
1763 will fetch up to this many of its ancestors and data associated with them.
1763 will fetch up to this many of its ancestors and data associated with them.
1764 ``confirm`` is a boolean indicating whether the pull should be confirmed
1764 ``confirm`` is a boolean indicating whether the pull should be confirmed
1765 before committing the transaction. This overrides HGPLAIN.
1765 before committing the transaction. This overrides HGPLAIN.
1766
1766
1767 Returns the ``pulloperation`` created for this pull.
1767 Returns the ``pulloperation`` created for this pull.
1768 """
1768 """
1769 if opargs is None:
1769 if opargs is None:
1770 opargs = {}
1770 opargs = {}
1771
1771
1772 # We allow the narrow patterns to be passed in explicitly to provide more
1772 # We allow the narrow patterns to be passed in explicitly to provide more
1773 # flexibility for API consumers.
1773 # flexibility for API consumers.
1774 if includepats or excludepats:
1774 if includepats or excludepats:
1775 includepats = includepats or set()
1775 includepats = includepats or set()
1776 excludepats = excludepats or set()
1776 excludepats = excludepats or set()
1777 else:
1777 else:
1778 includepats, excludepats = repo.narrowpats
1778 includepats, excludepats = repo.narrowpats
1779
1779
1780 narrowspec.validatepatterns(includepats)
1780 narrowspec.validatepatterns(includepats)
1781 narrowspec.validatepatterns(excludepats)
1781 narrowspec.validatepatterns(excludepats)
1782
1782
1783 pullop = pulloperation(
1783 pullop = pulloperation(
1784 repo,
1784 repo,
1785 remote,
1785 remote,
1786 heads,
1786 heads,
1787 force,
1787 force,
1788 bookmarks=bookmarks,
1788 bookmarks=bookmarks,
1789 streamclonerequested=streamclonerequested,
1789 streamclonerequested=streamclonerequested,
1790 includepats=includepats,
1790 includepats=includepats,
1791 excludepats=excludepats,
1791 excludepats=excludepats,
1792 depth=depth,
1792 depth=depth,
1793 **pycompat.strkwargs(opargs)
1793 **pycompat.strkwargs(opargs)
1794 )
1794 )
1795
1795
1796 peerlocal = pullop.remote.local()
1796 peerlocal = pullop.remote.local()
1797 if peerlocal:
1797 if peerlocal:
1798 missing = set(peerlocal.requirements) - pullop.repo.supported
1798 missing = set(peerlocal.requirements) - pullop.repo.supported
1799 if missing:
1799 if missing:
1800 msg = _(
1800 msg = _(
1801 b"required features are not"
1801 b"required features are not"
1802 b" supported in the destination:"
1802 b" supported in the destination:"
1803 b" %s"
1803 b" %s"
1804 ) % (b', '.join(sorted(missing)))
1804 ) % (b', '.join(sorted(missing)))
1805 raise error.Abort(msg)
1805 raise error.Abort(msg)
1806
1806
1807 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1807 pullop.trmanager = transactionmanager(repo, b'pull', remote.url())
1808 wlock = util.nullcontextmanager()
1808 wlock = util.nullcontextmanager()
1809 if not bookmod.bookmarksinstore(repo):
1809 if not bookmod.bookmarksinstore(repo):
1810 wlock = repo.wlock()
1810 wlock = repo.wlock()
1811 with wlock, repo.lock(), pullop.trmanager:
1811 with wlock, repo.lock(), pullop.trmanager:
1812 if confirm or (
1812 if confirm or (
1813 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1813 repo.ui.configbool(b"pull", b"confirm") and not repo.ui.plain()
1814 ):
1814 ):
1815 add_confirm_callback(repo, pullop)
1815 add_confirm_callback(repo, pullop)
1816
1816
1817 # Use the modern wire protocol, if available.
1817 # Use the modern wire protocol, if available.
1818 if remote.capable(b'command-changesetdata'):
1818 if remote.capable(b'command-changesetdata'):
1819 exchangev2.pull(pullop)
1819 exchangev2.pull(pullop)
1820 else:
1820 else:
1821 # This should ideally be in _pullbundle2(). However, it needs to run
1821 # This should ideally be in _pullbundle2(). However, it needs to run
1822 # before discovery to avoid extra work.
1822 # before discovery to avoid extra work.
1823 _maybeapplyclonebundle(pullop)
1823 _maybeapplyclonebundle(pullop)
1824 streamclone.maybeperformlegacystreamclone(pullop)
1824 streamclone.maybeperformlegacystreamclone(pullop)
1825 _pulldiscovery(pullop)
1825 _pulldiscovery(pullop)
1826 if pullop.canusebundle2:
1826 if pullop.canusebundle2:
1827 _fullpullbundle2(repo, pullop)
1827 _fullpullbundle2(repo, pullop)
1828 _pullchangeset(pullop)
1828 _pullchangeset(pullop)
1829 _pullphase(pullop)
1829 _pullphase(pullop)
1830 _pullbookmarks(pullop)
1830 _pullbookmarks(pullop)
1831 _pullobsolete(pullop)
1831 _pullobsolete(pullop)
1832
1832
1833 # storing remotenames
1833 # storing remotenames
1834 if repo.ui.configbool(b'experimental', b'remotenames'):
1834 if repo.ui.configbool(b'experimental', b'remotenames'):
1835 logexchange.pullremotenames(repo, remote)
1835 logexchange.pullremotenames(repo, remote)
1836
1836
1837 return pullop
1837 return pullop
1838
1838
1839
1839
1840 # list of steps to perform discovery before pull
1840 # list of steps to perform discovery before pull
1841 pulldiscoveryorder = []
1841 pulldiscoveryorder = []
1842
1842
1843 # Mapping between step name and function
1843 # Mapping between step name and function
1844 #
1844 #
1845 # This exists to help extensions wrap steps if necessary
1845 # This exists to help extensions wrap steps if necessary
1846 pulldiscoverymapping = {}
1846 pulldiscoverymapping = {}
1847
1847
1848
1848
def pulldiscovery(stepname):
    """Decorator registering a pre-pull discovery step under ``stepname``.

    The decorated function is recorded in the step -> function mapping and
    its name appended to the ordered step list, so registration order is
    execution order (this may matter).

    Only use this decorator for a brand new step; to wrap an existing step
    from an extension, mutate the pulldiscovery mapping directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in pulldiscoverymapping
        pulldiscoverymapping[stepname] = func
        pulldiscoveryorder.append(stepname)
        return func

    return register
1866
1866
1867
1867
def _pulldiscovery(pullop):
    """Execute every registered discovery step, in registration order."""
    for name in pulldiscoveryorder:
        pulldiscoverymapping[name](pullop)
1873
1873
1874
1874
@pulldiscovery(b'b1:bookmarks')
def _pullbookmarkbundle1(pullop):
    """Fetch bookmark data early when bundle2 is not in use.

    Without bundle2, bookmarks must be fetched before changeset discovery
    to reduce the chance and impact of race conditions."""
    if pullop.remotebookmarks is not None:
        # bookmark data already available; nothing to fetch
        return
    if pullop.canusebundle2 and b'listkeys' in pullop.remotebundle2caps:
        # all known bundle2 servers now support listkeys, but lets be nice with
        # new implementation.
        return
    raw = listkeys(pullop.remote, b'bookmarks')
    pullop.remotebookmarks = bookmod.unhexlifybookmarks(raw)
1889
1889
1890
1890
@pulldiscovery(b'changegroup')
def _pulldiscoverychangegroup(pullop):
    """Discovery phase for the pull.

    Currently handles changeset discovery only; will eventually handle all
    discovery."""
    common, fetch, rheads = discovery.findcommonincoming(
        pullop.repo, pullop.remote, heads=pullop.heads, force=pullop.force
    )
    has_node = pullop.repo.unfiltered().changelog.index.has_node
    if fetch and rheads:
        # When a remote head is filtered locally, put it back into common.
        #
        # This is a hackish solution to catch most "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because that ends up doing a pathological amount of
        # round trips for a huge number of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but does not include a remote head, we'll not be able to
        # detect it.
        known_common = set(common)
        for node in rheads:
            if has_node(node) and node not in known_common:
                common.append(node)
        if set(rheads).issubset(set(common)):
            fetch = []
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads
1922
1922
1923
1923
def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup.

    Builds the `getbundle` argument dict from the pull operation state
    (changegroup, phases, bookmarks, obsmarkers, narrow patterns), fetches
    the bundle from the remote and processes it, then applies any phase and
    bookmark records carried by the bundle."""
    kwargs = {b'bundlecaps': caps20to10(pullop.repo, role=b'client')}

    # make ui easier to access
    ui = pullop.repo.ui

    # At the moment we don't do stream clones over bundle2. If that is
    # implemented then here's where the check for that will go.
    streaming = streamclone.canperformstreamclone(pullop, bundle2=True)[0]

    # declare pull perimeters
    kwargs[b'common'] = pullop.common
    kwargs[b'heads'] = pullop.heads or pullop.rheads

    # check server supports narrow and then adding includepats and excludepats
    servernarrow = pullop.remote.capable(wireprototypes.NARROWCAP)
    if servernarrow and pullop.includepats:
        kwargs[b'includepats'] = pullop.includepats
    if servernarrow and pullop.excludepats:
        kwargs[b'excludepats'] = pullop.excludepats

    if streaming:
        # stream clone replaces the changegroup and phase steps entirely
        kwargs[b'cg'] = False
        kwargs[b'stream'] = True
        pullop.stepsdone.add(b'changegroup')
        pullop.stepsdone.add(b'phases')

    else:
        # pulling changegroup
        pullop.stepsdone.add(b'changegroup')

        kwargs[b'cg'] = pullop.fetch

        legacyphase = b'phases' in ui.configlist(b'devel', b'legacy.exchange')
        hasbinaryphase = b'heads' in pullop.remotebundle2caps.get(b'phases', ())
        if not legacyphase and hasbinaryphase:
            # prefer the binary phase-heads part over legacy listkeys
            kwargs[b'phases'] = True
            pullop.stepsdone.add(b'phases')

        if b'listkeys' in pullop.remotebundle2caps:
            if b'phases' not in pullop.stepsdone:
                # fall back to pushkey-based phase exchange
                kwargs[b'listkeys'] = [b'phases']

    bookmarksrequested = False
    legacybookmark = b'bookmarks' in ui.configlist(b'devel', b'legacy.exchange')
    hasbinarybook = b'bookmarks' in pullop.remotebundle2caps

    if pullop.remotebookmarks is not None:
        # bookmark data was already fetched (e.g. by bundle1 discovery)
        pullop.stepsdone.add(b'request-bookmarks')

    if (
        b'request-bookmarks' not in pullop.stepsdone
        and pullop.remotebookmarks is None
        and not legacybookmark
        and hasbinarybook
    ):
        kwargs[b'bookmarks'] = True
        bookmarksrequested = True

    if b'listkeys' in pullop.remotebundle2caps:
        if b'request-bookmarks' not in pullop.stepsdone:
            # make sure to always includes bookmark data when migrating
            # `hg incoming --bundle` to using this function.
            pullop.stepsdone.add(b'request-bookmarks')
            kwargs.setdefault(b'listkeys', []).append(b'bookmarks')

    # If this is a full pull / clone and the server supports the clone bundles
    # feature, tell the server whether we attempted a clone bundle. The
    # presence of this flag indicates the client supports clone bundles. This
    # will enable the server to treat clients that support clone bundles
    # differently from those that don't.
    if (
        pullop.remote.capable(b'clonebundles')
        and pullop.heads is None
        and list(pullop.common) == [nullid]
    ):
        kwargs[b'cbattempted'] = pullop.clonebundleattempted

    if streaming:
        pullop.repo.ui.status(_(b'streaming all changes\n'))
    elif not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
    else:
        if pullop.heads is None and list(pullop.common) == [nullid]:
            pullop.repo.ui.status(_(b"requesting all changes\n"))
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps)
        if obsolete.commonversion(remoteversions) is not None:
            # remote speaks a compatible obsmarker format version
            kwargs[b'obsmarkers'] = True
            pullop.stepsdone.add(b'obsmarkers')
    # give extensions a chance to amend the request arguments
    _pullbundle2extraprepare(pullop, kwargs)

    with pullop.remote.commandexecutor() as e:
        args = dict(kwargs)
        args[b'source'] = b'pull'
        bundle = e.callcommand(b'getbundle', args).result()

    try:
        op = bundle2.bundleoperation(
            pullop.repo, pullop.gettransaction, source=b'pull'
        )
        # record bookmark parts instead of applying them directly
        op.modes[b'bookmarks'] = b'records'
        bundle2.processbundle(pullop.repo, bundle, op=op)
    except bundle2.AbortFromPart as exc:
        pullop.repo.ui.status(_(b'remote: abort: %s\n') % exc)
        raise error.Abort(_(b'pull failed on remote'), hint=exc.hint)
    except error.BundleValueError as exc:
        raise error.Abort(_(b'missing support for %s') % exc)

    if pullop.fetch:
        pullop.cgresult = bundle2.combinechangegroupresults(op)

    # processing phases change
    for namespace, value in op.records[b'listkeys']:
        if namespace == b'phases':
            _pullapplyphases(pullop, value)

    # processing bookmark update
    if bookmarksrequested:
        # bookmarks arrived as binary part records
        books = {}
        for record in op.records[b'bookmarks']:
            books[record[b'bookmark']] = record[b"node"]
        pullop.remotebookmarks = books
    else:
        # bookmarks arrived (if at all) via legacy listkeys
        for namespace, value in op.records[b'listkeys']:
            if namespace == b'bookmarks':
                pullop.remotebookmarks = bookmod.unhexlifybookmarks(value)

    # bookmark data were either already there or pulled in the bundle
    if pullop.remotebookmarks is not None:
        _pullbookmarks(pullop)
2059
2059
2060
2060
2061 def _pullbundle2extraprepare(pullop, kwargs):
2061 def _pullbundle2extraprepare(pullop, kwargs):
2062 """hook function so that extensions can extend the getbundle call"""
2062 """hook function so that extensions can extend the getbundle call"""
2063
2063
2064
2064
def _pullchangeset(pullop):
    """pull changeset from unbundle into the local repo

    Selects the best changegroup-fetching wire command the remote supports
    (`getbundle`, full `changegroup`, or `changegroupsubset`), applies the
    resulting bundle, and stores the changegroup result on ``pullop``."""
    # We delay the open of the transaction as late as possible so we
    # don't open transaction for nothing or you break future useful
    # rollback call
    if b'changegroup' in pullop.stepsdone:
        return
    pullop.stepsdone.add(b'changegroup')
    if not pullop.fetch:
        pullop.repo.ui.status(_(b"no changes found\n"))
        pullop.cgresult = 0
        return
    tr = pullop.gettransaction()
    if pullop.heads is None and list(pullop.common) == [nullid]:
        pullop.repo.ui.status(_(b"requesting all changes\n"))
    elif pullop.heads is None and pullop.remote.capable(b'changegroupsubset'):
        # issue1320, avoid a race if remote changed after discovery
        pullop.heads = pullop.rheads

    if pullop.remote.capable(b'getbundle'):
        # TODO: get bundlecaps from remote
        cg = pullop.remote.getbundle(
            b'pull', common=pullop.common, heads=pullop.heads or pullop.rheads
        )
    elif pullop.heads is None:
        # no getbundle and no specific heads requested: full changegroup
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroup', {b'nodes': pullop.fetch, b'source': b'pull',}
            ).result()

    elif not pullop.remote.capable(b'changegroupsubset'):
        # partial pull requested but the remote cannot serve subsets
        raise error.Abort(
            _(
                b"partial pull cannot be done because "
                b"other repository doesn't support "
                b"changegroupsubset."
            )
        )
    else:
        with pullop.remote.commandexecutor() as e:
            cg = e.callcommand(
                b'changegroupsubset',
                {
                    b'bases': pullop.fetch,
                    b'heads': pullop.heads,
                    b'source': b'pull',
                },
            ).result()

    bundleop = bundle2.applybundle(
        pullop.repo, cg, tr, b'pull', pullop.remote.url()
    )
    pullop.cgresult = bundle2.combinechangegroupresults(bundleop)
2118
2118
2119
2119
2120 def _pullphase(pullop):
2120 def _pullphase(pullop):
2121 # Get remote phases data from remote
2121 # Get remote phases data from remote
2122 if b'phases' in pullop.stepsdone:
2122 if b'phases' in pullop.stepsdone:
2123 return
2123 return
2124 remotephases = listkeys(pullop.remote, b'phases')
2124 remotephases = listkeys(pullop.remote, b'phases')
2125 _pullapplyphases(pullop, remotephases)
2125 _pullapplyphases(pullop, remotephases)
2126
2126
2127
2127
2128 def _pullapplyphases(pullop, remotephases):
2128 def _pullapplyphases(pullop, remotephases):
2129 """apply phase movement from observed remote state"""
2129 """apply phase movement from observed remote state"""
2130 if b'phases' in pullop.stepsdone:
2130 if b'phases' in pullop.stepsdone:
2131 return
2131 return
2132 pullop.stepsdone.add(b'phases')
2132 pullop.stepsdone.add(b'phases')
2133 publishing = bool(remotephases.get(b'publishing', False))
2133 publishing = bool(remotephases.get(b'publishing', False))
2134 if remotephases and not publishing:
2134 if remotephases and not publishing:
2135 # remote is new and non-publishing
2135 # remote is new and non-publishing
2136 pheads, _dr = phases.analyzeremotephases(
2136 pheads, _dr = phases.analyzeremotephases(
2137 pullop.repo, pullop.pulledsubset, remotephases
2137 pullop.repo, pullop.pulledsubset, remotephases
2138 )
2138 )
2139 dheads = pullop.pulledsubset
2139 dheads = pullop.pulledsubset
2140 else:
2140 else:
2141 # Remote is old or publishing all common changesets
2141 # Remote is old or publishing all common changesets
2142 # should be seen as public
2142 # should be seen as public
2143 pheads = pullop.pulledsubset
2143 pheads = pullop.pulledsubset
2144 dheads = []
2144 dheads = []
2145 unfi = pullop.repo.unfiltered()
2145 unfi = pullop.repo.unfiltered()
2146 phase = unfi._phasecache.phase
2146 phase = unfi._phasecache.phase
2147 rev = unfi.changelog.index.get_rev
2147 rev = unfi.changelog.index.get_rev
2148 public = phases.public
2148 public = phases.public
2149 draft = phases.draft
2149 draft = phases.draft
2150
2150
2151 # exclude changesets already public locally and update the others
2151 # exclude changesets already public locally and update the others
2152 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2152 pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public]
2153 if pheads:
2153 if pheads:
2154 tr = pullop.gettransaction()
2154 tr = pullop.gettransaction()
2155 phases.advanceboundary(pullop.repo, tr, public, pheads)
2155 phases.advanceboundary(pullop.repo, tr, public, pheads)
2156
2156
2157 # exclude changesets already draft locally and update the others
2157 # exclude changesets already draft locally and update the others
2158 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2158 dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft]
2159 if dheads:
2159 if dheads:
2160 tr = pullop.gettransaction()
2160 tr = pullop.gettransaction()
2161 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2161 phases.advanceboundary(pullop.repo, tr, draft, dheads)
2162
2162
2163
2163
2164 def _pullbookmarks(pullop):
2164 def _pullbookmarks(pullop):
2165 """process the remote bookmark information to update the local one"""
2165 """process the remote bookmark information to update the local one"""
2166 if b'bookmarks' in pullop.stepsdone:
2166 if b'bookmarks' in pullop.stepsdone:
2167 return
2167 return
2168 pullop.stepsdone.add(b'bookmarks')
2168 pullop.stepsdone.add(b'bookmarks')
2169 repo = pullop.repo
2169 repo = pullop.repo
2170 remotebookmarks = pullop.remotebookmarks
2170 remotebookmarks = pullop.remotebookmarks
2171 bookmod.updatefromremote(
2171 bookmod.updatefromremote(
2172 repo.ui,
2172 repo.ui,
2173 repo,
2173 repo,
2174 remotebookmarks,
2174 remotebookmarks,
2175 pullop.remote.url(),
2175 pullop.remote.url(),
2176 pullop.gettransaction,
2176 pullop.gettransaction,
2177 explicit=pullop.explicitbookmarks,
2177 explicit=pullop.explicitbookmarks,
2178 )
2178 )
2179
2179
2180
2180
2181 def _pullobsolete(pullop):
2181 def _pullobsolete(pullop):
2182 """utility function to pull obsolete markers from a remote
2182 """utility function to pull obsolete markers from a remote
2183
2183
2184 The `gettransaction` is function that return the pull transaction, creating
2184 The `gettransaction` is function that return the pull transaction, creating
2185 one if necessary. We return the transaction to inform the calling code that
2185 one if necessary. We return the transaction to inform the calling code that
2186 a new transaction have been created (when applicable).
2186 a new transaction have been created (when applicable).
2187
2187
2188 Exists mostly to allow overriding for experimentation purpose"""
2188 Exists mostly to allow overriding for experimentation purpose"""
2189 if b'obsmarkers' in pullop.stepsdone:
2189 if b'obsmarkers' in pullop.stepsdone:
2190 return
2190 return
2191 pullop.stepsdone.add(b'obsmarkers')
2191 pullop.stepsdone.add(b'obsmarkers')
2192 tr = None
2192 tr = None
2193 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2193 if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
2194 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2194 pullop.repo.ui.debug(b'fetching remote obsolete markers\n')
2195 remoteobs = listkeys(pullop.remote, b'obsolete')
2195 remoteobs = listkeys(pullop.remote, b'obsolete')
2196 if b'dump0' in remoteobs:
2196 if b'dump0' in remoteobs:
2197 tr = pullop.gettransaction()
2197 tr = pullop.gettransaction()
2198 markers = []
2198 markers = []
2199 for key in sorted(remoteobs, reverse=True):
2199 for key in sorted(remoteobs, reverse=True):
2200 if key.startswith(b'dump'):
2200 if key.startswith(b'dump'):
2201 data = util.b85decode(remoteobs[key])
2201 data = util.b85decode(remoteobs[key])
2202 version, newmarks = obsolete._readmarkers(data)
2202 version, newmarks = obsolete._readmarkers(data)
2203 markers += newmarks
2203 markers += newmarks
2204 if markers:
2204 if markers:
2205 pullop.repo.obsstore.add(tr, markers)
2205 pullop.repo.obsstore.add(tr, markers)
2206 pullop.repo.invalidatevolatilesets()
2206 pullop.repo.invalidatevolatilesets()
2207 return tr
2207 return tr
2208
2208
2209
2209
def applynarrowacl(repo, kwargs):
    """Apply narrow fetch access control.

    This massages the named arguments for getbundle wire protocol commands
    so requested data is filtered through access control rules.

    Raises error.Abort when the user has no include configuration or
    requests patterns outside their allowed set. Returns a new argument
    dict; ``kwargs`` itself is not mutated.
    """
    ui = repo.ui
    # TODO this assumes existence of HTTP and is a layering violation.
    username = ui.shortuser(ui.environ.get(b'REMOTE_USER') or ui.username())
    # per-user include/exclude lists, falling back to the defaults
    user_includes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.includes',
        ui.configlist(_NARROWACL_SECTION, b'default.includes'),
    )
    user_excludes = ui.configlist(
        _NARROWACL_SECTION,
        username + b'.excludes',
        ui.configlist(_NARROWACL_SECTION, b'default.excludes'),
    )
    if not user_includes:
        raise error.Abort(
            _(b"%s configuration for user %s is empty")
            % (_NARROWACL_SECTION, username)
        )

    # normalize to matcher patterns; b'*' means "everything" (path:.)
    user_includes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_includes
    ]
    user_excludes = [
        b'path:.' if p == b'*' else b'path:' + p for p in user_excludes
    ]

    req_includes = set(kwargs.get('includepats', []))
    req_excludes = set(kwargs.get('excludepats', []))

    # intersect the request with what the ACL allows
    req_includes, req_excludes, invalid_includes = narrowspec.restrictpatterns(
        req_includes, req_excludes, user_includes, user_excludes
    )

    if invalid_includes:
        raise error.Abort(
            _(b"The following includes are not accessible for %s: %s")
            % (username, stringutil.pprint(invalid_includes))
        )

    new_args = {}
    new_args.update(kwargs)
    new_args['narrow'] = True
    new_args['narrow_acl'] = True
    new_args['includepats'] = req_includes
    if req_excludes:
        new_args['excludepats'] = req_excludes

    return new_args
2264
2264
2265
2265
2266 def _computeellipsis(repo, common, heads, known, match, depth=None):
2266 def _computeellipsis(repo, common, heads, known, match, depth=None):
2267 """Compute the shape of a narrowed DAG.
2267 """Compute the shape of a narrowed DAG.
2268
2268
2269 Args:
2269 Args:
2270 repo: The repository we're transferring.
2270 repo: The repository we're transferring.
2271 common: The roots of the DAG range we're transferring.
2271 common: The roots of the DAG range we're transferring.
2272 May be just [nullid], which means all ancestors of heads.
2272 May be just [nullid], which means all ancestors of heads.
2273 heads: The heads of the DAG range we're transferring.
2273 heads: The heads of the DAG range we're transferring.
2274 match: The narrowmatcher that allows us to identify relevant changes.
2274 match: The narrowmatcher that allows us to identify relevant changes.
2275 depth: If not None, only consider nodes to be full nodes if they are at
2275 depth: If not None, only consider nodes to be full nodes if they are at
2276 most depth changesets away from one of heads.
2276 most depth changesets away from one of heads.
2277
2277
2278 Returns:
2278 Returns:
2279 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2279 A tuple of (visitnodes, relevant_nodes, ellipsisroots) where:
2280
2280
2281 visitnodes: The list of nodes (either full or ellipsis) which
2281 visitnodes: The list of nodes (either full or ellipsis) which
2282 need to be sent to the client.
2282 need to be sent to the client.
2283 relevant_nodes: The set of changelog nodes which change a file inside
2283 relevant_nodes: The set of changelog nodes which change a file inside
2284 the narrowspec. The client needs these as non-ellipsis nodes.
2284 the narrowspec. The client needs these as non-ellipsis nodes.
2285 ellipsisroots: A dict of {rev: parents} that is used in
2285 ellipsisroots: A dict of {rev: parents} that is used in
2286 narrowchangegroup to produce ellipsis nodes with the
2286 narrowchangegroup to produce ellipsis nodes with the
2287 correct parents.
2287 correct parents.
2288 """
2288 """
2289 cl = repo.changelog
2289 cl = repo.changelog
2290 mfl = repo.manifestlog
2290 mfl = repo.manifestlog
2291
2291
2292 clrev = cl.rev
2292 clrev = cl.rev
2293
2293
2294 commonrevs = {clrev(n) for n in common} | {nullrev}
2294 commonrevs = {clrev(n) for n in common} | {nullrev}
2295 headsrevs = {clrev(n) for n in heads}
2295 headsrevs = {clrev(n) for n in heads}
2296
2296
2297 if depth:
2297 if depth:
2298 revdepth = {h: 0 for h in headsrevs}
2298 revdepth = {h: 0 for h in headsrevs}
2299
2299
2300 ellipsisheads = collections.defaultdict(set)
2300 ellipsisheads = collections.defaultdict(set)
2301 ellipsisroots = collections.defaultdict(set)
2301 ellipsisroots = collections.defaultdict(set)
2302
2302
2303 def addroot(head, curchange):
2303 def addroot(head, curchange):
2304 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2304 """Add a root to an ellipsis head, splitting heads with 3 roots."""
2305 ellipsisroots[head].add(curchange)
2305 ellipsisroots[head].add(curchange)
2306 # Recursively split ellipsis heads with 3 roots by finding the
2306 # Recursively split ellipsis heads with 3 roots by finding the
2307 # roots' youngest common descendant which is an elided merge commit.
2307 # roots' youngest common descendant which is an elided merge commit.
2308 # That descendant takes 2 of the 3 roots as its own, and becomes a
2308 # That descendant takes 2 of the 3 roots as its own, and becomes a
2309 # root of the head.
2309 # root of the head.
2310 while len(ellipsisroots[head]) > 2:
2310 while len(ellipsisroots[head]) > 2:
2311 child, roots = splithead(head)
2311 child, roots = splithead(head)
2312 splitroots(head, child, roots)
2312 splitroots(head, child, roots)
2313 head = child # Recurse in case we just added a 3rd root
2313 head = child # Recurse in case we just added a 3rd root
2314
2314
2315 def splitroots(head, child, roots):
2315 def splitroots(head, child, roots):
2316 ellipsisroots[head].difference_update(roots)
2316 ellipsisroots[head].difference_update(roots)
2317 ellipsisroots[head].add(child)
2317 ellipsisroots[head].add(child)
2318 ellipsisroots[child].update(roots)
2318 ellipsisroots[child].update(roots)
2319 ellipsisroots[child].discard(child)
2319 ellipsisroots[child].discard(child)
2320
2320
2321 def splithead(head):
2321 def splithead(head):
2322 r1, r2, r3 = sorted(ellipsisroots[head])
2322 r1, r2, r3 = sorted(ellipsisroots[head])
2323 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2323 for nr1, nr2 in ((r2, r3), (r1, r3), (r1, r2)):
2324 mid = repo.revs(
2324 mid = repo.revs(
2325 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2325 b'sort(merge() & %d::%d & %d::%d, -rev)', nr1, head, nr2, head
2326 )
2326 )
2327 for j in mid:
2327 for j in mid:
2328 if j == nr2:
2328 if j == nr2:
2329 return nr2, (nr1, nr2)
2329 return nr2, (nr1, nr2)
2330 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2330 if j not in ellipsisroots or len(ellipsisroots[j]) < 2:
2331 return j, (nr1, nr2)
2331 return j, (nr1, nr2)
2332 raise error.Abort(
2332 raise error.Abort(
2333 _(
2333 _(
2334 b'Failed to split up ellipsis node! head: %d, '
2334 b'Failed to split up ellipsis node! head: %d, '
2335 b'roots: %d %d %d'
2335 b'roots: %d %d %d'
2336 )
2336 )
2337 % (head, r1, r2, r3)
2337 % (head, r1, r2, r3)
2338 )
2338 )
2339
2339
2340 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2340 missing = list(cl.findmissingrevs(common=commonrevs, heads=headsrevs))
2341 visit = reversed(missing)
2341 visit = reversed(missing)
2342 relevant_nodes = set()
2342 relevant_nodes = set()
2343 visitnodes = [cl.node(m) for m in missing]
2343 visitnodes = [cl.node(m) for m in missing]
2344 required = set(headsrevs) | known
2344 required = set(headsrevs) | known
2345 for rev in visit:
2345 for rev in visit:
2346 clrev = cl.changelogrevision(rev)
2346 clrev = cl.changelogrevision(rev)
2347 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2347 ps = [prev for prev in cl.parentrevs(rev) if prev != nullrev]
2348 if depth is not None:
2348 if depth is not None:
2349 curdepth = revdepth[rev]
2349 curdepth = revdepth[rev]
2350 for p in ps:
2350 for p in ps:
2351 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2351 revdepth[p] = min(curdepth + 1, revdepth.get(p, depth + 1))
2352 needed = False
2352 needed = False
2353 shallow_enough = depth is None or revdepth[rev] <= depth
2353 shallow_enough = depth is None or revdepth[rev] <= depth
2354 if shallow_enough:
2354 if shallow_enough:
2355 curmf = mfl[clrev.manifest].read()
2355 curmf = mfl[clrev.manifest].read()
2356 if ps:
2356 if ps:
2357 # We choose to not trust the changed files list in
2357 # We choose to not trust the changed files list in
2358 # changesets because it's not always correct. TODO: could
2358 # changesets because it's not always correct. TODO: could
2359 # we trust it for the non-merge case?
2359 # we trust it for the non-merge case?
2360 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2360 p1mf = mfl[cl.changelogrevision(ps[0]).manifest].read()
2361 needed = bool(curmf.diff(p1mf, match))
2361 needed = bool(curmf.diff(p1mf, match))
2362 if not needed and len(ps) > 1:
2362 if not needed and len(ps) > 1:
2363 # For merge changes, the list of changed files is not
2363 # For merge changes, the list of changed files is not
2364 # helpful, since we need to emit the merge if a file
2364 # helpful, since we need to emit the merge if a file
2365 # in the narrow spec has changed on either side of the
2365 # in the narrow spec has changed on either side of the
2366 # merge. As a result, we do a manifest diff to check.
2366 # merge. As a result, we do a manifest diff to check.
2367 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2367 p2mf = mfl[cl.changelogrevision(ps[1]).manifest].read()
2368 needed = bool(curmf.diff(p2mf, match))
2368 needed = bool(curmf.diff(p2mf, match))
2369 else:
2369 else:
2370 # For a root node, we need to include the node if any
2370 # For a root node, we need to include the node if any
2371 # files in the node match the narrowspec.
2371 # files in the node match the narrowspec.
2372 needed = any(curmf.walk(match))
2372 needed = any(curmf.walk(match))
2373
2373
2374 if needed:
2374 if needed:
2375 for head in ellipsisheads[rev]:
2375 for head in ellipsisheads[rev]:
2376 addroot(head, rev)
2376 addroot(head, rev)
2377 for p in ps:
2377 for p in ps:
2378 required.add(p)
2378 required.add(p)
2379 relevant_nodes.add(cl.node(rev))
2379 relevant_nodes.add(cl.node(rev))
2380 else:
2380 else:
2381 if not ps:
2381 if not ps:
2382 ps = [nullrev]
2382 ps = [nullrev]
2383 if rev in required:
2383 if rev in required:
2384 for head in ellipsisheads[rev]:
2384 for head in ellipsisheads[rev]:
2385 addroot(head, rev)
2385 addroot(head, rev)
2386 for p in ps:
2386 for p in ps:
2387 ellipsisheads[p].add(rev)
2387 ellipsisheads[p].add(rev)
2388 else:
2388 else:
2389 for p in ps:
2389 for p in ps:
2390 ellipsisheads[p] |= ellipsisheads[rev]
2390 ellipsisheads[p] |= ellipsisheads[rev]
2391
2391
2392 # add common changesets as roots of their reachable ellipsis heads
2392 # add common changesets as roots of their reachable ellipsis heads
2393 for c in commonrevs:
2393 for c in commonrevs:
2394 for head in ellipsisheads[c]:
2394 for head in ellipsisheads[c]:
2395 addroot(head, c)
2395 addroot(head, c)
2396 return visitnodes, relevant_nodes, ellipsisroots
2396 return visitnodes, relevant_nodes, ellipsisroots
2397
2397
2398
2398
def caps20to10(repo, role):
    """Return the getbundle capability set advertising bundle20 support.

    The returned set holds the b'HG20' marker plus a URL-quoted blob
    describing the repository's bundle2 capabilities for ``role``.
    """
    repocaps = bundle2.getrepocaps(repo, role=role)
    encoded = bundle2.encodecaps(repocaps)
    return {b'HG20', b'bundle2=' + urlreq.quote(encoded)}
2405
2405
2406
2406
# Ordered list of step names to perform for a bundle2 getbundle; order matters.
getbundle2partsorder = []

# Mapping between step name and the function generating that part.
#
# Kept public so extensions can wrap individual steps if necessary.
getbundle2partsmapping = {}


def getbundle2partsgenerator(stepname, idx=None):
    """decorator registering a bundle2 part generator for getbundle

    Records the decorated function under ``stepname`` in
    ``getbundle2partsmapping`` and inserts the step into
    ``getbundle2partsorder`` (appended by default, or at position ``idx``
    when given). Beware that decorated functions are registered in
    definition order, and step order may matter.

    You can only use this decorator for new steps; if you want to wrap a
    step from an extension, modify the getbundle2partsmapping dictionary
    directly."""

    def register(func):
        # each step name may only be registered once
        assert stepname not in getbundle2partsmapping
        getbundle2partsmapping[stepname] = func
        if idx is None:
            getbundle2partsorder.append(stepname)
        else:
            getbundle2partsorder.insert(idx, stepname)
        return func

    return register
2436
2436
2437
2437
def bundle2requested(bundlecaps):
    """Tell whether the client capabilities request a bundle2 stream.

    ``bundlecaps`` is the iterable of capability strings sent by the
    client, or None when none were provided.
    """
    if bundlecaps is None:
        return False
    return any(cap.startswith(b'HG2') for cap in bundlecaps)
2442
2442
2443
2443
def getbundlechunks(
    repo, source, heads=None, common=None, bundlecaps=None, **kwargs
):
    """Return chunks constituting a bundle's raw data.

    Could be a bundle HG10 or a bundle HG20 depending on bundlecaps
    passed.

    Returns a 2-tuple of a dict with metadata about the generated bundle
    and an iterator over raw chunks (of varying sizes).
    """
    # normalize keyword names to bytes; all lookups below use b'...' keys
    kwargs = pycompat.byteskwargs(kwargs)
    info = {}
    usebundle2 = bundle2requested(bundlecaps)
    # bundle10 case
    if not usebundle2:
        if bundlecaps and not kwargs.get(b'cg', True):
            raise ValueError(
                _(b'request for bundle10 must include changegroup')
            )

        # bundle10 has no container format, so no extra arguments can be
        # honored; reject them explicitly instead of silently ignoring
        if kwargs:
            raise ValueError(
                _(b'unsupported getbundle arguments: %s')
                % b', '.join(sorted(kwargs.keys()))
            )
        outgoing = _computeoutgoing(repo, heads, common)
        info[b'bundleversion'] = 1
        return (
            info,
            changegroup.makestream(
                repo, outgoing, b'01', source, bundlecaps=bundlecaps
            ),
        )

    # bundle20 case
    info[b'bundleversion'] = 2
    # decode the bundle2 capabilities advertised by the client (quoted
    # blob inside the b'bundle2=' capability)
    b2caps = {}
    for bcaps in bundlecaps:
        if bcaps.startswith(b'bundle2='):
            blob = urlreq.unquote(bcaps[len(b'bundle2=') :])
            b2caps.update(bundle2.decodecaps(blob))
    bundler = bundle2.bundle20(repo.ui, b2caps)

    # make heads/common available to every part generator below
    kwargs[b'heads'] = heads
    kwargs[b'common'] = common

    # run every registered step in order; each step decides on its own
    # whether it has a part to contribute
    for name in getbundle2partsorder:
        func = getbundle2partsmapping[name]
        func(
            bundler,
            repo,
            source,
            bundlecaps=bundlecaps,
            b2caps=b2caps,
            **pycompat.strkwargs(kwargs)
        )

    info[b'prefercompressed'] = bundler.prefercompressed

    return info, bundler.getchunks()
2505
2505
2506
2506
@getbundle2partsgenerator(b'stream2')
def _getbundlestream2(bundler, repo, *args, **kwargs):
    """add a stream2 part to the requested bundle (delegates to bundle2)"""
    return bundle2.addpartbundlestream2(bundler, repo, **kwargs)
2510
2510
2511
2511
@getbundle2partsgenerator(b'changegroup')
def _getbundlechangegrouppart(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """add a changegroup part to the requested bundle"""
    # nothing to do when changegroups were not requested or the client
    # advertised no bundle2 capabilities at all
    if not kwargs.get('cg', True) or not b2caps:
        return

    version = b'01'
    cgversions = b2caps.get(b'changegroup')
    if cgversions:  # 3.1 and 3.2 ship with an empty value
        # intersect the client's versions with what we can produce and
        # pick the highest common one
        cgversions = [
            v
            for v in cgversions
            if v in changegroup.supportedoutgoingversions(repo)
        ]
        if not cgversions:
            raise error.Abort(_(b'no common changegroup version'))
        version = max(cgversions)

    outgoing = _computeoutgoing(repo, heads, common)
    if not outgoing.missing:
        return

    if kwargs.get('narrow', False):
        include = sorted(filter(bool, kwargs.get('includepats', [])))
        exclude = sorted(filter(bool, kwargs.get('excludepats', [])))
        matcher = narrowspec.match(repo.root, include=include, exclude=exclude)
    else:
        matcher = None

    cgstream = changegroup.makestream(
        repo, outgoing, version, source, bundlecaps=bundlecaps, matcher=matcher
    )

    part = bundler.newpart(b'changegroup', data=cgstream)
    if cgversions:
        part.addparam(b'version', version)

    part.addparam(b'nbchanges', b'%d' % len(outgoing.missing), mandatory=False)

    if scmutil.istreemanifest(repo):
        part.addparam(b'treemanifest', b'1')

    if b'exp-sidedata-flag' in repo.requirements:
        part.addparam(b'exp-sidedata', b'1')

    # ``include``/``exclude`` are only bound in the narrow branch above,
    # so this condition must keep the b'narrow' check first
    if (
        kwargs.get('narrow', False)
        and kwargs.get('narrow_acl', False)
        and (include or exclude)
    ):
        # this is mandatory because otherwise ACL clients won't work
        narrowspecpart = bundler.newpart(b'Narrow:responsespec')
        narrowspecpart.data = b'%s\0%s' % (
            b'\n'.join(include),
            b'\n'.join(exclude),
        )
2577
2577
2578
2578
@getbundle2partsgenerator(b'bookmarks')
def _getbundlebookmarkpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add a bookmark part to the requested bundle"""
    if not kwargs.get('bookmarks', False):
        return
    if not b2caps or b'bookmarks' not in b2caps:
        raise error.Abort(_(b'no common bookmarks exchange method'))
    # binary-encode the repository's bookmarks; skip the part when empty
    encoded = bookmod.binaryencode(bookmod.listbinbookmarks(repo))
    if encoded:
        bundler.newpart(b'bookmarks', data=encoded)
2592
2592
2593
2593
@getbundle2partsgenerator(b'listkeys')
def _getbundlelistkeysparts(
    bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs
):
    """add parts containing listkeys namespaces to the requested bundle"""
    # one b'listkeys' part per requested namespace
    for namespace in kwargs.get('listkeys', ()):
        part = bundler.newpart(b'listkeys')
        part.addparam(b'namespace', namespace)
        part.data = pushkey.encodekeys(repo.listkeys(namespace).items())
2605
2605
2606
2606
@getbundle2partsgenerator(b'obsmarkers')
def _getbundleobsmarkerpart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add an obsolescence markers part to the requested bundle"""
    if not kwargs.get('obsmarkers', False):
        return
    if heads is None:
        heads = repo.heads()
    # markers relevant to everything reachable from the requested heads
    subset = [c.node() for c in repo.set(b'::%ln', heads)]
    markers = obsutil.sortedmarkers(repo.obsstore.relevantmarkers(subset))
    bundle2.buildobsmarkerspart(bundler, markers)
2619
2619
2620
2620
@getbundle2partsgenerator(b'phases')
def _getbundlephasespart(
    bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs
):
    """add phase heads part to the requested bundle"""
    if kwargs.get('phases', False):
        if not b2caps or b'heads' not in b2caps.get(b'phases'):
            raise error.Abort(_(b'no common phases exchange method'))
        if heads is None:
            heads = repo.heads()

        headsbyphase = collections.defaultdict(set)
        if repo.publishing():
            # a publishing server treats everything it serves as public
            headsbyphase[phases.public] = heads
        else:
            # find the appropriate heads to move

            phase = repo._phasecache.phase
            node = repo.changelog.node
            rev = repo.changelog.rev
            for h in heads:
                headsbyphase[phase(repo, rev(h))].add(h)
            seenphases = list(headsbyphase.keys())

            # We do not handle anything but public and draft phase for now
            if seenphases:
                assert max(seenphases) <= phases.draft

            # if client is pulling non-public changesets, we need to find
            # intermediate public heads.
            draftheads = headsbyphase.get(phases.draft, set())
            if draftheads:
                publicheads = headsbyphase.get(phases.public, set())

                revset = b'heads(only(%ln, %ln) and public())'
                extraheads = repo.revs(revset, draftheads, publicheads)
                for r in extraheads:
                    headsbyphase[phases.public].add(node(r))

        # transform data in a format used by the encoding function
        # (sorted for a deterministic wire representation)
        phasemapping = {
            phase: sorted(headsbyphase[phase]) for phase in phases.allphases
        }

        # generate the actual part
        phasedata = phases.binaryencode(phasemapping)
        bundler.newpart(b'phase-heads', data=phasedata)
2668
2668
2669
2669
@getbundle2partsgenerator(b'hgtagsfnodes')
def _getbundletagsfnodes(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the .hgtags filenodes mapping.

    Only values for heads in this bundle will be transferred.

    The part data consists of pairs of 20 byte changeset node and .hgtags
    filenodes raw values.
    """
    # Only send when changesets are being exchanged and the client
    # advertises support for the part.
    wantcg = kwargs.get('cg', True)
    if not b2caps or not (wantcg and b'hgtagsfnodes' in b2caps):
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addparttagsfnodescache(repo, bundler, outgoing)
2696
2696
2697
2697
@getbundle2partsgenerator(b'cache:rev-branch-cache')
def _getbundlerevbranchcache(
    bundler,
    repo,
    source,
    bundlecaps=None,
    b2caps=None,
    heads=None,
    common=None,
    **kwargs
):
    """Transfer the rev-branch-cache mapping

    The payload is a series of data related to each branch

    1) branch name length
    2) number of open heads
    3) number of closed heads
    4) open heads nodes
    5) closed heads nodes
    """
    # Send only when changesets are being exchanged, the client supports
    # the part, and narrow bundles are not in play (not currently
    # compatible).
    sendable = (
        kwargs.get('cg', True)
        and b2caps
        and b'rev-branch-cache' in b2caps
        and not kwargs.get('narrow', False)
        and not repo.ui.has_section(_NARROWACL_SECTION)
    )
    if not sendable:
        return

    outgoing = _computeoutgoing(repo, heads, common)
    bundle2.addpartrevbranchcache(repo, bundler, outgoing)
2734
2734
2735
2735
def check_heads(repo, their_heads, context):
    """check if the heads of a repo have been modified

    Used by peer for unbundling.
    """
    heads = repo.heads()
    heads_hash = hashutil.sha1(b''.join(sorted(heads))).digest()
    # the client may send the literal heads, a hash of them, or b'force'
    # to skip the race check entirely
    if (
        their_heads == [b'force']
        or their_heads == heads
        or their_heads == [b'hashed', heads_hash]
    ):
        return
    # someone else committed/pushed/unbundled while we
    # were transferring data
    raise error.PushRaced(
        b'repository changed while %s - please try again' % context
    )
2753
2753
2754
2754
def unbundle(repo, cg, heads, source, url):
    """Apply a bundle to a repo.

    this function makes sure the repo is locked during the application and
    has a mechanism to check that no push race occurred between the creation
    of the bundle and its application.

    If the push was raced, a PushRaced exception is raised."""
    r = 0
    # need a transaction when processing a bundle2 stream
    # [wlock, lock, tr] - needs to be an array so nested functions can modify it
    lockandtr = [None, None, None]
    recordout = None
    # quick fix for output mismatch with bundle2 in 3.4
    captureoutput = repo.ui.configbool(
        b'experimental', b'bundle2-output-capture'
    )
    if url.startswith(b'remote:http:') or url.startswith(b'remote:https:'):
        captureoutput = True
    try:
        # note: outside bundle1, 'heads' is expected to be empty and this
        # 'check_heads' call will be a no-op
        check_heads(repo, heads, b'uploading changes')
        # push can proceed
        if not isinstance(cg, bundle2.unbundle20):
            # legacy case: bundle1 (changegroup 01)
            txnname = b"\n".join([source, util.hidepassword(url)])
            with repo.lock(), repo.transaction(txnname) as tr:
                op = bundle2.applybundle(repo, cg, tr, source, url)
                r = bundle2.combinechangegroupresults(op)
        else:
            r = None
            try:
                # lazily take locks and open the transaction on first use,
                # storing them in lockandtr so the outer finally can release
                def gettransaction():
                    if not lockandtr[2]:
                        if not bookmod.bookmarksinstore(repo):
                            lockandtr[0] = repo.wlock()
                        lockandtr[1] = repo.lock()
                        lockandtr[2] = repo.transaction(source)
                        lockandtr[2].hookargs[b'source'] = source
                        lockandtr[2].hookargs[b'url'] = url
                        lockandtr[2].hookargs[b'bundle2'] = b'1'
                    return lockandtr[2]

                # Do greedy locking by default until we're satisfied with lazy
                # locking.
                if not repo.ui.configbool(
                    b'experimental', b'bundle2lazylocking'
                ):
                    gettransaction()

                op = bundle2.bundleoperation(
                    repo,
                    gettransaction,
                    captureoutput=captureoutput,
                    source=b'push',
                )
                try:
                    op = bundle2.processbundle(repo, cg, op=op)
                finally:
                    r = op.reply
                    if captureoutput and r is not None:
                        # capture ui output into the reply bundle so the
                        # client sees it in order
                        repo.ui.pushbuffer(error=True, subproc=True)

                        def recordout(output):
                            r.newpart(b'output', data=output, mandatory=False)

                if lockandtr[2] is not None:
                    lockandtr[2].close()
            except BaseException as exc:
                exc.duringunbundle2 = True
                if captureoutput and r is not None:
                    # salvage already-generated output parts so they can be
                    # attached to the error response
                    parts = exc._bundle2salvagedoutput = r.salvageoutput()

                    def recordout(output):
                        part = bundle2.bundlepart(
                            b'output', data=output, mandatory=False
                        )
                        parts.append(part)

                raise
    finally:
        # release in reverse acquisition order: transaction, lock, wlock
        lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0])
        if recordout is not None:
            recordout(repo.ui.popbuffer())
    return r
2842
2842
2843
2843
2844 def _maybeapplyclonebundle(pullop):
2844 def _maybeapplyclonebundle(pullop):
2845 """Apply a clone bundle from a remote, if possible."""
2845 """Apply a clone bundle from a remote, if possible."""
2846
2846
2847 repo = pullop.repo
2847 repo = pullop.repo
2848 remote = pullop.remote
2848 remote = pullop.remote
2849
2849
2850 if not repo.ui.configbool(b'ui', b'clonebundles'):
2850 if not repo.ui.configbool(b'ui', b'clonebundles'):
2851 return
2851 return
2852
2852
2853 # Only run if local repo is empty.
2853 # Only run if local repo is empty.
2854 if len(repo):
2854 if len(repo):
2855 return
2855 return
2856
2856
2857 if pullop.heads:
2857 if pullop.heads:
2858 return
2858 return
2859
2859
2860 if not remote.capable(b'clonebundles'):
2860 if not remote.capable(b'clonebundles'):
2861 return
2861 return
2862
2862
2863 with remote.commandexecutor() as e:
2863 with remote.commandexecutor() as e:
2864 res = e.callcommand(b'clonebundles', {}).result()
2864 res = e.callcommand(b'clonebundles', {}).result()
2865
2865
2866 # If we call the wire protocol command, that's good enough to record the
2866 # If we call the wire protocol command, that's good enough to record the
2867 # attempt.
2867 # attempt.
2868 pullop.clonebundleattempted = True
2868 pullop.clonebundleattempted = True
2869
2869
2870 entries = parseclonebundlesmanifest(repo, res)
2870 entries = parseclonebundlesmanifest(repo, res)
2871 if not entries:
2871 if not entries:
2872 repo.ui.note(
2872 repo.ui.note(
2873 _(
2873 _(
2874 b'no clone bundles available on remote; '
2874 b'no clone bundles available on remote; '
2875 b'falling back to regular clone\n'
2875 b'falling back to regular clone\n'
2876 )
2876 )
2877 )
2877 )
2878 return
2878 return
2879
2879
2880 entries = filterclonebundleentries(
2880 entries = filterclonebundleentries(
2881 repo, entries, streamclonerequested=pullop.streamclonerequested
2881 repo, entries, streamclonerequested=pullop.streamclonerequested
2882 )
2882 )
2883
2883
2884 if not entries:
2884 if not entries:
2885 # There is a thundering herd concern here. However, if a server
2885 # There is a thundering herd concern here. However, if a server
2886 # operator doesn't advertise bundles appropriate for its clients,
2886 # operator doesn't advertise bundles appropriate for its clients,
2887 # they deserve what's coming. Furthermore, from a client's
2887 # they deserve what's coming. Furthermore, from a client's
2888 # perspective, no automatic fallback would mean not being able to
2888 # perspective, no automatic fallback would mean not being able to
2889 # clone!
2889 # clone!
2890 repo.ui.warn(
2890 repo.ui.warn(
2891 _(
2891 _(
2892 b'no compatible clone bundles available on server; '
2892 b'no compatible clone bundles available on server; '
2893 b'falling back to regular clone\n'
2893 b'falling back to regular clone\n'
2894 )
2894 )
2895 )
2895 )
2896 repo.ui.warn(
2896 repo.ui.warn(
2897 _(b'(you may want to report this to the server operator)\n')
2897 _(b'(you may want to report this to the server operator)\n')
2898 )
2898 )
2899 return
2899 return
2900
2900
2901 entries = sortclonebundleentries(repo.ui, entries)
2901 entries = sortclonebundleentries(repo.ui, entries)
2902
2902
2903 url = entries[0][b'URL']
2903 url = entries[0][b'URL']
2904 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2904 repo.ui.status(_(b'applying clone bundle from %s\n') % url)
2905 if trypullbundlefromurl(repo.ui, repo, url):
2905 if trypullbundlefromurl(repo.ui, repo, url):
2906 repo.ui.status(_(b'finished applying clone bundle\n'))
2906 repo.ui.status(_(b'finished applying clone bundle\n'))
2907 # Bundle failed.
2907 # Bundle failed.
2908 #
2908 #
2909 # We abort by default to avoid the thundering herd of
2909 # We abort by default to avoid the thundering herd of
2910 # clients flooding a server that was expecting expensive
2910 # clients flooding a server that was expecting expensive
2911 # clone load to be offloaded.
2911 # clone load to be offloaded.
2912 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2912 elif repo.ui.configbool(b'ui', b'clonebundlefallback'):
2913 repo.ui.warn(_(b'falling back to normal clone\n'))
2913 repo.ui.warn(_(b'falling back to normal clone\n'))
2914 else:
2914 else:
2915 raise error.Abort(
2915 raise error.Abort(
2916 _(b'error applying bundle'),
2916 _(b'error applying bundle'),
2917 hint=_(
2917 hint=_(
2918 b'if this error persists, consider contacting '
2918 b'if this error persists, consider contacting '
2919 b'the server operator or disable clone '
2919 b'the server operator or disable clone '
2920 b'bundles via '
2920 b'bundles via '
2921 b'"--config ui.clonebundles=false"'
2921 b'"--config ui.clonebundles=false"'
2922 ),
2922 ),
2923 )
2923 )
2924
2924
2925
2925
2926 def parseclonebundlesmanifest(repo, s):
2926 def parseclonebundlesmanifest(repo, s):
2927 """Parses the raw text of a clone bundles manifest.
2927 """Parses the raw text of a clone bundles manifest.
2928
2928
2929 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2929 Returns a list of dicts. The dicts have a ``URL`` key corresponding
2930 to the URL and other keys are the attributes for the entry.
2930 to the URL and other keys are the attributes for the entry.
2931 """
2931 """
2932 m = []
2932 m = []
2933 for line in s.splitlines():
2933 for line in s.splitlines():
2934 fields = line.split()
2934 fields = line.split()
2935 if not fields:
2935 if not fields:
2936 continue
2936 continue
2937 attrs = {b'URL': fields[0]}
2937 attrs = {b'URL': fields[0]}
2938 for rawattr in fields[1:]:
2938 for rawattr in fields[1:]:
2939 key, value = rawattr.split(b'=', 1)
2939 key, value = rawattr.split(b'=', 1)
2940 key = urlreq.unquote(key)
2940 key = urlreq.unquote(key)
2941 value = urlreq.unquote(value)
2941 value = urlreq.unquote(value)
2942 attrs[key] = value
2942 attrs[key] = value
2943
2943
2944 # Parse BUNDLESPEC into components. This makes client-side
2944 # Parse BUNDLESPEC into components. This makes client-side
2945 # preferences easier to specify since you can prefer a single
2945 # preferences easier to specify since you can prefer a single
2946 # component of the BUNDLESPEC.
2946 # component of the BUNDLESPEC.
2947 if key == b'BUNDLESPEC':
2947 if key == b'BUNDLESPEC':
2948 try:
2948 try:
2949 bundlespec = parsebundlespec(repo, value)
2949 bundlespec = parsebundlespec(repo, value)
2950 attrs[b'COMPRESSION'] = bundlespec.compression
2950 attrs[b'COMPRESSION'] = bundlespec.compression
2951 attrs[b'VERSION'] = bundlespec.version
2951 attrs[b'VERSION'] = bundlespec.version
2952 except error.InvalidBundleSpecification:
2952 except error.InvalidBundleSpecification:
2953 pass
2953 pass
2954 except error.UnsupportedBundleSpecification:
2954 except error.UnsupportedBundleSpecification:
2955 pass
2955 pass
2956
2956
2957 m.append(attrs)
2957 m.append(attrs)
2958
2958
2959 return m
2959 return m
2960
2960
2961
2961
2962 def isstreamclonespec(bundlespec):
2962 def isstreamclonespec(bundlespec):
2963 # Stream clone v1
2963 # Stream clone v1
2964 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
2964 if bundlespec.wirecompression == b'UN' and bundlespec.wireversion == b's1':
2965 return True
2965 return True
2966
2966
2967 # Stream clone v2
2967 # Stream clone v2
2968 if (
2968 if (
2969 bundlespec.wirecompression == b'UN'
2969 bundlespec.wirecompression == b'UN'
2970 and bundlespec.wireversion == b'02'
2970 and bundlespec.wireversion == b'02'
2971 and bundlespec.contentopts.get(b'streamv2')
2971 and bundlespec.contentopts.get(b'streamv2')
2972 ):
2972 ):
2973 return True
2973 return True
2974
2974
2975 return False
2975 return False
2976
2976
2977
2977
2978 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2978 def filterclonebundleentries(repo, entries, streamclonerequested=False):
2979 """Remove incompatible clone bundle manifest entries.
2979 """Remove incompatible clone bundle manifest entries.
2980
2980
2981 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2981 Accepts a list of entries parsed with ``parseclonebundlesmanifest``
2982 and returns a new list consisting of only the entries that this client
2982 and returns a new list consisting of only the entries that this client
2983 should be able to apply.
2983 should be able to apply.
2984
2984
2985 There is no guarantee we'll be able to apply all returned entries because
2985 There is no guarantee we'll be able to apply all returned entries because
2986 the metadata we use to filter on may be missing or wrong.
2986 the metadata we use to filter on may be missing or wrong.
2987 """
2987 """
2988 newentries = []
2988 newentries = []
2989 for entry in entries:
2989 for entry in entries:
2990 spec = entry.get(b'BUNDLESPEC')
2990 spec = entry.get(b'BUNDLESPEC')
2991 if spec:
2991 if spec:
2992 try:
2992 try:
2993 bundlespec = parsebundlespec(repo, spec, strict=True)
2993 bundlespec = parsebundlespec(repo, spec, strict=True)
2994
2994
2995 # If a stream clone was requested, filter out non-streamclone
2995 # If a stream clone was requested, filter out non-streamclone
2996 # entries.
2996 # entries.
2997 if streamclonerequested and not isstreamclonespec(bundlespec):
2997 if streamclonerequested and not isstreamclonespec(bundlespec):
2998 repo.ui.debug(
2998 repo.ui.debug(
2999 b'filtering %s because not a stream clone\n'
2999 b'filtering %s because not a stream clone\n'
3000 % entry[b'URL']
3000 % entry[b'URL']
3001 )
3001 )
3002 continue
3002 continue
3003
3003
3004 except error.InvalidBundleSpecification as e:
3004 except error.InvalidBundleSpecification as e:
3005 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3005 repo.ui.debug(stringutil.forcebytestr(e) + b'\n')
3006 continue
3006 continue
3007 except error.UnsupportedBundleSpecification as e:
3007 except error.UnsupportedBundleSpecification as e:
3008 repo.ui.debug(
3008 repo.ui.debug(
3009 b'filtering %s because unsupported bundle '
3009 b'filtering %s because unsupported bundle '
3010 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3010 b'spec: %s\n' % (entry[b'URL'], stringutil.forcebytestr(e))
3011 )
3011 )
3012 continue
3012 continue
3013 # If we don't have a spec and requested a stream clone, we don't know
3013 # If we don't have a spec and requested a stream clone, we don't know
3014 # what the entry is so don't attempt to apply it.
3014 # what the entry is so don't attempt to apply it.
3015 elif streamclonerequested:
3015 elif streamclonerequested:
3016 repo.ui.debug(
3016 repo.ui.debug(
3017 b'filtering %s because cannot determine if a stream '
3017 b'filtering %s because cannot determine if a stream '
3018 b'clone bundle\n' % entry[b'URL']
3018 b'clone bundle\n' % entry[b'URL']
3019 )
3019 )
3020 continue
3020 continue
3021
3021
3022 if b'REQUIRESNI' in entry and not sslutil.hassni:
3022 if b'REQUIRESNI' in entry and not sslutil.hassni:
3023 repo.ui.debug(
3023 repo.ui.debug(
3024 b'filtering %s because SNI not supported\n' % entry[b'URL']
3024 b'filtering %s because SNI not supported\n' % entry[b'URL']
3025 )
3025 )
3026 continue
3026 continue
3027
3027
3028 if b'REQUIREDRAM' in entry:
3028 if b'REQUIREDRAM' in entry:
3029 try:
3029 try:
3030 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3030 requiredram = util.sizetoint(entry[b'REQUIREDRAM'])
3031 except error.ParseError:
3031 except error.ParseError:
3032 repo.ui.debug(
3032 repo.ui.debug(
3033 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3033 b'filtering %s due to a bad REQUIREDRAM attribute\n'
3034 % entry[b'URL']
3034 % entry[b'URL']
3035 )
3035 )
3036 continue
3036 continue
3037 actualram = repo.ui.estimatememory()
3037 actualram = repo.ui.estimatememory()
3038 if actualram is not None and actualram * 0.66 < requiredram:
3038 if actualram is not None and actualram * 0.66 < requiredram:
3039 repo.ui.debug(
3039 repo.ui.debug(
3040 b'filtering %s as it needs more than 2/3 of system memory\n'
3040 b'filtering %s as it needs more than 2/3 of system memory\n'
3041 % entry[b'URL']
3041 % entry[b'URL']
3042 )
3042 )
3043 continue
3043 continue
3044
3044
3045 newentries.append(entry)
3045 newentries.append(entry)
3046
3046
3047 return newentries
3047 return newentries
3048
3048
3049
3049
3050 class clonebundleentry(object):
3050 class clonebundleentry(object):
3051 """Represents an item in a clone bundles manifest.
3051 """Represents an item in a clone bundles manifest.
3052
3052
3053 This rich class is needed to support sorting since sorted() in Python 3
3053 This rich class is needed to support sorting since sorted() in Python 3
3054 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3054 doesn't support ``cmp`` and our comparison is complex enough that ``key=``
3055 won't work.
3055 won't work.
3056 """
3056 """
3057
3057
3058 def __init__(self, value, prefers):
3058 def __init__(self, value, prefers):
3059 self.value = value
3059 self.value = value
3060 self.prefers = prefers
3060 self.prefers = prefers
3061
3061
3062 def _cmp(self, other):
3062 def _cmp(self, other):
3063 for prefkey, prefvalue in self.prefers:
3063 for prefkey, prefvalue in self.prefers:
3064 avalue = self.value.get(prefkey)
3064 avalue = self.value.get(prefkey)
3065 bvalue = other.value.get(prefkey)
3065 bvalue = other.value.get(prefkey)
3066
3066
3067 # Special case for b missing attribute and a matches exactly.
3067 # Special case for b missing attribute and a matches exactly.
3068 if avalue is not None and bvalue is None and avalue == prefvalue:
3068 if avalue is not None and bvalue is None and avalue == prefvalue:
3069 return -1
3069 return -1
3070
3070
3071 # Special case for a missing attribute and b matches exactly.
3071 # Special case for a missing attribute and b matches exactly.
3072 if bvalue is not None and avalue is None and bvalue == prefvalue:
3072 if bvalue is not None and avalue is None and bvalue == prefvalue:
3073 return 1
3073 return 1
3074
3074
3075 # We can't compare unless attribute present on both.
3075 # We can't compare unless attribute present on both.
3076 if avalue is None or bvalue is None:
3076 if avalue is None or bvalue is None:
3077 continue
3077 continue
3078
3078
3079 # Same values should fall back to next attribute.
3079 # Same values should fall back to next attribute.
3080 if avalue == bvalue:
3080 if avalue == bvalue:
3081 continue
3081 continue
3082
3082
3083 # Exact matches come first.
3083 # Exact matches come first.
3084 if avalue == prefvalue:
3084 if avalue == prefvalue:
3085 return -1
3085 return -1
3086 if bvalue == prefvalue:
3086 if bvalue == prefvalue:
3087 return 1
3087 return 1
3088
3088
3089 # Fall back to next attribute.
3089 # Fall back to next attribute.
3090 continue
3090 continue
3091
3091
3092 # If we got here we couldn't sort by attributes and prefers. Fall
3092 # If we got here we couldn't sort by attributes and prefers. Fall
3093 # back to index order.
3093 # back to index order.
3094 return 0
3094 return 0
3095
3095
3096 def __lt__(self, other):
3096 def __lt__(self, other):
3097 return self._cmp(other) < 0
3097 return self._cmp(other) < 0
3098
3098
3099 def __gt__(self, other):
3099 def __gt__(self, other):
3100 return self._cmp(other) > 0
3100 return self._cmp(other) > 0
3101
3101
3102 def __eq__(self, other):
3102 def __eq__(self, other):
3103 return self._cmp(other) == 0
3103 return self._cmp(other) == 0
3104
3104
3105 def __le__(self, other):
3105 def __le__(self, other):
3106 return self._cmp(other) <= 0
3106 return self._cmp(other) <= 0
3107
3107
3108 def __ge__(self, other):
3108 def __ge__(self, other):
3109 return self._cmp(other) >= 0
3109 return self._cmp(other) >= 0
3110
3110
3111 def __ne__(self, other):
3111 def __ne__(self, other):
3112 return self._cmp(other) != 0
3112 return self._cmp(other) != 0
3113
3113
3114
3114
3115 def sortclonebundleentries(ui, entries):
3115 def sortclonebundleentries(ui, entries):
3116 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3116 prefers = ui.configlist(b'ui', b'clonebundleprefers')
3117 if not prefers:
3117 if not prefers:
3118 return list(entries)
3118 return list(entries)
3119
3119
3120 def _split(p):
3120 def _split(p):
3121 if b'=' not in p:
3121 if b'=' not in p:
3122 hint = _(b"each comma separated item should be key=value pairs")
3122 hint = _(b"each comma separated item should be key=value pairs")
3123 raise error.Abort(
3123 raise error.Abort(
3124 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3124 _(b"invalid ui.clonebundleprefers item: %s") % p, hint=hint
3125 )
3125 )
3126 return p.split(b'=', 1)
3126 return p.split(b'=', 1)
3127
3127
3128 prefers = [_split(p) for p in prefers]
3128 prefers = [_split(p) for p in prefers]
3129
3129
3130 items = sorted(clonebundleentry(v, prefers) for v in entries)
3130 items = sorted(clonebundleentry(v, prefers) for v in entries)
3131 return [i.value for i in items]
3131 return [i.value for i in items]
3132
3132
3133
3133
3134 def trypullbundlefromurl(ui, repo, url):
3134 def trypullbundlefromurl(ui, repo, url):
3135 """Attempt to apply a bundle from a URL."""
3135 """Attempt to apply a bundle from a URL."""
3136 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3136 with repo.lock(), repo.transaction(b'bundleurl') as tr:
3137 try:
3137 try:
3138 fh = urlmod.open(ui, url)
3138 fh = urlmod.open(ui, url)
3139 cg = readbundle(ui, fh, b'stream')
3139 cg = readbundle(ui, fh, b'stream')
3140
3140
3141 if isinstance(cg, streamclone.streamcloneapplier):
3141 if isinstance(cg, streamclone.streamcloneapplier):
3142 cg.apply(repo)
3142 cg.apply(repo)
3143 else:
3143 else:
3144 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3144 bundle2.applybundle(repo, cg, tr, b'clonebundles', url)
3145 return True
3145 return True
3146 except urlerr.httperror as e:
3146 except urlerr.httperror as e:
3147 ui.warn(
3147 ui.warn(
3148 _(b'HTTP error fetching bundle: %s\n')
3148 _(b'HTTP error fetching bundle: %s\n')
3149 % stringutil.forcebytestr(e)
3149 % stringutil.forcebytestr(e)
3150 )
3150 )
3151 except urlerr.urlerror as e:
3151 except urlerr.urlerror as e:
3152 ui.warn(
3152 ui.warn(
3153 _(b'error fetching bundle: %s\n')
3153 _(b'error fetching bundle: %s\n')
3154 % stringutil.forcebytestr(e.reason)
3154 % stringutil.forcebytestr(e.reason)
3155 )
3155 )
3156
3156
3157 return False
3157 return False
@@ -1,20 +1,20 b''
1 # extension to emulate interrupting filemerge._filemerge
1 # extension to emulate interrupting filemerge._filemerge
2
2
3 from __future__ import absolute_import
3 from __future__ import absolute_import
4
4
5 from mercurial import (
5 from mercurial import (
6 error,
6 error,
7 extensions,
7 extensions,
8 filemerge,
8 filemerge,
9 )
9 )
10
10
11
11
12 def failfilemerge(
12 def failfilemerge(
13 filemergefn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
13 filemergefn, premerge, repo, wctx, mynode, orig, fcd, fco, fca, labels=None
14 ):
14 ):
15 raise error.Abort("^C")
15 raise error.Abort(b"^C")
16 return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
16 return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels)
17
17
18
18
19 def extsetup(ui):
19 def extsetup(ui):
20 extensions.wrapfunction(filemerge, '_filemerge', failfilemerge)
20 extensions.wrapfunction(filemerge, '_filemerge', failfilemerge)
@@ -1,1236 +1,1237 b''
1 This test is dedicated to test the bundle2 container format
1 This test is dedicated to test the bundle2 container format
2
2
3 It test multiple existing parts to test different feature of the container. You
3 It test multiple existing parts to test different feature of the container. You
4 probably do not need to touch this test unless you change the binary encoding
4 probably do not need to touch this test unless you change the binary encoding
5 of the bundle2 format itself.
5 of the bundle2 format itself.
6
6
7 Create an extension to test bundle2 API
7 Create an extension to test bundle2 API
8
8
9 $ cat > bundle2.py << EOF
9 $ cat > bundle2.py << EOF
10 > """A small extension to test bundle2 implementation
10 > """A small extension to test bundle2 implementation
11 >
11 >
12 > This extension allows detailed testing of the various bundle2 API and
12 > This extension allows detailed testing of the various bundle2 API and
13 > behaviors.
13 > behaviors.
14 > """
14 > """
15 > import gc
15 > import gc
16 > import os
16 > import os
17 > import sys
17 > import sys
18 > from mercurial import util
18 > from mercurial import util
19 > from mercurial import bundle2
19 > from mercurial import bundle2
20 > from mercurial import scmutil
20 > from mercurial import scmutil
21 > from mercurial import discovery
21 > from mercurial import discovery
22 > from mercurial import changegroup
22 > from mercurial import changegroup
23 > from mercurial import error
23 > from mercurial import error
24 > from mercurial import obsolete
24 > from mercurial import obsolete
25 > from mercurial import pycompat
25 > from mercurial import registrar
26 > from mercurial import registrar
26 > from mercurial.utils import procutil
27 > from mercurial.utils import procutil
27 >
28 >
28 >
29 >
29 > try:
30 > try:
30 > import msvcrt
31 > import msvcrt
31 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
32 > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
32 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
33 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
33 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
34 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
34 > except ImportError:
35 > except ImportError:
35 > pass
36 > pass
36 >
37 >
37 > cmdtable = {}
38 > cmdtable = {}
38 > command = registrar.command(cmdtable)
39 > command = registrar.command(cmdtable)
39 >
40 >
40 > ELEPHANTSSONG = b"""Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
41 > ELEPHANTSSONG = b"""Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
41 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
42 > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
42 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
43 > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko."""
43 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
44 > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it.
44 >
45 >
45 > @bundle2.parthandler(b'test:song')
46 > @bundle2.parthandler(b'test:song')
46 > def songhandler(op, part):
47 > def songhandler(op, part):
47 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
48 > """handle a "test:song" bundle2 part, printing the lyrics on stdin"""
48 > op.ui.write(b'The choir starts singing:\n')
49 > op.ui.write(b'The choir starts singing:\n')
49 > verses = 0
50 > verses = 0
50 > for line in part.read().split(b'\n'):
51 > for line in part.read().split(b'\n'):
51 > op.ui.write(b' %s\n' % line)
52 > op.ui.write(b' %s\n' % line)
52 > verses += 1
53 > verses += 1
53 > op.records.add(b'song', {b'verses': verses})
54 > op.records.add(b'song', {b'verses': verses})
54 >
55 >
55 > @bundle2.parthandler(b'test:ping')
56 > @bundle2.parthandler(b'test:ping')
56 > def pinghandler(op, part):
57 > def pinghandler(op, part):
57 > op.ui.write(b'received ping request (id %i)\n' % part.id)
58 > op.ui.write(b'received ping request (id %i)\n' % part.id)
58 > if op.reply is not None and b'ping-pong' in op.reply.capabilities:
59 > if op.reply is not None and b'ping-pong' in op.reply.capabilities:
59 > op.ui.write_err(b'replying to ping request (id %i)\n' % part.id)
60 > op.ui.write_err(b'replying to ping request (id %i)\n' % part.id)
60 > op.reply.newpart(b'test:pong', [(b'in-reply-to', b'%d' % part.id)],
61 > op.reply.newpart(b'test:pong', [(b'in-reply-to', b'%d' % part.id)],
61 > mandatory=False)
62 > mandatory=False)
62 >
63 >
63 > @bundle2.parthandler(b'test:debugreply')
64 > @bundle2.parthandler(b'test:debugreply')
64 > def debugreply(op, part):
65 > def debugreply(op, part):
65 > """print data about the capacity of the bundle reply"""
66 > """print data about the capacity of the bundle reply"""
66 > if op.reply is None:
67 > if op.reply is None:
67 > op.ui.write(b'debugreply: no reply\n')
68 > op.ui.write(b'debugreply: no reply\n')
68 > else:
69 > else:
69 > op.ui.write(b'debugreply: capabilities:\n')
70 > op.ui.write(b'debugreply: capabilities:\n')
70 > for cap in sorted(op.reply.capabilities):
71 > for cap in sorted(op.reply.capabilities):
71 > op.ui.write(b"debugreply: '%s'\n" % cap)
72 > op.ui.write(b"debugreply: '%s'\n" % cap)
72 > for val in op.reply.capabilities[cap]:
73 > for val in op.reply.capabilities[cap]:
73 > op.ui.write(b"debugreply: '%s'\n" % val)
74 > op.ui.write(b"debugreply: '%s'\n" % val)
74 >
75 >
75 > @command(b'bundle2',
76 > @command(b'bundle2',
76 > [(b'', b'param', [], b'stream level parameter'),
77 > [(b'', b'param', [], b'stream level parameter'),
77 > (b'', b'unknown', False, b'include an unknown mandatory part in the bundle'),
78 > (b'', b'unknown', False, b'include an unknown mandatory part in the bundle'),
78 > (b'', b'unknownparams', False, b'include an unknown part parameters in the bundle'),
79 > (b'', b'unknownparams', False, b'include an unknown part parameters in the bundle'),
79 > (b'', b'parts', False, b'include some arbitrary parts to the bundle'),
80 > (b'', b'parts', False, b'include some arbitrary parts to the bundle'),
80 > (b'', b'reply', False, b'produce a reply bundle'),
81 > (b'', b'reply', False, b'produce a reply bundle'),
81 > (b'', b'pushrace', False, b'includes a check:head part with unknown nodes'),
82 > (b'', b'pushrace', False, b'includes a check:head part with unknown nodes'),
82 > (b'', b'genraise', False, b'includes a part that raise an exception during generation'),
83 > (b'', b'genraise', False, b'includes a part that raise an exception during generation'),
83 > (b'', b'timeout', False, b'emulate a timeout during bundle generation'),
84 > (b'', b'timeout', False, b'emulate a timeout during bundle generation'),
84 > (b'r', b'rev', [], b'includes those changeset in the bundle'),
85 > (b'r', b'rev', [], b'includes those changeset in the bundle'),
85 > (b'', b'compress', b'', b'compress the stream'),
86 > (b'', b'compress', b'', b'compress the stream'),
86 > ],
87 > ],
87 > b'[OUTPUTFILE]')
88 > b'[OUTPUTFILE]')
88 > def cmdbundle2(ui, repo, path=None, **opts):
89 > def cmdbundle2(ui, repo, path=None, **opts):
89 > """write a bundle2 container on standard output"""
90 > """write a bundle2 container on standard output"""
90 > bundler = bundle2.bundle20(ui)
91 > bundler = bundle2.bundle20(ui)
91 > for p in opts['param']:
92 > for p in opts['param']:
92 > p = p.split(b'=', 1)
93 > p = p.split(b'=', 1)
93 > try:
94 > try:
94 > bundler.addparam(*p)
95 > bundler.addparam(*p)
95 > except error.ProgrammingError as exc:
96 > except error.ProgrammingError as exc:
96 > raise error.Abort(b'%s' % exc)
97 > raise error.Abort(b'%s' % exc)
97 >
98 >
98 > if opts['compress']:
99 > if opts['compress']:
99 > bundler.setcompression(opts['compress'])
100 > bundler.setcompression(opts['compress'])
100 >
101 >
101 > if opts['reply']:
102 > if opts['reply']:
102 > capsstring = b'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
103 > capsstring = b'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville'
103 > bundler.newpart(b'replycaps', data=capsstring)
104 > bundler.newpart(b'replycaps', data=capsstring)
104 >
105 >
105 > if opts['pushrace']:
106 > if opts['pushrace']:
106 > # also serve to test the assignement of data outside of init
107 > # also serve to test the assignement of data outside of init
107 > part = bundler.newpart(b'check:heads')
108 > part = bundler.newpart(b'check:heads')
108 > part.data = b'01234567890123456789'
109 > part.data = b'01234567890123456789'
109 >
110 >
110 > revs = opts['rev']
111 > revs = opts['rev']
111 > if 'rev' in opts:
112 > if 'rev' in opts:
112 > revs = scmutil.revrange(repo, opts['rev'])
113 > revs = scmutil.revrange(repo, opts['rev'])
113 > if revs:
114 > if revs:
114 > # very crude version of a changegroup part creation
115 > # very crude version of a changegroup part creation
115 > bundled = repo.revs('%ld::%ld', revs, revs)
116 > bundled = repo.revs('%ld::%ld', revs, revs)
116 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
117 > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)]
117 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
118 > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)]
118 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
119 > outgoing = discovery.outgoing(repo, headcommon, headmissing)
119 > cg = changegroup.makechangegroup(repo, outgoing, b'01',
120 > cg = changegroup.makechangegroup(repo, outgoing, b'01',
120 > b'test:bundle2')
121 > b'test:bundle2')
121 > bundler.newpart(b'changegroup', data=cg.getchunks(),
122 > bundler.newpart(b'changegroup', data=cg.getchunks(),
122 > mandatory=False)
123 > mandatory=False)
123 >
124 >
124 > if opts['parts']:
125 > if opts['parts']:
125 > bundler.newpart(b'test:empty', mandatory=False)
126 > bundler.newpart(b'test:empty', mandatory=False)
126 > # add a second one to make sure we handle multiple parts
127 > # add a second one to make sure we handle multiple parts
127 > bundler.newpart(b'test:empty', mandatory=False)
128 > bundler.newpart(b'test:empty', mandatory=False)
128 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
129 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
129 > bundler.newpart(b'test:debugreply', mandatory=False)
130 > bundler.newpart(b'test:debugreply', mandatory=False)
130 > mathpart = bundler.newpart(b'test:math')
131 > mathpart = bundler.newpart(b'test:math')
131 > mathpart.addparam(b'pi', b'3.14')
132 > mathpart.addparam(b'pi', b'3.14')
132 > mathpart.addparam(b'e', b'2.72')
133 > mathpart.addparam(b'e', b'2.72')
133 > mathpart.addparam(b'cooking', b'raw', mandatory=False)
134 > mathpart.addparam(b'cooking', b'raw', mandatory=False)
134 > mathpart.data = b'42'
135 > mathpart.data = b'42'
135 > mathpart.mandatory = False
136 > mathpart.mandatory = False
136 > # advisory known part with unknown mandatory param
137 > # advisory known part with unknown mandatory param
137 > bundler.newpart(b'test:song', [(b'randomparam', b'')], mandatory=False)
138 > bundler.newpart(b'test:song', [(b'randomparam', b'')], mandatory=False)
138 > if opts['unknown']:
139 > if opts['unknown']:
139 > bundler.newpart(b'test:unknown', data=b'some random content')
140 > bundler.newpart(b'test:unknown', data=b'some random content')
140 > if opts['unknownparams']:
141 > if opts['unknownparams']:
141 > bundler.newpart(b'test:song', [(b'randomparams', b'')])
142 > bundler.newpart(b'test:song', [(b'randomparams', b'')])
142 > if opts['parts']:
143 > if opts['parts']:
143 > bundler.newpart(b'test:ping', mandatory=False)
144 > bundler.newpart(b'test:ping', mandatory=False)
144 > if opts['genraise']:
145 > if opts['genraise']:
145 > def genraise():
146 > def genraise():
146 > yield b'first line\n'
147 > yield b'first line\n'
147 > raise RuntimeError('Someone set up us the bomb!')
148 > raise RuntimeError('Someone set up us the bomb!')
148 > bundler.newpart(b'output', data=genraise(), mandatory=False)
149 > bundler.newpart(b'output', data=genraise(), mandatory=False)
149 >
150 >
150 > if path is None:
151 > if path is None:
151 > file = procutil.stdout
152 > file = procutil.stdout
152 > else:
153 > else:
153 > file = open(path, 'wb')
154 > file = open(path, 'wb')
154 >
155 >
155 > if opts['timeout']:
156 > if opts['timeout']:
156 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
157 > bundler.newpart(b'test:song', data=ELEPHANTSSONG, mandatory=False)
157 > for idx, junk in enumerate(bundler.getchunks()):
158 > for idx, junk in enumerate(bundler.getchunks()):
158 > ui.write(b'%d chunk\n' % idx)
159 > ui.write(b'%d chunk\n' % idx)
159 > if idx > 4:
160 > if idx > 4:
160 > # This throws a GeneratorExit inside the generator, which
161 > # This throws a GeneratorExit inside the generator, which
161 > # can cause problems if the exception-recovery code is
162 > # can cause problems if the exception-recovery code is
162 > # too zealous. It's important for this test that the break
163 > # too zealous. It's important for this test that the break
163 > # occur while we're in the middle of a part.
164 > # occur while we're in the middle of a part.
164 > break
165 > break
165 > gc.collect()
166 > gc.collect()
166 > ui.write(b'fake timeout complete.\n')
167 > ui.write(b'fake timeout complete.\n')
167 > return
168 > return
168 > try:
169 > try:
169 > for chunk in bundler.getchunks():
170 > for chunk in bundler.getchunks():
170 > file.write(chunk)
171 > file.write(chunk)
171 > except RuntimeError as exc:
172 > except RuntimeError as exc:
172 > raise error.Abort(exc)
173 > raise error.Abort(pycompat.bytestr(exc))
173 > finally:
174 > finally:
174 > file.flush()
175 > file.flush()
175 >
176 >
176 > @command(b'unbundle2', [], b'')
177 > @command(b'unbundle2', [], b'')
177 > def cmdunbundle2(ui, repo, replypath=None):
178 > def cmdunbundle2(ui, repo, replypath=None):
178 > """process a bundle2 stream from stdin on the current repo"""
179 > """process a bundle2 stream from stdin on the current repo"""
179 > try:
180 > try:
180 > tr = None
181 > tr = None
181 > lock = repo.lock()
182 > lock = repo.lock()
182 > tr = repo.transaction(b'processbundle')
183 > tr = repo.transaction(b'processbundle')
183 > try:
184 > try:
184 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
185 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
185 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
186 > op = bundle2.processbundle(repo, unbundler, lambda: tr)
186 > tr.close()
187 > tr.close()
187 > except error.BundleValueError as exc:
188 > except error.BundleValueError as exc:
188 > raise error.Abort(b'missing support for %s' % exc)
189 > raise error.Abort(b'missing support for %s' % exc)
189 > except error.PushRaced as exc:
190 > except error.PushRaced as exc:
190 > raise error.Abort(b'push race: %s' % exc)
191 > raise error.Abort(b'push race: %s' % exc)
191 > finally:
192 > finally:
192 > if tr is not None:
193 > if tr is not None:
193 > tr.release()
194 > tr.release()
194 > lock.release()
195 > lock.release()
195 > remains = procutil.stdin.read()
196 > remains = procutil.stdin.read()
196 > ui.write(b'%i unread bytes\n' % len(remains))
197 > ui.write(b'%i unread bytes\n' % len(remains))
197 > if op.records[b'song']:
198 > if op.records[b'song']:
198 > totalverses = sum(r[b'verses'] for r in op.records[b'song'])
199 > totalverses = sum(r[b'verses'] for r in op.records[b'song'])
199 > ui.write(b'%i total verses sung\n' % totalverses)
200 > ui.write(b'%i total verses sung\n' % totalverses)
200 > for rec in op.records[b'changegroup']:
201 > for rec in op.records[b'changegroup']:
201 > ui.write(b'addchangegroup return: %i\n' % rec[b'return'])
202 > ui.write(b'addchangegroup return: %i\n' % rec[b'return'])
202 > if op.reply is not None and replypath is not None:
203 > if op.reply is not None and replypath is not None:
203 > with open(replypath, 'wb') as file:
204 > with open(replypath, 'wb') as file:
204 > for chunk in op.reply.getchunks():
205 > for chunk in op.reply.getchunks():
205 > file.write(chunk)
206 > file.write(chunk)
206 >
207 >
207 > @command(b'statbundle2', [], b'')
208 > @command(b'statbundle2', [], b'')
208 > def cmdstatbundle2(ui, repo):
209 > def cmdstatbundle2(ui, repo):
209 > """print statistic on the bundle2 container read from stdin"""
210 > """print statistic on the bundle2 container read from stdin"""
210 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
211 > unbundler = bundle2.getunbundler(ui, procutil.stdin)
211 > try:
212 > try:
212 > params = unbundler.params
213 > params = unbundler.params
213 > except error.BundleValueError as exc:
214 > except error.BundleValueError as exc:
214 > raise error.Abort(b'unknown parameters: %s' % exc)
215 > raise error.Abort(b'unknown parameters: %s' % exc)
215 > ui.write(b'options count: %i\n' % len(params))
216 > ui.write(b'options count: %i\n' % len(params))
216 > for key in sorted(params):
217 > for key in sorted(params):
217 > ui.write(b'- %s\n' % key)
218 > ui.write(b'- %s\n' % key)
218 > value = params[key]
219 > value = params[key]
219 > if value is not None:
220 > if value is not None:
220 > ui.write(b' %s\n' % value)
221 > ui.write(b' %s\n' % value)
221 > count = 0
222 > count = 0
222 > for p in unbundler.iterparts():
223 > for p in unbundler.iterparts():
223 > count += 1
224 > count += 1
224 > ui.write(b' :%s:\n' % p.type)
225 > ui.write(b' :%s:\n' % p.type)
225 > ui.write(b' mandatory: %i\n' % len(p.mandatoryparams))
226 > ui.write(b' mandatory: %i\n' % len(p.mandatoryparams))
226 > ui.write(b' advisory: %i\n' % len(p.advisoryparams))
227 > ui.write(b' advisory: %i\n' % len(p.advisoryparams))
227 > ui.write(b' payload: %i bytes\n' % len(p.read()))
228 > ui.write(b' payload: %i bytes\n' % len(p.read()))
228 > ui.write(b'parts count: %i\n' % count)
229 > ui.write(b'parts count: %i\n' % count)
229 > EOF
230 > EOF
230 $ cat >> $HGRCPATH << EOF
231 $ cat >> $HGRCPATH << EOF
231 > [extensions]
232 > [extensions]
232 > bundle2=$TESTTMP/bundle2.py
233 > bundle2=$TESTTMP/bundle2.py
233 > [experimental]
234 > [experimental]
234 > evolution.createmarkers=True
235 > evolution.createmarkers=True
235 > [ui]
236 > [ui]
236 > ssh="$PYTHON" "$TESTDIR/dummyssh"
237 > ssh="$PYTHON" "$TESTDIR/dummyssh"
237 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
238 > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline}
238 > [web]
239 > [web]
239 > push_ssl = false
240 > push_ssl = false
240 > allow_push = *
241 > allow_push = *
241 > [phases]
242 > [phases]
242 > publish=False
243 > publish=False
243 > EOF
244 > EOF
244
245
245 The extension requires a repo (currently unused)
246 The extension requires a repo (currently unused)
246
247
247 $ hg init main
248 $ hg init main
248 $ cd main
249 $ cd main
249 $ touch a
250 $ touch a
250 $ hg add a
251 $ hg add a
251 $ hg commit -m 'a'
252 $ hg commit -m 'a'
252
253
253
254
254 Empty bundle
255 Empty bundle
255 =================
256 =================
256
257
257 - no option
258 - no option
258 - no parts
259 - no parts
259
260
260 Test bundling
261 Test bundling
261
262
262 $ hg bundle2 | f --hexdump
263 $ hg bundle2 | f --hexdump
263
264
264 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
265 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........|
265
266
266 Test timeouts during bundling
267 Test timeouts during bundling
267 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
268 $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes
268 bundle2-output-bundle: "HG20", 1 parts total
269 bundle2-output-bundle: "HG20", 1 parts total
269 bundle2-output: start emission of HG20 stream
270 bundle2-output: start emission of HG20 stream
270 0 chunk
271 0 chunk
271 bundle2-output: bundle parameter:
272 bundle2-output: bundle parameter:
272 1 chunk
273 1 chunk
273 bundle2-output: start of parts
274 bundle2-output: start of parts
274 bundle2-output: bundle part: "test:song"
275 bundle2-output: bundle part: "test:song"
275 bundle2-output-part: "test:song" (advisory) 178 bytes payload
276 bundle2-output-part: "test:song" (advisory) 178 bytes payload
276 bundle2-output: part 0: "test:song"
277 bundle2-output: part 0: "test:song"
277 bundle2-output: header chunk size: 16
278 bundle2-output: header chunk size: 16
278 2 chunk
279 2 chunk
279 3 chunk
280 3 chunk
280 bundle2-output: payload chunk size: 178
281 bundle2-output: payload chunk size: 178
281 4 chunk
282 4 chunk
282 5 chunk
283 5 chunk
283 bundle2-generatorexit
284 bundle2-generatorexit
284 fake timeout complete.
285 fake timeout complete.
285
286
286 Test unbundling
287 Test unbundling
287
288
288 $ hg bundle2 | hg statbundle2
289 $ hg bundle2 | hg statbundle2
289 options count: 0
290 options count: 0
290 parts count: 0
291 parts count: 0
291
292
292 Test old style bundle are detected and refused
293 Test old style bundle are detected and refused
293
294
294 $ hg bundle --all --type v1 ../bundle.hg
295 $ hg bundle --all --type v1 ../bundle.hg
295 1 changesets found
296 1 changesets found
296 $ hg statbundle2 < ../bundle.hg
297 $ hg statbundle2 < ../bundle.hg
297 abort: unknown bundle version 10
298 abort: unknown bundle version 10
298 [255]
299 [255]
299
300
300 Test parameters
301 Test parameters
301 =================
302 =================
302
303
303 - some options
304 - some options
304 - no parts
305 - no parts
305
306
306 advisory parameters, no value
307 advisory parameters, no value
307 -------------------------------
308 -------------------------------
308
309
309 Simplest possible parameters form
310 Simplest possible parameters form
310
311
311 Test generation simple option
312 Test generation simple option
312
313
313 $ hg bundle2 --param 'caution' | f --hexdump
314 $ hg bundle2 --param 'caution' | f --hexdump
314
315
315 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
316 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.|
316 0010: 00 00 00 |...|
317 0010: 00 00 00 |...|
317
318
318 Test unbundling
319 Test unbundling
319
320
320 $ hg bundle2 --param 'caution' | hg statbundle2
321 $ hg bundle2 --param 'caution' | hg statbundle2
321 options count: 1
322 options count: 1
322 - caution
323 - caution
323 parts count: 0
324 parts count: 0
324
325
325 Test generation multiple option
326 Test generation multiple option
326
327
327 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
328 $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump
328
329
329 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
330 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution |
330 0010: 6d 65 61 6c 00 00 00 00 |meal....|
331 0010: 6d 65 61 6c 00 00 00 00 |meal....|
331
332
332 Test unbundling
333 Test unbundling
333
334
334 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
335 $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2
335 options count: 2
336 options count: 2
336 - caution
337 - caution
337 - meal
338 - meal
338 parts count: 0
339 parts count: 0
339
340
340 advisory parameters, with value
341 advisory parameters, with value
341 -------------------------------
342 -------------------------------
342
343
343 Test generation
344 Test generation
344
345
345 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
346 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump
346
347
347 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
348 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution |
348 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
349 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph|
349 0020: 61 6e 74 73 00 00 00 00 |ants....|
350 0020: 61 6e 74 73 00 00 00 00 |ants....|
350
351
351 Test unbundling
352 Test unbundling
352
353
353 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
354 $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2
354 options count: 3
355 options count: 3
355 - caution
356 - caution
356 - elephants
357 - elephants
357 - meal
358 - meal
358 vegan
359 vegan
359 parts count: 0
360 parts count: 0
360
361
361 parameter with special char in value
362 parameter with special char in value
362 ---------------------------------------------------
363 ---------------------------------------------------
363
364
364 Test generation
365 Test generation
365
366
366 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
367 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | f --hexdump
367
368
368 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
369 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
369 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
370 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
370 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
371 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
371 0030: 65 00 00 00 00 |e....|
372 0030: 65 00 00 00 00 |e....|
372
373
373 Test unbundling
374 Test unbundling
374
375
375 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
376 $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2
376 options count: 2
377 options count: 2
377 - e|! 7/
378 - e|! 7/
378 babar%#==tutu
379 babar%#==tutu
379 - simple
380 - simple
380 parts count: 0
381 parts count: 0
381
382
382 Test unknown mandatory option
383 Test unknown mandatory option
383 ---------------------------------------------------
384 ---------------------------------------------------
384
385
385 $ hg bundle2 --param 'Gravity' | hg statbundle2
386 $ hg bundle2 --param 'Gravity' | hg statbundle2
386 abort: unknown parameters: Stream Parameter - Gravity
387 abort: unknown parameters: Stream Parameter - Gravity
387 [255]
388 [255]
388
389
389 Test debug output
390 Test debug output
390 ---------------------------------------------------
391 ---------------------------------------------------
391
392
392 bundling debug
393 bundling debug
393
394
394 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
395 $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true
395 bundle2-output-bundle: "HG20", (2 params) 0 parts total
396 bundle2-output-bundle: "HG20", (2 params) 0 parts total
396 bundle2-output: start emission of HG20 stream
397 bundle2-output: start emission of HG20 stream
397 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
398 bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple
398 bundle2-output: start of parts
399 bundle2-output: start of parts
399 bundle2-output: end of bundle
400 bundle2-output: end of bundle
400
401
401 file content is ok
402 file content is ok
402
403
403 $ f --hexdump ../out.hg2
404 $ f --hexdump ../out.hg2
404 ../out.hg2:
405 ../out.hg2:
405 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
406 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%|
406 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
407 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23|
407 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
408 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl|
408 0030: 65 00 00 00 00 |e....|
409 0030: 65 00 00 00 00 |e....|
409
410
410 unbundling debug
411 unbundling debug
411
412
412 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
413 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2
413 bundle2-input: start processing of HG20 stream
414 bundle2-input: start processing of HG20 stream
414 bundle2-input: reading bundle2 stream parameters
415 bundle2-input: reading bundle2 stream parameters
415 bundle2-input: ignoring unknown parameter e|! 7/
416 bundle2-input: ignoring unknown parameter e|! 7/
416 bundle2-input: ignoring unknown parameter simple
417 bundle2-input: ignoring unknown parameter simple
417 options count: 2
418 options count: 2
418 - e|! 7/
419 - e|! 7/
419 babar%#==tutu
420 babar%#==tutu
420 - simple
421 - simple
421 bundle2-input: start extraction of bundle2 parts
422 bundle2-input: start extraction of bundle2 parts
422 bundle2-input: part header size: 0
423 bundle2-input: part header size: 0
423 bundle2-input: end of bundle2 stream
424 bundle2-input: end of bundle2 stream
424 parts count: 0
425 parts count: 0
425
426
426
427
427 Test buggy input
428 Test buggy input
428 ---------------------------------------------------
429 ---------------------------------------------------
429
430
430 empty parameter name
431 empty parameter name
431
432
432 $ hg bundle2 --param '' --quiet
433 $ hg bundle2 --param '' --quiet
433 abort: empty parameter name
434 abort: empty parameter name
434 [255]
435 [255]
435
436
436 bad parameter name
437 bad parameter name
437
438
438 $ hg bundle2 --param 42babar
439 $ hg bundle2 --param 42babar
439 abort: non letter first character: 42babar
440 abort: non letter first character: 42babar
440 [255]
441 [255]
441
442
442
443
443 Test part
444 Test part
444 =================
445 =================
445
446
446 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
447 $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true
447 bundle2-output-bundle: "HG20", 7 parts total
448 bundle2-output-bundle: "HG20", 7 parts total
448 bundle2-output: start emission of HG20 stream
449 bundle2-output: start emission of HG20 stream
449 bundle2-output: bundle parameter:
450 bundle2-output: bundle parameter:
450 bundle2-output: start of parts
451 bundle2-output: start of parts
451 bundle2-output: bundle part: "test:empty"
452 bundle2-output: bundle part: "test:empty"
452 bundle2-output-part: "test:empty" (advisory) empty payload
453 bundle2-output-part: "test:empty" (advisory) empty payload
453 bundle2-output: part 0: "test:empty"
454 bundle2-output: part 0: "test:empty"
454 bundle2-output: header chunk size: 17
455 bundle2-output: header chunk size: 17
455 bundle2-output: closing payload chunk
456 bundle2-output: closing payload chunk
456 bundle2-output: bundle part: "test:empty"
457 bundle2-output: bundle part: "test:empty"
457 bundle2-output-part: "test:empty" (advisory) empty payload
458 bundle2-output-part: "test:empty" (advisory) empty payload
458 bundle2-output: part 1: "test:empty"
459 bundle2-output: part 1: "test:empty"
459 bundle2-output: header chunk size: 17
460 bundle2-output: header chunk size: 17
460 bundle2-output: closing payload chunk
461 bundle2-output: closing payload chunk
461 bundle2-output: bundle part: "test:song"
462 bundle2-output: bundle part: "test:song"
462 bundle2-output-part: "test:song" (advisory) 178 bytes payload
463 bundle2-output-part: "test:song" (advisory) 178 bytes payload
463 bundle2-output: part 2: "test:song"
464 bundle2-output: part 2: "test:song"
464 bundle2-output: header chunk size: 16
465 bundle2-output: header chunk size: 16
465 bundle2-output: payload chunk size: 178
466 bundle2-output: payload chunk size: 178
466 bundle2-output: closing payload chunk
467 bundle2-output: closing payload chunk
467 bundle2-output: bundle part: "test:debugreply"
468 bundle2-output: bundle part: "test:debugreply"
468 bundle2-output-part: "test:debugreply" (advisory) empty payload
469 bundle2-output-part: "test:debugreply" (advisory) empty payload
469 bundle2-output: part 3: "test:debugreply"
470 bundle2-output: part 3: "test:debugreply"
470 bundle2-output: header chunk size: 22
471 bundle2-output: header chunk size: 22
471 bundle2-output: closing payload chunk
472 bundle2-output: closing payload chunk
472 bundle2-output: bundle part: "test:math"
473 bundle2-output: bundle part: "test:math"
473 bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
474 bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload
474 bundle2-output: part 4: "test:math"
475 bundle2-output: part 4: "test:math"
475 bundle2-output: header chunk size: 43
476 bundle2-output: header chunk size: 43
476 bundle2-output: payload chunk size: 2
477 bundle2-output: payload chunk size: 2
477 bundle2-output: closing payload chunk
478 bundle2-output: closing payload chunk
478 bundle2-output: bundle part: "test:song"
479 bundle2-output: bundle part: "test:song"
479 bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
480 bundle2-output-part: "test:song" (advisory) (params: 1 mandatory) empty payload
480 bundle2-output: part 5: "test:song"
481 bundle2-output: part 5: "test:song"
481 bundle2-output: header chunk size: 29
482 bundle2-output: header chunk size: 29
482 bundle2-output: closing payload chunk
483 bundle2-output: closing payload chunk
483 bundle2-output: bundle part: "test:ping"
484 bundle2-output: bundle part: "test:ping"
484 bundle2-output-part: "test:ping" (advisory) empty payload
485 bundle2-output-part: "test:ping" (advisory) empty payload
485 bundle2-output: part 6: "test:ping"
486 bundle2-output: part 6: "test:ping"
486 bundle2-output: header chunk size: 16
487 bundle2-output: header chunk size: 16
487 bundle2-output: closing payload chunk
488 bundle2-output: closing payload chunk
488 bundle2-output: end of bundle
489 bundle2-output: end of bundle
489
490
490 $ f --hexdump ../parts.hg2
491 $ f --hexdump ../parts.hg2
491 ../parts.hg2:
492 ../parts.hg2:
492 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
493 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes|
493 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
494 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........|
494 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
495 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty|
495 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
496 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t|
496 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
497 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........|
497 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
498 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat|
498 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
499 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda|
499 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
500 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa|
500 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
501 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok|
501 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
502 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R|
502 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
503 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin|
503 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
504 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko|
504 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
505 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar|
505 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
506 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L|
506 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
507 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, |
507 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
508 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
508 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
509 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes|
509 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
510 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....|
510 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
511 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:|
511 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
512 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............|
512 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
513 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki|
513 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
514 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 00 00 |ngraw....42.....|
514 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
515 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...|
515 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
516 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam|
516 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
517 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi|
517 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
518 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............|
518
519
519
520
520 $ hg statbundle2 < ../parts.hg2
521 $ hg statbundle2 < ../parts.hg2
521 options count: 0
522 options count: 0
522 :test:empty:
523 :test:empty:
523 mandatory: 0
524 mandatory: 0
524 advisory: 0
525 advisory: 0
525 payload: 0 bytes
526 payload: 0 bytes
526 :test:empty:
527 :test:empty:
527 mandatory: 0
528 mandatory: 0
528 advisory: 0
529 advisory: 0
529 payload: 0 bytes
530 payload: 0 bytes
530 :test:song:
531 :test:song:
531 mandatory: 0
532 mandatory: 0
532 advisory: 0
533 advisory: 0
533 payload: 178 bytes
534 payload: 178 bytes
534 :test:debugreply:
535 :test:debugreply:
535 mandatory: 0
536 mandatory: 0
536 advisory: 0
537 advisory: 0
537 payload: 0 bytes
538 payload: 0 bytes
538 :test:math:
539 :test:math:
539 mandatory: 2
540 mandatory: 2
540 advisory: 1
541 advisory: 1
541 payload: 2 bytes
542 payload: 2 bytes
542 :test:song:
543 :test:song:
543 mandatory: 1
544 mandatory: 1
544 advisory: 0
545 advisory: 0
545 payload: 0 bytes
546 payload: 0 bytes
546 :test:ping:
547 :test:ping:
547 mandatory: 0
548 mandatory: 0
548 advisory: 0
549 advisory: 0
549 payload: 0 bytes
550 payload: 0 bytes
550 parts count: 7
551 parts count: 7
551
552
552 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
553 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
553 bundle2-input: start processing of HG20 stream
554 bundle2-input: start processing of HG20 stream
554 bundle2-input: reading bundle2 stream parameters
555 bundle2-input: reading bundle2 stream parameters
555 options count: 0
556 options count: 0
556 bundle2-input: start extraction of bundle2 parts
557 bundle2-input: start extraction of bundle2 parts
557 bundle2-input: part header size: 17
558 bundle2-input: part header size: 17
558 bundle2-input: part type: "test:empty"
559 bundle2-input: part type: "test:empty"
559 bundle2-input: part id: "0"
560 bundle2-input: part id: "0"
560 bundle2-input: part parameters: 0
561 bundle2-input: part parameters: 0
561 :test:empty:
562 :test:empty:
562 mandatory: 0
563 mandatory: 0
563 advisory: 0
564 advisory: 0
564 bundle2-input: payload chunk size: 0
565 bundle2-input: payload chunk size: 0
565 payload: 0 bytes
566 payload: 0 bytes
566 bundle2-input: part header size: 17
567 bundle2-input: part header size: 17
567 bundle2-input: part type: "test:empty"
568 bundle2-input: part type: "test:empty"
568 bundle2-input: part id: "1"
569 bundle2-input: part id: "1"
569 bundle2-input: part parameters: 0
570 bundle2-input: part parameters: 0
570 :test:empty:
571 :test:empty:
571 mandatory: 0
572 mandatory: 0
572 advisory: 0
573 advisory: 0
573 bundle2-input: payload chunk size: 0
574 bundle2-input: payload chunk size: 0
574 payload: 0 bytes
575 payload: 0 bytes
575 bundle2-input: part header size: 16
576 bundle2-input: part header size: 16
576 bundle2-input: part type: "test:song"
577 bundle2-input: part type: "test:song"
577 bundle2-input: part id: "2"
578 bundle2-input: part id: "2"
578 bundle2-input: part parameters: 0
579 bundle2-input: part parameters: 0
579 :test:song:
580 :test:song:
580 mandatory: 0
581 mandatory: 0
581 advisory: 0
582 advisory: 0
582 bundle2-input: payload chunk size: 178
583 bundle2-input: payload chunk size: 178
583 bundle2-input: payload chunk size: 0
584 bundle2-input: payload chunk size: 0
584 bundle2-input-part: total payload size 178
585 bundle2-input-part: total payload size 178
585 payload: 178 bytes
586 payload: 178 bytes
586 bundle2-input: part header size: 22
587 bundle2-input: part header size: 22
587 bundle2-input: part type: "test:debugreply"
588 bundle2-input: part type: "test:debugreply"
588 bundle2-input: part id: "3"
589 bundle2-input: part id: "3"
589 bundle2-input: part parameters: 0
590 bundle2-input: part parameters: 0
590 :test:debugreply:
591 :test:debugreply:
591 mandatory: 0
592 mandatory: 0
592 advisory: 0
593 advisory: 0
593 bundle2-input: payload chunk size: 0
594 bundle2-input: payload chunk size: 0
594 payload: 0 bytes
595 payload: 0 bytes
595 bundle2-input: part header size: 43
596 bundle2-input: part header size: 43
596 bundle2-input: part type: "test:math"
597 bundle2-input: part type: "test:math"
597 bundle2-input: part id: "4"
598 bundle2-input: part id: "4"
598 bundle2-input: part parameters: 3
599 bundle2-input: part parameters: 3
599 :test:math:
600 :test:math:
600 mandatory: 2
601 mandatory: 2
601 advisory: 1
602 advisory: 1
602 bundle2-input: payload chunk size: 2
603 bundle2-input: payload chunk size: 2
603 bundle2-input: payload chunk size: 0
604 bundle2-input: payload chunk size: 0
604 bundle2-input-part: total payload size 2
605 bundle2-input-part: total payload size 2
605 payload: 2 bytes
606 payload: 2 bytes
606 bundle2-input: part header size: 29
607 bundle2-input: part header size: 29
607 bundle2-input: part type: "test:song"
608 bundle2-input: part type: "test:song"
608 bundle2-input: part id: "5"
609 bundle2-input: part id: "5"
609 bundle2-input: part parameters: 1
610 bundle2-input: part parameters: 1
610 :test:song:
611 :test:song:
611 mandatory: 1
612 mandatory: 1
612 advisory: 0
613 advisory: 0
613 bundle2-input: payload chunk size: 0
614 bundle2-input: payload chunk size: 0
614 payload: 0 bytes
615 payload: 0 bytes
615 bundle2-input: part header size: 16
616 bundle2-input: part header size: 16
616 bundle2-input: part type: "test:ping"
617 bundle2-input: part type: "test:ping"
617 bundle2-input: part id: "6"
618 bundle2-input: part id: "6"
618 bundle2-input: part parameters: 0
619 bundle2-input: part parameters: 0
619 :test:ping:
620 :test:ping:
620 mandatory: 0
621 mandatory: 0
621 advisory: 0
622 advisory: 0
622 bundle2-input: payload chunk size: 0
623 bundle2-input: payload chunk size: 0
623 payload: 0 bytes
624 payload: 0 bytes
624 bundle2-input: part header size: 0
625 bundle2-input: part header size: 0
625 bundle2-input: end of bundle2 stream
626 bundle2-input: end of bundle2 stream
626 parts count: 7
627 parts count: 7
627
628
628 Test actual unbundling of test part
629 Test actual unbundling of test part
629 =======================================
630 =======================================
630
631
631 Process the bundle
632 Process the bundle
632
633
633 $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
634 $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2
634 bundle2-input: start processing of HG20 stream
635 bundle2-input: start processing of HG20 stream
635 bundle2-input: reading bundle2 stream parameters
636 bundle2-input: reading bundle2 stream parameters
636 bundle2-input-bundle: with-transaction
637 bundle2-input-bundle: with-transaction
637 bundle2-input: start extraction of bundle2 parts
638 bundle2-input: start extraction of bundle2 parts
638 bundle2-input: part header size: 17
639 bundle2-input: part header size: 17
639 bundle2-input: part type: "test:empty"
640 bundle2-input: part type: "test:empty"
640 bundle2-input: part id: "0"
641 bundle2-input: part id: "0"
641 bundle2-input: part parameters: 0
642 bundle2-input: part parameters: 0
642 bundle2-input: ignoring unsupported advisory part test:empty
643 bundle2-input: ignoring unsupported advisory part test:empty
643 bundle2-input-part: "test:empty" (advisory) unsupported-type
644 bundle2-input-part: "test:empty" (advisory) unsupported-type
644 bundle2-input: payload chunk size: 0
645 bundle2-input: payload chunk size: 0
645 bundle2-input: part header size: 17
646 bundle2-input: part header size: 17
646 bundle2-input: part type: "test:empty"
647 bundle2-input: part type: "test:empty"
647 bundle2-input: part id: "1"
648 bundle2-input: part id: "1"
648 bundle2-input: part parameters: 0
649 bundle2-input: part parameters: 0
649 bundle2-input: ignoring unsupported advisory part test:empty
650 bundle2-input: ignoring unsupported advisory part test:empty
650 bundle2-input-part: "test:empty" (advisory) unsupported-type
651 bundle2-input-part: "test:empty" (advisory) unsupported-type
651 bundle2-input: payload chunk size: 0
652 bundle2-input: payload chunk size: 0
652 bundle2-input: part header size: 16
653 bundle2-input: part header size: 16
653 bundle2-input: part type: "test:song"
654 bundle2-input: part type: "test:song"
654 bundle2-input: part id: "2"
655 bundle2-input: part id: "2"
655 bundle2-input: part parameters: 0
656 bundle2-input: part parameters: 0
656 bundle2-input: found a handler for part test:song
657 bundle2-input: found a handler for part test:song
657 bundle2-input-part: "test:song" (advisory) supported
658 bundle2-input-part: "test:song" (advisory) supported
658 The choir starts singing:
659 The choir starts singing:
659 bundle2-input: payload chunk size: 178
660 bundle2-input: payload chunk size: 178
660 bundle2-input: payload chunk size: 0
661 bundle2-input: payload chunk size: 0
661 bundle2-input-part: total payload size 178
662 bundle2-input-part: total payload size 178
662 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
663 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
663 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
664 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
664 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
665 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
665 bundle2-input: part header size: 22
666 bundle2-input: part header size: 22
666 bundle2-input: part type: "test:debugreply"
667 bundle2-input: part type: "test:debugreply"
667 bundle2-input: part id: "3"
668 bundle2-input: part id: "3"
668 bundle2-input: part parameters: 0
669 bundle2-input: part parameters: 0
669 bundle2-input: found a handler for part test:debugreply
670 bundle2-input: found a handler for part test:debugreply
670 bundle2-input-part: "test:debugreply" (advisory) supported
671 bundle2-input-part: "test:debugreply" (advisory) supported
671 debugreply: no reply
672 debugreply: no reply
672 bundle2-input: payload chunk size: 0
673 bundle2-input: payload chunk size: 0
673 bundle2-input: part header size: 43
674 bundle2-input: part header size: 43
674 bundle2-input: part type: "test:math"
675 bundle2-input: part type: "test:math"
675 bundle2-input: part id: "4"
676 bundle2-input: part id: "4"
676 bundle2-input: part parameters: 3
677 bundle2-input: part parameters: 3
677 bundle2-input: ignoring unsupported advisory part test:math
678 bundle2-input: ignoring unsupported advisory part test:math
678 bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
679 bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type
679 bundle2-input: payload chunk size: 2
680 bundle2-input: payload chunk size: 2
680 bundle2-input: payload chunk size: 0
681 bundle2-input: payload chunk size: 0
681 bundle2-input-part: total payload size 2
682 bundle2-input-part: total payload size 2
682 bundle2-input: part header size: 29
683 bundle2-input: part header size: 29
683 bundle2-input: part type: "test:song"
684 bundle2-input: part type: "test:song"
684 bundle2-input: part id: "5"
685 bundle2-input: part id: "5"
685 bundle2-input: part parameters: 1
686 bundle2-input: part parameters: 1
686 bundle2-input: found a handler for part test:song
687 bundle2-input: found a handler for part test:song
687 bundle2-input: ignoring unsupported advisory part test:song - randomparam
688 bundle2-input: ignoring unsupported advisory part test:song - randomparam
688 bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (randomparam)
689 bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (randomparam)
689 bundle2-input: payload chunk size: 0
690 bundle2-input: payload chunk size: 0
690 bundle2-input: part header size: 16
691 bundle2-input: part header size: 16
691 bundle2-input: part type: "test:ping"
692 bundle2-input: part type: "test:ping"
692 bundle2-input: part id: "6"
693 bundle2-input: part id: "6"
693 bundle2-input: part parameters: 0
694 bundle2-input: part parameters: 0
694 bundle2-input: found a handler for part test:ping
695 bundle2-input: found a handler for part test:ping
695 bundle2-input-part: "test:ping" (advisory) supported
696 bundle2-input-part: "test:ping" (advisory) supported
696 received ping request (id 6)
697 received ping request (id 6)
697 bundle2-input: payload chunk size: 0
698 bundle2-input: payload chunk size: 0
698 bundle2-input: part header size: 0
699 bundle2-input: part header size: 0
699 bundle2-input: end of bundle2 stream
700 bundle2-input: end of bundle2 stream
700 bundle2-input-bundle: 7 parts total
701 bundle2-input-bundle: 7 parts total
701 0 unread bytes
702 0 unread bytes
702 3 total verses sung
703 3 total verses sung
703
704
704 Unbundle with an unknown mandatory part
705 Unbundle with an unknown mandatory part
705 (should abort)
706 (should abort)
706
707
707 $ hg bundle2 --parts --unknown ../unknown.hg2
708 $ hg bundle2 --parts --unknown ../unknown.hg2
708
709
709 $ hg unbundle2 < ../unknown.hg2
710 $ hg unbundle2 < ../unknown.hg2
710 The choir starts singing:
711 The choir starts singing:
711 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
712 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
712 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
713 Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
713 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
714 Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
714 debugreply: no reply
715 debugreply: no reply
715 0 unread bytes
716 0 unread bytes
716 abort: missing support for test:unknown
717 abort: missing support for test:unknown
717 [255]
718 [255]
718
719
719 Unbundle with an unknown mandatory part parameters
720 Unbundle with an unknown mandatory part parameters
720 (should abort)
721 (should abort)
721
722
722 $ hg bundle2 --unknownparams ../unknown.hg2
723 $ hg bundle2 --unknownparams ../unknown.hg2
723
724
724 $ hg unbundle2 < ../unknown.hg2
725 $ hg unbundle2 < ../unknown.hg2
725 0 unread bytes
726 0 unread bytes
726 abort: missing support for test:song - randomparams
727 abort: missing support for test:song - randomparams
727 [255]
728 [255]
728
729
729 unbundle with a reply
730 unbundle with a reply
730
731
731 $ hg bundle2 --parts --reply ../parts-reply.hg2
732 $ hg bundle2 --parts --reply ../parts-reply.hg2
732 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
733 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2
733 0 unread bytes
734 0 unread bytes
734 3 total verses sung
735 3 total verses sung
735
736
736 The reply is a bundle
737 The reply is a bundle
737
738
738 $ f --hexdump ../reply.hg2
739 $ f --hexdump ../reply.hg2
739 ../reply.hg2:
740 ../reply.hg2:
740 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
741 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out|
741 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
742 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
742 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
743 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c|
743 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
744 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing|
744 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali |
745 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali |
745 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
746 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda|
746 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
747 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, |
747 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
748 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko|
748 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D|
749 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D|
749 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
750 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi |
750 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
751 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa|
751 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
752 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K|
752 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. Emana Kara|
753 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. Emana Kara|
753 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
754 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo|
754 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
755 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P|
755 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
756 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko |
756 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
757 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out|
757 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
758 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re|
758 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
759 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug|
759 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
760 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit|
760 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
761 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:|
761 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de|
762 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de|
762 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
763 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
763 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville|
764 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville|
764 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: |
765 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: |
765 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de|
766 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de|
766 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
767 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: |
767 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr|
768 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr|
768 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c|
769 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c|
769 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
770 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep|
770 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po|
771 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po|
771 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
772 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes|
772 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
773 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in|
773 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
774 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......|
774 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
775 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......|
775 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
776 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...|
776 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
777 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r|
777 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
778 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re|
778 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
779 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r|
779 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
780 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...|
780 0280: 00 00 00 00 00 00 |......|
781 0280: 00 00 00 00 00 00 |......|
781
782
782 The reply is valid
783 The reply is valid
783
784
784 $ hg statbundle2 < ../reply.hg2
785 $ hg statbundle2 < ../reply.hg2
785 options count: 0
786 options count: 0
786 :output:
787 :output:
787 mandatory: 0
788 mandatory: 0
788 advisory: 1
789 advisory: 1
789 payload: 217 bytes
790 payload: 217 bytes
790 :output:
791 :output:
791 mandatory: 0
792 mandatory: 0
792 advisory: 1
793 advisory: 1
793 payload: 201 bytes
794 payload: 201 bytes
794 :test:pong:
795 :test:pong:
795 mandatory: 1
796 mandatory: 1
796 advisory: 0
797 advisory: 0
797 payload: 0 bytes
798 payload: 0 bytes
798 :output:
799 :output:
799 mandatory: 0
800 mandatory: 0
800 advisory: 1
801 advisory: 1
801 payload: 61 bytes
802 payload: 61 bytes
802 parts count: 4
803 parts count: 4
803
804
804 Unbundle the reply to get the output:
805 Unbundle the reply to get the output:
805
806
806 $ hg unbundle2 < ../reply.hg2
807 $ hg unbundle2 < ../reply.hg2
807 remote: The choir starts singing:
808 remote: The choir starts singing:
808 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
809 remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko
809 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
810 remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko
810 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
811 remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.
811 remote: debugreply: capabilities:
812 remote: debugreply: capabilities:
812 remote: debugreply: 'city=!'
813 remote: debugreply: 'city=!'
813 remote: debugreply: 'celeste,ville'
814 remote: debugreply: 'celeste,ville'
814 remote: debugreply: 'elephants'
815 remote: debugreply: 'elephants'
815 remote: debugreply: 'babar'
816 remote: debugreply: 'babar'
816 remote: debugreply: 'celeste'
817 remote: debugreply: 'celeste'
817 remote: debugreply: 'ping-pong'
818 remote: debugreply: 'ping-pong'
818 remote: received ping request (id 7)
819 remote: received ping request (id 7)
819 remote: replying to ping request (id 7)
820 remote: replying to ping request (id 7)
820 0 unread bytes
821 0 unread bytes
821
822
822 Test push race detection
823 Test push race detection
823
824
824 $ hg bundle2 --pushrace ../part-race.hg2
825 $ hg bundle2 --pushrace ../part-race.hg2
825
826
826 $ hg unbundle2 < ../part-race.hg2
827 $ hg unbundle2 < ../part-race.hg2
827 0 unread bytes
828 0 unread bytes
828 abort: push race: remote repository changed while pushing - please try again
829 abort: push race: remote repository changed while pushing - please try again
829 [255]
830 [255]
830
831
831 Support for changegroup
832 Support for changegroup
832 ===================================
833 ===================================
833
834
834 $ hg unbundle $TESTDIR/bundles/rebase.hg
835 $ hg unbundle $TESTDIR/bundles/rebase.hg
835 adding changesets
836 adding changesets
836 adding manifests
837 adding manifests
837 adding file changes
838 adding file changes
838 added 8 changesets with 7 changes to 7 files (+3 heads)
839 added 8 changesets with 7 changes to 7 files (+3 heads)
839 new changesets cd010b8cd998:02de42196ebe (8 drafts)
840 new changesets cd010b8cd998:02de42196ebe (8 drafts)
840 (run 'hg heads' to see heads, 'hg merge' to merge)
841 (run 'hg heads' to see heads, 'hg merge' to merge)
841
842
842 $ hg log -G
843 $ hg log -G
843 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
844 o 8:02de42196ebe draft Nicolas Dumazet <nicdumz.commits@gmail.com> H
844 |
845 |
845 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
846 | o 7:eea13746799a draft Nicolas Dumazet <nicdumz.commits@gmail.com> G
846 |/|
847 |/|
847 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
848 o | 6:24b6387c8c8c draft Nicolas Dumazet <nicdumz.commits@gmail.com> F
848 | |
849 | |
849 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
850 | o 5:9520eea781bc draft Nicolas Dumazet <nicdumz.commits@gmail.com> E
850 |/
851 |/
851 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
852 | o 4:32af7686d403 draft Nicolas Dumazet <nicdumz.commits@gmail.com> D
852 | |
853 | |
853 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
854 | o 3:5fddd98957c8 draft Nicolas Dumazet <nicdumz.commits@gmail.com> C
854 | |
855 | |
855 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
856 | o 2:42ccdea3bb16 draft Nicolas Dumazet <nicdumz.commits@gmail.com> B
856 |/
857 |/
857 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
858 o 1:cd010b8cd998 draft Nicolas Dumazet <nicdumz.commits@gmail.com> A
858
859
859 @ 0:3903775176ed draft test a
860 @ 0:3903775176ed draft test a
860
861
861
862
862 $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
863 $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2
863 4 changesets found
864 4 changesets found
864 list of changesets:
865 list of changesets:
865 32af7686d403cf45b5d95f2d70cebea587ac806a
866 32af7686d403cf45b5d95f2d70cebea587ac806a
866 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
867 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
867 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
868 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
868 02de42196ebee42ef284b6780a87cdc96e8eaab6
869 02de42196ebee42ef284b6780a87cdc96e8eaab6
869 bundle2-output-bundle: "HG20", 1 parts total
870 bundle2-output-bundle: "HG20", 1 parts total
870 bundle2-output: start emission of HG20 stream
871 bundle2-output: start emission of HG20 stream
871 bundle2-output: bundle parameter:
872 bundle2-output: bundle parameter:
872 bundle2-output: start of parts
873 bundle2-output: start of parts
873 bundle2-output: bundle part: "changegroup"
874 bundle2-output: bundle part: "changegroup"
874 bundle2-output-part: "changegroup" (advisory) streamed payload
875 bundle2-output-part: "changegroup" (advisory) streamed payload
875 bundle2-output: part 0: "changegroup"
876 bundle2-output: part 0: "changegroup"
876 bundle2-output: header chunk size: 18
877 bundle2-output: header chunk size: 18
877 changesets: 1/4 chunks (25.00%)
878 changesets: 1/4 chunks (25.00%)
878 changesets: 2/4 chunks (50.00%)
879 changesets: 2/4 chunks (50.00%)
879 changesets: 3/4 chunks (75.00%)
880 changesets: 3/4 chunks (75.00%)
880 changesets: 4/4 chunks (100.00%)
881 changesets: 4/4 chunks (100.00%)
881 manifests: 1/4 chunks (25.00%)
882 manifests: 1/4 chunks (25.00%)
882 manifests: 2/4 chunks (50.00%)
883 manifests: 2/4 chunks (50.00%)
883 manifests: 3/4 chunks (75.00%)
884 manifests: 3/4 chunks (75.00%)
884 manifests: 4/4 chunks (100.00%)
885 manifests: 4/4 chunks (100.00%)
885 files: D 1/3 files (33.33%)
886 files: D 1/3 files (33.33%)
886 files: E 2/3 files (66.67%)
887 files: E 2/3 files (66.67%)
887 files: H 3/3 files (100.00%)
888 files: H 3/3 files (100.00%)
888 bundle2-output: payload chunk size: 1555
889 bundle2-output: payload chunk size: 1555
889 bundle2-output: closing payload chunk
890 bundle2-output: closing payload chunk
890 bundle2-output: end of bundle
891 bundle2-output: end of bundle
891
892
892 $ f --hexdump ../rev.hg2
893 $ f --hexdump ../rev.hg2
893 ../rev.hg2:
894 ../rev.hg2:
894 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
895 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha|
895 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
896 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........|
896 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
897 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..|
897 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
898 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.|
898 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
899 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..|
899 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
900 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
900 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
901 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.|
901 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
902 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..|
902 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
903 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533|
903 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
904 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af|
904 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
905 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.|
905 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
906 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...|
906 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
907 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... ....|
907 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
908 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......|
908 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
909 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P|
909 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
910 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............|
910 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
911 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
911 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
912 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........|
912 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
913 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826|
913 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
914 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638|
914 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
915 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..|
915 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
916 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....|
916 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
917 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......|
917 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
918 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.|
918 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
919 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......|
919 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
920 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........|
920 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
921 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.|
921 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
922 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
922 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
923 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93|
923 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
924 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591|
924 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
925 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144|
925 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
926 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....|
926 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
927 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....|
927 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
928 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....|
928 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
929 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....|
929 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
930 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................|
930 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
931 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.|
931 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
932 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...|
932 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
933 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee|
933 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
934 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00|
934 0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
935 0280: 31 33 65 65 34 31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c|
935 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
936 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...|
936 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
937 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H|
937 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
938 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.|
938 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
939 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...|
939 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
940 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....|
940 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
941 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
941 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
942 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-|
942 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
943 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........|
943 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
944 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924|
944 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
945 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490|
945 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
946 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..|
946 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
947 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8|
947 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
948 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.|
948 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
949 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......|
949 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
950 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................|
950 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
951 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.|
951 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
952 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E|
952 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
953 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0|
953 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
954 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07|
954 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
955 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[|
955 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
956 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..|
956 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
957 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...|
957 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
958 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{|
958 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
959 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.|
959 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
960 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..|
960 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
961 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf|
961 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
962 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 64 62 |cfd62a21a3287edb|
962 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
963 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e|
963 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
964 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.|
964 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
965 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..|
965 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
966 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...|
966 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
967 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
967 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
968 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......|
968 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
969 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...|
969 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
970 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............|
970 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
971 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e|
971 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
972 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db|
972 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
973 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....|
973 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
974 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j|
974 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
975 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...|
975 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
976 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
976 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
977 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
977 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
978 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._|
978 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
979 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......|
979 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
980 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E|
980 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
981 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...|
981 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
982 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........|
982 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
983 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
983 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
984 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
984 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
985 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5|
985 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
986 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................|
986 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
987 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.|
987 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
988 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. ..}..1|
988 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
989 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
989 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
990 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................|
990 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
991 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n|
991 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
992 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....|
992 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
993 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....|
993 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|
994 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............|
994
995
995 $ hg debugbundle ../rev.hg2
996 $ hg debugbundle ../rev.hg2
996 Stream params: {}
997 Stream params: {}
997 changegroup -- {} (mandatory: False)
998 changegroup -- {} (mandatory: False)
998 32af7686d403cf45b5d95f2d70cebea587ac806a
999 32af7686d403cf45b5d95f2d70cebea587ac806a
999 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1000 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1000 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1001 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1001 02de42196ebee42ef284b6780a87cdc96e8eaab6
1002 02de42196ebee42ef284b6780a87cdc96e8eaab6
1002 $ hg unbundle ../rev.hg2
1003 $ hg unbundle ../rev.hg2
1003 adding changesets
1004 adding changesets
1004 adding manifests
1005 adding manifests
1005 adding file changes
1006 adding file changes
1006 added 0 changesets with 0 changes to 3 files
1007 added 0 changesets with 0 changes to 3 files
1007 (run 'hg update' to get a working copy)
1008 (run 'hg update' to get a working copy)
1008
1009
1009 with reply
1010 with reply
1010
1011
1011 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
1012 $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2
1012 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
1013 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2
1013 added 0 changesets with 0 changes to 3 files
1014 added 0 changesets with 0 changes to 3 files
1014 0 unread bytes
1015 0 unread bytes
1015 addchangegroup return: 1
1016 addchangegroup return: 1
1016
1017
1017 $ f --hexdump ../rev-reply.hg2
1018 $ f --hexdump ../rev-reply.hg2
1018 ../rev-reply.hg2:
1019 ../rev-reply.hg2:
1019 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
1020 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep|
1020 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
1021 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..|
1021 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
1022 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply|
1022 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
1023 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....|
1023 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
1024 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......|
1024 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
1025 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..|
1025 0060: 00 37 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.7adding changes|
1026 0060: 00 37 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.7adding changes|
1026 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
1027 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif|
1027 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
1028 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file|
1028 0090: 20 63 68 61 6e 67 65 73 0a 00 00 00 00 00 00 00 | changes........|
1029 0090: 20 63 68 61 6e 67 65 73 0a 00 00 00 00 00 00 00 | changes........|
1029 00a0: 00 |.|
1030 00a0: 00 |.|
1030
1031
1031 Check handling of exception during generation.
1032 Check handling of exception during generation.
1032 ----------------------------------------------
1033 ----------------------------------------------
1033
1034
1034 $ hg bundle2 --genraise > ../genfailed.hg2
1035 $ hg bundle2 --genraise > ../genfailed.hg2
1035 abort: Someone set up us the bomb!
1036 abort: Someone set up us the bomb!
1036 [255]
1037 [255]
1037
1038
1038 Should still be a valid bundle
1039 Should still be a valid bundle
1039
1040
1040 $ f --hexdump ../genfailed.hg2
1041 $ f --hexdump ../genfailed.hg2
1041 ../genfailed.hg2:
1042 ../genfailed.hg2:
1042 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
1043 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out|
1043 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
1044 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............|
1044 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
1045 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...|
1045 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
1046 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex|
1046 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
1047 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So|
1047 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
1048 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us |
1048 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
1049 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......|
1049 0070: 00 |.|
1050 0070: 00 |.|
1050
1051
1051 And its handling on the other size raise a clean exception
1052 And its handling on the other size raise a clean exception
1052
1053
1053 $ cat ../genfailed.hg2 | hg unbundle2
1054 $ cat ../genfailed.hg2 | hg unbundle2
1054 0 unread bytes
1055 0 unread bytes
1055 abort: unexpected error: Someone set up us the bomb!
1056 abort: unexpected error: Someone set up us the bomb!
1056 [255]
1057 [255]
1057
1058
1058 Test compression
1059 Test compression
1059 ================
1060 ================
1060
1061
1061 Simple case where it just work: GZ
1062 Simple case where it just work: GZ
1062 ----------------------------------
1063 ----------------------------------
1063
1064
1064 $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
1065 $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz
1065 $ f --hexdump ../rev.hg2.bz
1066 $ f --hexdump ../rev.hg2.bz
1066 ../rev.hg2.bz:
1067 ../rev.hg2.bz:
1067 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1068 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1068 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
1069 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..|
1069 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
1070 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).|
1070 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
1071 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h|
1071 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
1072 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a|
1072 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
1073 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i|
1073 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
1074 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g|
1074 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
1075 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...|
1075 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
1076 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.|
1076 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
1077 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N|
1077 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
1078 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........|
1078 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
1079 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 |....zjy......~..|
1079 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
1080 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.|
1080 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
1081 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...|
1081 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
1082 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^|
1082 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
1083 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..|
1083 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
1084 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L|
1084 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
1085 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.|
1085 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
1086 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..|
1086 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
1087 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.|
1087 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
1088 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...|
1088 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
1089 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(|
1089 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
1090 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..|
1090 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
1091 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a<a......cE=|
1091 0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
1092 0180: a3 0c 61 90 68 24 28 87 50 b9 c2 97 c6 20 01 11 |..a.h$(.P.... ..|
1092 0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
1093 0190: 80 84 10 98 cf e8 e4 13 96 05 51 2c 38 f3 c4 ec |..........Q,8...|
1093 01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
1094 01a0: ea 43 e7 96 5e 6a c8 be 11 dd 32 78 a2 fa dd 8f |.C..^j....2x....|
1094 01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
1095 01b0: b3 61 84 61 51 0c b3 cd 27 64 42 6b c2 b4 92 1e |.a.aQ...'dBk....|
1095 01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
1096 01c0: 86 8c 12 68 24 00 10 db 7f 50 00 c6 91 e7 fa 4c |...h$....P.....L|
1096 01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
1097 01d0: 22 22 cc bf 84 81 0a 92 c1 aa 2a c7 1b 49 e6 ee |""........*..I..|
1097 01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
1098 01e0: 6b a9 7e e0 e9 b2 91 5e 7c 73 68 e0 fc 23 3f 34 |k.~....^|sh..#?4|
1098 01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
1099 01f0: ed cf 0e f2 b3 d3 4c d7 ae 59 33 6f 8c 3d b8 63 |......L..Y3o.=.c|
1099 0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
1100 0200: 21 2b e8 3d e0 6f 9d 3a b7 f9 dc 24 2a b2 3e a7 |!+.=.o.:...$*.>.|
1100 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
1101 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........|
1101 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
1102 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.|
1102 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
1103 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.|
1103 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
1104 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...|
1104 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
1105 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o|
1105 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
1106 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....|
1106 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
1107 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.|
1107 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
1108 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....|
1108 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
1109 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..|
1109 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1110 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)|
1110 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1111 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P|
1111 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1112 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. ..0.Q.l....Z|
1112 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1113 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T|
1113 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1114 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.|
1114 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1115 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...|
1115 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1116 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!|
1116 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1117 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q|
1117 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1118 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...|
1118 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1119 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".|
1119 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1120 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL|
1120 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1121 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....|
1121 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1122 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w||
1122 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1123 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..|
1123 $ hg debugbundle ../rev.hg2.bz
1124 $ hg debugbundle ../rev.hg2.bz
1124 Stream params: {Compression: GZ}
1125 Stream params: {Compression: GZ}
1125 changegroup -- {} (mandatory: False)
1126 changegroup -- {} (mandatory: False)
1126 32af7686d403cf45b5d95f2d70cebea587ac806a
1127 32af7686d403cf45b5d95f2d70cebea587ac806a
1127 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1128 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1128 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1129 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1129 02de42196ebee42ef284b6780a87cdc96e8eaab6
1130 02de42196ebee42ef284b6780a87cdc96e8eaab6
1130 $ hg unbundle ../rev.hg2.bz
1131 $ hg unbundle ../rev.hg2.bz
1131 adding changesets
1132 adding changesets
1132 adding manifests
1133 adding manifests
1133 adding file changes
1134 adding file changes
1134 added 0 changesets with 0 changes to 3 files
1135 added 0 changesets with 0 changes to 3 files
1135 (run 'hg update' to get a working copy)
1136 (run 'hg update' to get a working copy)
1136 Simple case where it just work: BZ
1137 Simple case where it just work: BZ
1137 ----------------------------------
1138 ----------------------------------
1138
1139
1139 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1140 $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz
1140 $ f --hexdump ../rev.hg2.bz
1141 $ f --hexdump ../rev.hg2.bz
1141 ../rev.hg2.bz:
1142 ../rev.hg2.bz:
1142 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1143 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress|
1143 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1144 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY|
1144 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1145 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....|
1145 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1146 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....|
1146 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1147 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q|
1147 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1148 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....|
1148 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1149 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@|
1149 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1150 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..|
1150 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1151 0080: 03 40 0c 08 da 86 43 d4 f5 0f 42 1e a0 f3 54 33 |.@....C...B...T3|
1151 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1152 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....|
1152 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1153 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.|
1153 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1154 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.|
1154 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1155 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....|
1155 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1156 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....|
1156 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1157 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.|
1157 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1158 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.|
1158 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1159 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.|
1159 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1160 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d|
1160 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1161 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......|
1161 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1162 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....|
1162 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1163 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1|
1163 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1164 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....|
1164 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1165 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.|
1165 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1166 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6|
1166 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1167 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....|
1167 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1168 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...|
1168 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1169 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..|
1169 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1170 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.|
1170 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1171 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S|
1171 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1172 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/|
1172 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1173 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. .8.....+..|
1173 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1174 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.|
1174 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1175 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..|
1175 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1176 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.|
1176 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1177 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..|
1177 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1178 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X|
1178 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1179 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...|
1179 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1180 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-|
1180 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1181 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..|
1181 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1182 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q|
1182 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1183 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.|
1183 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1184 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...|
1184 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1185 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..|
1185 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1186 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k|
1186 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1187 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(|
1187 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1188 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb|
1188 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1189 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F|
1189 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1190 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X|
1190 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1191 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..|
1191 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1192 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.|
1192 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1193 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..|
1193 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1194 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 2&J...).|
1194 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1195 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\|
1195 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1196 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.|
1196 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1197 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....|
1197 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1198 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..|
1198 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1199 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...|
1199 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1200 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2|
1200 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1201 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.|
1201 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1202 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.|
1202 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1203 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..|
1203 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1204 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.|
1204 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1205 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..|
1205 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1206 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.|
1206 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1207 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..|
1207 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1208 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9|
1208 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1209 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...|
1209 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1210 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z|
1210 $ hg debugbundle ../rev.hg2.bz
1211 $ hg debugbundle ../rev.hg2.bz
1211 Stream params: {Compression: BZ}
1212 Stream params: {Compression: BZ}
1212 changegroup -- {} (mandatory: False)
1213 changegroup -- {} (mandatory: False)
1213 32af7686d403cf45b5d95f2d70cebea587ac806a
1214 32af7686d403cf45b5d95f2d70cebea587ac806a
1214 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1215 9520eea781bcca16c1e15acc0ba14335a0e8e5ba
1215 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1216 eea13746799a9e0bfd88f29d3c2e9dc9389f524f
1216 02de42196ebee42ef284b6780a87cdc96e8eaab6
1217 02de42196ebee42ef284b6780a87cdc96e8eaab6
1217 $ hg unbundle ../rev.hg2.bz
1218 $ hg unbundle ../rev.hg2.bz
1218 adding changesets
1219 adding changesets
1219 adding manifests
1220 adding manifests
1220 adding file changes
1221 adding file changes
1221 added 0 changesets with 0 changes to 3 files
1222 added 0 changesets with 0 changes to 3 files
1222 (run 'hg update' to get a working copy)
1223 (run 'hg update' to get a working copy)
1223
1224
1224 unknown compression while unbundling
1225 unknown compression while unbundling
1225 -----------------------------
1226 -----------------------------
1226
1227
1227 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1228 $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz
1228 $ cat ../rev.hg2.bz | hg statbundle2
1229 $ cat ../rev.hg2.bz | hg statbundle2
1229 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1230 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown'
1230 [255]
1231 [255]
1231 $ hg unbundle ../rev.hg2.bz
1232 $ hg unbundle ../rev.hg2.bz
1232 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1233 abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown'
1233 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1234 (see https://mercurial-scm.org/wiki/BundleFeature for more information)
1234 [255]
1235 [255]
1235
1236
1236 $ cd ..
1237 $ cd ..
@@ -1,94 +1,94 b''
1 ------ Test dirstate._dirs refcounting
1 ------ Test dirstate._dirs refcounting
2
2
3 $ hg init t
3 $ hg init t
4 $ cd t
4 $ cd t
5 $ mkdir -p a/b/c/d
5 $ mkdir -p a/b/c/d
6 $ touch a/b/c/d/x
6 $ touch a/b/c/d/x
7 $ touch a/b/c/d/y
7 $ touch a/b/c/d/y
8 $ touch a/b/c/d/z
8 $ touch a/b/c/d/z
9 $ hg ci -Am m
9 $ hg ci -Am m
10 adding a/b/c/d/x
10 adding a/b/c/d/x
11 adding a/b/c/d/y
11 adding a/b/c/d/y
12 adding a/b/c/d/z
12 adding a/b/c/d/z
13 $ hg mv a z
13 $ hg mv a z
14 moving a/b/c/d/x to z/b/c/d/x
14 moving a/b/c/d/x to z/b/c/d/x
15 moving a/b/c/d/y to z/b/c/d/y
15 moving a/b/c/d/y to z/b/c/d/y
16 moving a/b/c/d/z to z/b/c/d/z
16 moving a/b/c/d/z to z/b/c/d/z
17
17
18 Test name collisions
18 Test name collisions
19
19
20 $ rm z/b/c/d/x
20 $ rm z/b/c/d/x
21 $ mkdir z/b/c/d/x
21 $ mkdir z/b/c/d/x
22 $ touch z/b/c/d/x/y
22 $ touch z/b/c/d/x/y
23 $ hg add z/b/c/d/x/y
23 $ hg add z/b/c/d/x/y
24 abort: file 'z/b/c/d/x' in dirstate clashes with 'z/b/c/d/x/y'
24 abort: file 'z/b/c/d/x' in dirstate clashes with 'z/b/c/d/x/y'
25 [255]
25 [255]
26 $ rm -rf z/b/c/d
26 $ rm -rf z/b/c/d
27 $ touch z/b/c/d
27 $ touch z/b/c/d
28 $ hg add z/b/c/d
28 $ hg add z/b/c/d
29 abort: directory 'z/b/c/d' already in dirstate
29 abort: directory 'z/b/c/d' already in dirstate
30 [255]
30 [255]
31
31
32 $ cd ..
32 $ cd ..
33
33
34 Issue1790: dirstate entry locked into unset if file mtime is set into
34 Issue1790: dirstate entry locked into unset if file mtime is set into
35 the future
35 the future
36
36
37 Prepare test repo:
37 Prepare test repo:
38
38
39 $ hg init u
39 $ hg init u
40 $ cd u
40 $ cd u
41 $ echo a > a
41 $ echo a > a
42 $ hg add
42 $ hg add
43 adding a
43 adding a
44 $ hg ci -m1
44 $ hg ci -m1
45
45
46 Set mtime of a into the future:
46 Set mtime of a into the future:
47
47
48 $ touch -t 202101011200 a
48 $ touch -t 202101011200 a
49
49
50 Status must not set a's entry to unset (issue1790):
50 Status must not set a's entry to unset (issue1790):
51
51
52 $ hg status
52 $ hg status
53 $ hg debugstate
53 $ hg debugstate
54 n 644 2 2021-01-01 12:00:00 a
54 n 644 2 2021-01-01 12:00:00 a
55
55
56 Test modulo storage/comparison of absurd dates:
56 Test modulo storage/comparison of absurd dates:
57
57
58 #if no-aix
58 #if no-aix
59 $ touch -t 195001011200 a
59 $ touch -t 195001011200 a
60 $ hg st
60 $ hg st
61 $ hg debugstate
61 $ hg debugstate
62 n 644 2 2018-01-19 15:14:08 a
62 n 644 2 2018-01-19 15:14:08 a
63 #endif
63 #endif
64
64
65 Verify that exceptions during a dirstate change leave the dirstate
65 Verify that exceptions during a dirstate change leave the dirstate
66 coherent (issue4353)
66 coherent (issue4353)
67
67
68 $ cat > ../dirstateexception.py <<EOF
68 $ cat > ../dirstateexception.py <<EOF
69 > from __future__ import absolute_import
69 > from __future__ import absolute_import
70 > from mercurial import (
70 > from mercurial import (
71 > error,
71 > error,
72 > extensions,
72 > extensions,
73 > mergestate as mergestatemod,
73 > mergestate as mergestatemod,
74 > )
74 > )
75 >
75 >
76 > def wraprecordupdates(*args):
76 > def wraprecordupdates(*args):
77 > raise error.Abort("simulated error while recording dirstateupdates")
77 > raise error.Abort(b"simulated error while recording dirstateupdates")
78 >
78 >
79 > def reposetup(ui, repo):
79 > def reposetup(ui, repo):
80 > extensions.wrapfunction(mergestatemod, 'recordupdates',
80 > extensions.wrapfunction(mergestatemod, 'recordupdates',
81 > wraprecordupdates)
81 > wraprecordupdates)
82 > EOF
82 > EOF
83
83
84 $ hg rm a
84 $ hg rm a
85 $ hg commit -m 'rm a'
85 $ hg commit -m 'rm a'
86 $ echo "[extensions]" >> .hg/hgrc
86 $ echo "[extensions]" >> .hg/hgrc
87 $ echo "dirstateex=../dirstateexception.py" >> .hg/hgrc
87 $ echo "dirstateex=../dirstateexception.py" >> .hg/hgrc
88 $ hg up 0
88 $ hg up 0
89 abort: simulated error while recording dirstateupdates
89 abort: simulated error while recording dirstateupdates
90 [255]
90 [255]
91 $ hg log -r . -T '{rev}\n'
91 $ hg log -r . -T '{rev}\n'
92 1
92 1
93 $ hg status
93 $ hg status
94 ? a
94 ? a
@@ -1,545 +1,545 b''
1 #require repofncache
1 #require repofncache
2
2
3 An extension which will set fncache chunksize to 1 byte to make sure that logic
3 An extension which will set fncache chunksize to 1 byte to make sure that logic
4 does not break
4 does not break
5
5
6 $ cat > chunksize.py <<EOF
6 $ cat > chunksize.py <<EOF
7 > from __future__ import absolute_import
7 > from __future__ import absolute_import
8 > from mercurial import store
8 > from mercurial import store
9 > store.fncache_chunksize = 1
9 > store.fncache_chunksize = 1
10 > EOF
10 > EOF
11
11
12 $ cat >> $HGRCPATH <<EOF
12 $ cat >> $HGRCPATH <<EOF
13 > [extensions]
13 > [extensions]
14 > chunksize = $TESTTMP/chunksize.py
14 > chunksize = $TESTTMP/chunksize.py
15 > EOF
15 > EOF
16
16
17 Init repo1:
17 Init repo1:
18
18
19 $ hg init repo1
19 $ hg init repo1
20 $ cd repo1
20 $ cd repo1
21 $ echo "some text" > a
21 $ echo "some text" > a
22 $ hg add
22 $ hg add
23 adding a
23 adding a
24 $ hg ci -m first
24 $ hg ci -m first
25 $ cat .hg/store/fncache | sort
25 $ cat .hg/store/fncache | sort
26 data/a.i
26 data/a.i
27
27
28 Testing a.i/b:
28 Testing a.i/b:
29
29
30 $ mkdir a.i
30 $ mkdir a.i
31 $ echo "some other text" > a.i/b
31 $ echo "some other text" > a.i/b
32 $ hg add
32 $ hg add
33 adding a.i/b
33 adding a.i/b
34 $ hg ci -m second
34 $ hg ci -m second
35 $ cat .hg/store/fncache | sort
35 $ cat .hg/store/fncache | sort
36 data/a.i
36 data/a.i
37 data/a.i.hg/b.i
37 data/a.i.hg/b.i
38
38
39 Testing a.i.hg/c:
39 Testing a.i.hg/c:
40
40
41 $ mkdir a.i.hg
41 $ mkdir a.i.hg
42 $ echo "yet another text" > a.i.hg/c
42 $ echo "yet another text" > a.i.hg/c
43 $ hg add
43 $ hg add
44 adding a.i.hg/c
44 adding a.i.hg/c
45 $ hg ci -m third
45 $ hg ci -m third
46 $ cat .hg/store/fncache | sort
46 $ cat .hg/store/fncache | sort
47 data/a.i
47 data/a.i
48 data/a.i.hg.hg/c.i
48 data/a.i.hg.hg/c.i
49 data/a.i.hg/b.i
49 data/a.i.hg/b.i
50
50
51 Testing verify:
51 Testing verify:
52
52
53 $ hg verify
53 $ hg verify
54 checking changesets
54 checking changesets
55 checking manifests
55 checking manifests
56 crosschecking files in changesets and manifests
56 crosschecking files in changesets and manifests
57 checking files
57 checking files
58 checked 3 changesets with 3 changes to 3 files
58 checked 3 changesets with 3 changes to 3 files
59
59
60 $ rm .hg/store/fncache
60 $ rm .hg/store/fncache
61
61
62 $ hg verify
62 $ hg verify
63 checking changesets
63 checking changesets
64 checking manifests
64 checking manifests
65 crosschecking files in changesets and manifests
65 crosschecking files in changesets and manifests
66 checking files
66 checking files
67 warning: revlog 'data/a.i' not in fncache!
67 warning: revlog 'data/a.i' not in fncache!
68 warning: revlog 'data/a.i.hg/c.i' not in fncache!
68 warning: revlog 'data/a.i.hg/c.i' not in fncache!
69 warning: revlog 'data/a.i/b.i' not in fncache!
69 warning: revlog 'data/a.i/b.i' not in fncache!
70 checked 3 changesets with 3 changes to 3 files
70 checked 3 changesets with 3 changes to 3 files
71 3 warnings encountered!
71 3 warnings encountered!
72 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
72 hint: run "hg debugrebuildfncache" to recover from corrupt fncache
73
73
74 Follow the hint to make sure it works
74 Follow the hint to make sure it works
75
75
76 $ hg debugrebuildfncache
76 $ hg debugrebuildfncache
77 adding data/a.i
77 adding data/a.i
78 adding data/a.i.hg/c.i
78 adding data/a.i.hg/c.i
79 adding data/a.i/b.i
79 adding data/a.i/b.i
80 3 items added, 0 removed from fncache
80 3 items added, 0 removed from fncache
81
81
82 $ hg verify
82 $ hg verify
83 checking changesets
83 checking changesets
84 checking manifests
84 checking manifests
85 crosschecking files in changesets and manifests
85 crosschecking files in changesets and manifests
86 checking files
86 checking files
87 checked 3 changesets with 3 changes to 3 files
87 checked 3 changesets with 3 changes to 3 files
88
88
89 $ cd ..
89 $ cd ..
90
90
91 Non store repo:
91 Non store repo:
92
92
93 $ hg --config format.usestore=False init foo
93 $ hg --config format.usestore=False init foo
94 $ cd foo
94 $ cd foo
95 $ mkdir tst.d
95 $ mkdir tst.d
96 $ echo foo > tst.d/foo
96 $ echo foo > tst.d/foo
97 $ hg ci -Amfoo
97 $ hg ci -Amfoo
98 adding tst.d/foo
98 adding tst.d/foo
99 $ find .hg | sort
99 $ find .hg | sort
100 .hg
100 .hg
101 .hg/00changelog.i
101 .hg/00changelog.i
102 .hg/00manifest.i
102 .hg/00manifest.i
103 .hg/cache
103 .hg/cache
104 .hg/cache/branch2-served
104 .hg/cache/branch2-served
105 .hg/cache/rbc-names-v1
105 .hg/cache/rbc-names-v1
106 .hg/cache/rbc-revs-v1
106 .hg/cache/rbc-revs-v1
107 .hg/data
107 .hg/data
108 .hg/data/tst.d.hg
108 .hg/data/tst.d.hg
109 .hg/data/tst.d.hg/foo.i
109 .hg/data/tst.d.hg/foo.i
110 .hg/dirstate
110 .hg/dirstate
111 .hg/fsmonitor.state (fsmonitor !)
111 .hg/fsmonitor.state (fsmonitor !)
112 .hg/last-message.txt
112 .hg/last-message.txt
113 .hg/phaseroots
113 .hg/phaseroots
114 .hg/requires
114 .hg/requires
115 .hg/undo
115 .hg/undo
116 .hg/undo.backup.dirstate
116 .hg/undo.backup.dirstate
117 .hg/undo.backupfiles
117 .hg/undo.backupfiles
118 .hg/undo.bookmarks
118 .hg/undo.bookmarks
119 .hg/undo.branch
119 .hg/undo.branch
120 .hg/undo.desc
120 .hg/undo.desc
121 .hg/undo.dirstate
121 .hg/undo.dirstate
122 .hg/undo.phaseroots
122 .hg/undo.phaseroots
123 .hg/wcache
123 .hg/wcache
124 .hg/wcache/checkisexec (execbit !)
124 .hg/wcache/checkisexec (execbit !)
125 .hg/wcache/checklink (symlink !)
125 .hg/wcache/checklink (symlink !)
126 .hg/wcache/checklink-target (symlink !)
126 .hg/wcache/checklink-target (symlink !)
127 .hg/wcache/manifestfulltextcache (reporevlogstore !)
127 .hg/wcache/manifestfulltextcache (reporevlogstore !)
128 $ cd ..
128 $ cd ..
129
129
130 Non fncache repo:
130 Non fncache repo:
131
131
132 $ hg --config format.usefncache=False init bar
132 $ hg --config format.usefncache=False init bar
133 $ cd bar
133 $ cd bar
134 $ mkdir tst.d
134 $ mkdir tst.d
135 $ echo foo > tst.d/Foo
135 $ echo foo > tst.d/Foo
136 $ hg ci -Amfoo
136 $ hg ci -Amfoo
137 adding tst.d/Foo
137 adding tst.d/Foo
138 $ find .hg | sort
138 $ find .hg | sort
139 .hg
139 .hg
140 .hg/00changelog.i
140 .hg/00changelog.i
141 .hg/cache
141 .hg/cache
142 .hg/cache/branch2-served
142 .hg/cache/branch2-served
143 .hg/cache/rbc-names-v1
143 .hg/cache/rbc-names-v1
144 .hg/cache/rbc-revs-v1
144 .hg/cache/rbc-revs-v1
145 .hg/dirstate
145 .hg/dirstate
146 .hg/fsmonitor.state (fsmonitor !)
146 .hg/fsmonitor.state (fsmonitor !)
147 .hg/last-message.txt
147 .hg/last-message.txt
148 .hg/requires
148 .hg/requires
149 .hg/store
149 .hg/store
150 .hg/store/00changelog.i
150 .hg/store/00changelog.i
151 .hg/store/00manifest.i
151 .hg/store/00manifest.i
152 .hg/store/data
152 .hg/store/data
153 .hg/store/data/tst.d.hg
153 .hg/store/data/tst.d.hg
154 .hg/store/data/tst.d.hg/_foo.i
154 .hg/store/data/tst.d.hg/_foo.i
155 .hg/store/phaseroots
155 .hg/store/phaseroots
156 .hg/store/undo
156 .hg/store/undo
157 .hg/store/undo.backupfiles
157 .hg/store/undo.backupfiles
158 .hg/store/undo.phaseroots
158 .hg/store/undo.phaseroots
159 .hg/undo.backup.dirstate
159 .hg/undo.backup.dirstate
160 .hg/undo.bookmarks
160 .hg/undo.bookmarks
161 .hg/undo.branch
161 .hg/undo.branch
162 .hg/undo.desc
162 .hg/undo.desc
163 .hg/undo.dirstate
163 .hg/undo.dirstate
164 .hg/wcache
164 .hg/wcache
165 .hg/wcache/checkisexec (execbit !)
165 .hg/wcache/checkisexec (execbit !)
166 .hg/wcache/checklink (symlink !)
166 .hg/wcache/checklink (symlink !)
167 .hg/wcache/checklink-target (symlink !)
167 .hg/wcache/checklink-target (symlink !)
168 .hg/wcache/manifestfulltextcache (reporevlogstore !)
168 .hg/wcache/manifestfulltextcache (reporevlogstore !)
169 $ cd ..
169 $ cd ..
170
170
171 Encoding of reserved / long paths in the store
171 Encoding of reserved / long paths in the store
172
172
173 $ hg init r2
173 $ hg init r2
174 $ cd r2
174 $ cd r2
175 $ cat <<EOF > .hg/hgrc
175 $ cat <<EOF > .hg/hgrc
176 > [ui]
176 > [ui]
177 > portablefilenames = ignore
177 > portablefilenames = ignore
178 > EOF
178 > EOF
179
179
180 $ hg import -q --bypass - <<EOF
180 $ hg import -q --bypass - <<EOF
181 > # HG changeset patch
181 > # HG changeset patch
182 > # User test
182 > # User test
183 > # Date 0 0
183 > # Date 0 0
184 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
184 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7
185 > # Parent 0000000000000000000000000000000000000000
185 > # Parent 0000000000000000000000000000000000000000
186 > 1
186 > 1
187 >
187 >
188 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
188 > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
189 > new file mode 100644
189 > new file mode 100644
190 > --- /dev/null
190 > --- /dev/null
191 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
191 > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz
192 > @@ -0,0 +1,1 @@
192 > @@ -0,0 +1,1 @@
193 > +foo
193 > +foo
194 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
194 > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
195 > new file mode 100644
195 > new file mode 100644
196 > --- /dev/null
196 > --- /dev/null
197 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
197 > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT
198 > @@ -0,0 +1,1 @@
198 > @@ -0,0 +1,1 @@
199 > +foo
199 > +foo
200 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
200 > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
201 > new file mode 100644
201 > new file mode 100644
202 > --- /dev/null
202 > --- /dev/null
203 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
203 > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt
204 > @@ -0,0 +1,1 @@
204 > @@ -0,0 +1,1 @@
205 > +foo
205 > +foo
206 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
206 > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
207 > new file mode 100644
207 > new file mode 100644
208 > --- /dev/null
208 > --- /dev/null
209 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
209 > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c
210 > @@ -0,0 +1,1 @@
210 > @@ -0,0 +1,1 @@
211 > +foo
211 > +foo
212 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
212 > diff --git a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
213 > new file mode 100644
213 > new file mode 100644
214 > --- /dev/null
214 > --- /dev/null
215 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
215 > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider
216 > @@ -0,0 +1,1 @@
216 > @@ -0,0 +1,1 @@
217 > +foo
217 > +foo
218 > EOF
218 > EOF
219
219
220 $ find .hg/store -name *.i | sort
220 $ find .hg/store -name *.i | sort
221 .hg/store/00changelog.i
221 .hg/store/00changelog.i
222 .hg/store/00manifest.i
222 .hg/store/00manifest.i
223 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
223 .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i
224 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
224 .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i
225 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
225 .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i
226 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
226 .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i
227 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
227 .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i
228
228
229 $ cd ..
229 $ cd ..
230
230
231 Aborting lock does not prevent fncache writes
231 Aborting lock does not prevent fncache writes
232
232
233 $ cat > exceptionext.py <<EOF
233 $ cat > exceptionext.py <<EOF
234 > from __future__ import absolute_import
234 > from __future__ import absolute_import
235 > import os
235 > import os
236 > from mercurial import commands, error, extensions
236 > from mercurial import commands, error, extensions
237 >
237 >
238 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
238 > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs):
239 > def releasewrap():
239 > def releasewrap():
240 > l.held = False # ensure __del__ is a noop
240 > l.held = False # ensure __del__ is a noop
241 > raise error.Abort("forced lock failure")
241 > raise error.Abort(b"forced lock failure")
242 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
242 > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs)
243 > return l
243 > return l
244 >
244 >
245 > def reposetup(ui, repo):
245 > def reposetup(ui, repo):
246 > extensions.wrapfunction(repo, '_lock', lockexception)
246 > extensions.wrapfunction(repo, '_lock', lockexception)
247 >
247 >
248 > cmdtable = {}
248 > cmdtable = {}
249 >
249 >
250 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
250 > # wrap "commit" command to prevent wlock from being '__del__()'-ed
251 > # at the end of dispatching (for intentional "forced lcok failure")
251 > # at the end of dispatching (for intentional "forced lcok failure")
252 > def commitwrap(orig, ui, repo, *pats, **opts):
252 > def commitwrap(orig, ui, repo, *pats, **opts):
253 > repo = repo.unfiltered() # to use replaced repo._lock certainly
253 > repo = repo.unfiltered() # to use replaced repo._lock certainly
254 > wlock = repo.wlock()
254 > wlock = repo.wlock()
255 > try:
255 > try:
256 > return orig(ui, repo, *pats, **opts)
256 > return orig(ui, repo, *pats, **opts)
257 > finally:
257 > finally:
258 > # multiple 'relase()' is needed for complete releasing wlock,
258 > # multiple 'relase()' is needed for complete releasing wlock,
259 > # because "forced" abort at last releasing store lock
259 > # because "forced" abort at last releasing store lock
260 > # prevents wlock from being released at same 'lockmod.release()'
260 > # prevents wlock from being released at same 'lockmod.release()'
261 > for i in range(wlock.held):
261 > for i in range(wlock.held):
262 > wlock.release()
262 > wlock.release()
263 >
263 >
264 > def extsetup(ui):
264 > def extsetup(ui):
265 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
265 > extensions.wrapcommand(commands.table, b"commit", commitwrap)
266 > EOF
266 > EOF
267 $ extpath=`pwd`/exceptionext.py
267 $ extpath=`pwd`/exceptionext.py
268 $ hg init fncachetxn
268 $ hg init fncachetxn
269 $ cd fncachetxn
269 $ cd fncachetxn
270 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
270 $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc
271 $ touch y
271 $ touch y
272 $ hg ci -qAm y
272 $ hg ci -qAm y
273 abort: forced lock failure
273 abort: forced lock failure
274 [255]
274 [255]
275 $ cat .hg/store/fncache
275 $ cat .hg/store/fncache
276 data/y.i
276 data/y.i
277
277
278 Aborting transaction prevents fncache change
278 Aborting transaction prevents fncache change
279
279
280 $ cat > ../exceptionext.py <<EOF
280 $ cat > ../exceptionext.py <<EOF
281 > from __future__ import absolute_import
281 > from __future__ import absolute_import
282 > import os
282 > import os
283 > from mercurial import commands, error, extensions, localrepo
283 > from mercurial import commands, error, extensions, localrepo
284 >
284 >
285 > def wrapper(orig, self, *args, **kwargs):
285 > def wrapper(orig, self, *args, **kwargs):
286 > tr = orig(self, *args, **kwargs)
286 > tr = orig(self, *args, **kwargs)
287 > def fail(tr):
287 > def fail(tr):
288 > raise error.Abort(b"forced transaction failure")
288 > raise error.Abort(b"forced transaction failure")
289 > # zzz prefix to ensure it sorted after store.write
289 > # zzz prefix to ensure it sorted after store.write
290 > tr.addfinalize(b'zzz-forcefails', fail)
290 > tr.addfinalize(b'zzz-forcefails', fail)
291 > return tr
291 > return tr
292 >
292 >
293 > def uisetup(ui):
293 > def uisetup(ui):
294 > extensions.wrapfunction(
294 > extensions.wrapfunction(
295 > localrepo.localrepository, b'transaction', wrapper)
295 > localrepo.localrepository, b'transaction', wrapper)
296 >
296 >
297 > cmdtable = {}
297 > cmdtable = {}
298 >
298 >
299 > EOF
299 > EOF
300
300
301 Clean cached version
301 Clean cached version
302 $ rm -f "${extpath}c"
302 $ rm -f "${extpath}c"
303 $ rm -Rf "`dirname $extpath`/__pycache__"
303 $ rm -Rf "`dirname $extpath`/__pycache__"
304
304
305 $ touch z
305 $ touch z
306 $ hg ci -qAm z
306 $ hg ci -qAm z
307 transaction abort!
307 transaction abort!
308 rollback completed
308 rollback completed
309 abort: forced transaction failure
309 abort: forced transaction failure
310 [255]
310 [255]
311 $ cat .hg/store/fncache
311 $ cat .hg/store/fncache
312 data/y.i
312 data/y.i
313
313
314 Aborted transactions can be recovered later
314 Aborted transactions can be recovered later
315
315
316 $ cat > ../exceptionext.py <<EOF
316 $ cat > ../exceptionext.py <<EOF
317 > from __future__ import absolute_import
317 > from __future__ import absolute_import
318 > import os
318 > import os
319 > from mercurial import (
319 > from mercurial import (
320 > commands,
320 > commands,
321 > error,
321 > error,
322 > extensions,
322 > extensions,
323 > localrepo,
323 > localrepo,
324 > transaction,
324 > transaction,
325 > )
325 > )
326 >
326 >
327 > def trwrapper(orig, self, *args, **kwargs):
327 > def trwrapper(orig, self, *args, **kwargs):
328 > tr = orig(self, *args, **kwargs)
328 > tr = orig(self, *args, **kwargs)
329 > def fail(tr):
329 > def fail(tr):
330 > raise error.Abort(b"forced transaction failure")
330 > raise error.Abort(b"forced transaction failure")
331 > # zzz prefix to ensure it sorted after store.write
331 > # zzz prefix to ensure it sorted after store.write
332 > tr.addfinalize(b'zzz-forcefails', fail)
332 > tr.addfinalize(b'zzz-forcefails', fail)
333 > return tr
333 > return tr
334 >
334 >
335 > def abortwrapper(orig, self, *args, **kwargs):
335 > def abortwrapper(orig, self, *args, **kwargs):
336 > raise error.Abort(b"forced transaction failure")
336 > raise error.Abort(b"forced transaction failure")
337 >
337 >
338 > def uisetup(ui):
338 > def uisetup(ui):
339 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
339 > extensions.wrapfunction(localrepo.localrepository, 'transaction',
340 > trwrapper)
340 > trwrapper)
341 > extensions.wrapfunction(transaction.transaction, '_abort',
341 > extensions.wrapfunction(transaction.transaction, '_abort',
342 > abortwrapper)
342 > abortwrapper)
343 >
343 >
344 > cmdtable = {}
344 > cmdtable = {}
345 >
345 >
346 > EOF
346 > EOF
347
347
348 Clean cached versions
348 Clean cached versions
349 $ rm -f "${extpath}c"
349 $ rm -f "${extpath}c"
350 $ rm -Rf "`dirname $extpath`/__pycache__"
350 $ rm -Rf "`dirname $extpath`/__pycache__"
351
351
352 $ hg up -q 1
352 $ hg up -q 1
353 $ touch z
353 $ touch z
354 $ hg ci -qAm z 2>/dev/null
354 $ hg ci -qAm z 2>/dev/null
355 [255]
355 [255]
356 $ cat .hg/store/fncache | sort
356 $ cat .hg/store/fncache | sort
357 data/y.i
357 data/y.i
358 data/z.i
358 data/z.i
359 $ hg recover --verify
359 $ hg recover --verify
360 rolling back interrupted transaction
360 rolling back interrupted transaction
361 checking changesets
361 checking changesets
362 checking manifests
362 checking manifests
363 crosschecking files in changesets and manifests
363 crosschecking files in changesets and manifests
364 checking files
364 checking files
365 checked 1 changesets with 1 changes to 1 files
365 checked 1 changesets with 1 changes to 1 files
366 $ cat .hg/store/fncache
366 $ cat .hg/store/fncache
367 data/y.i
367 data/y.i
368
368
369 $ cd ..
369 $ cd ..
370
370
371 debugrebuildfncache does nothing unless repo has fncache requirement
371 debugrebuildfncache does nothing unless repo has fncache requirement
372
372
373 $ hg --config format.usefncache=false init nofncache
373 $ hg --config format.usefncache=false init nofncache
374 $ cd nofncache
374 $ cd nofncache
375 $ hg debugrebuildfncache
375 $ hg debugrebuildfncache
376 (not rebuilding fncache because repository does not support fncache)
376 (not rebuilding fncache because repository does not support fncache)
377
377
378 $ cd ..
378 $ cd ..
379
379
380 debugrebuildfncache works on empty repository
380 debugrebuildfncache works on empty repository
381
381
382 $ hg init empty
382 $ hg init empty
383 $ cd empty
383 $ cd empty
384 $ hg debugrebuildfncache
384 $ hg debugrebuildfncache
385 fncache already up to date
385 fncache already up to date
386 $ cd ..
386 $ cd ..
387
387
388 debugrebuildfncache on an up to date repository no-ops
388 debugrebuildfncache on an up to date repository no-ops
389
389
390 $ hg init repo
390 $ hg init repo
391 $ cd repo
391 $ cd repo
392 $ echo initial > foo
392 $ echo initial > foo
393 $ echo initial > .bar
393 $ echo initial > .bar
394 $ hg commit -A -m initial
394 $ hg commit -A -m initial
395 adding .bar
395 adding .bar
396 adding foo
396 adding foo
397
397
398 $ cat .hg/store/fncache | sort
398 $ cat .hg/store/fncache | sort
399 data/.bar.i
399 data/.bar.i
400 data/foo.i
400 data/foo.i
401
401
402 $ hg debugrebuildfncache
402 $ hg debugrebuildfncache
403 fncache already up to date
403 fncache already up to date
404
404
405 debugrebuildfncache restores deleted fncache file
405 debugrebuildfncache restores deleted fncache file
406
406
407 $ rm -f .hg/store/fncache
407 $ rm -f .hg/store/fncache
408 $ hg debugrebuildfncache
408 $ hg debugrebuildfncache
409 adding data/.bar.i
409 adding data/.bar.i
410 adding data/foo.i
410 adding data/foo.i
411 2 items added, 0 removed from fncache
411 2 items added, 0 removed from fncache
412
412
413 $ cat .hg/store/fncache | sort
413 $ cat .hg/store/fncache | sort
414 data/.bar.i
414 data/.bar.i
415 data/foo.i
415 data/foo.i
416
416
417 Rebuild after rebuild should no-op
417 Rebuild after rebuild should no-op
418
418
419 $ hg debugrebuildfncache
419 $ hg debugrebuildfncache
420 fncache already up to date
420 fncache already up to date
421
421
422 A single missing file should get restored, an extra file should be removed
422 A single missing file should get restored, an extra file should be removed
423
423
424 $ cat > .hg/store/fncache << EOF
424 $ cat > .hg/store/fncache << EOF
425 > data/foo.i
425 > data/foo.i
426 > data/bad-entry.i
426 > data/bad-entry.i
427 > EOF
427 > EOF
428
428
429 $ hg debugrebuildfncache
429 $ hg debugrebuildfncache
430 removing data/bad-entry.i
430 removing data/bad-entry.i
431 adding data/.bar.i
431 adding data/.bar.i
432 1 items added, 1 removed from fncache
432 1 items added, 1 removed from fncache
433
433
434 $ cat .hg/store/fncache | sort
434 $ cat .hg/store/fncache | sort
435 data/.bar.i
435 data/.bar.i
436 data/foo.i
436 data/foo.i
437
437
438 debugrebuildfncache recovers from truncated line in fncache
438 debugrebuildfncache recovers from truncated line in fncache
439
439
440 $ printf a > .hg/store/fncache
440 $ printf a > .hg/store/fncache
441 $ hg debugrebuildfncache
441 $ hg debugrebuildfncache
442 fncache does not ends with a newline
442 fncache does not ends with a newline
443 adding data/.bar.i
443 adding data/.bar.i
444 adding data/foo.i
444 adding data/foo.i
445 2 items added, 0 removed from fncache
445 2 items added, 0 removed from fncache
446
446
447 $ cat .hg/store/fncache | sort
447 $ cat .hg/store/fncache | sort
448 data/.bar.i
448 data/.bar.i
449 data/foo.i
449 data/foo.i
450
450
451 $ cd ..
451 $ cd ..
452
452
453 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
453 Try a simple variation without dotencode to ensure fncache is ignorant of encoding
454
454
455 $ hg --config format.dotencode=false init nodotencode
455 $ hg --config format.dotencode=false init nodotencode
456 $ cd nodotencode
456 $ cd nodotencode
457 $ echo initial > foo
457 $ echo initial > foo
458 $ echo initial > .bar
458 $ echo initial > .bar
459 $ hg commit -A -m initial
459 $ hg commit -A -m initial
460 adding .bar
460 adding .bar
461 adding foo
461 adding foo
462
462
463 $ cat .hg/store/fncache | sort
463 $ cat .hg/store/fncache | sort
464 data/.bar.i
464 data/.bar.i
465 data/foo.i
465 data/foo.i
466
466
467 $ rm .hg/store/fncache
467 $ rm .hg/store/fncache
468 $ hg debugrebuildfncache
468 $ hg debugrebuildfncache
469 adding data/.bar.i
469 adding data/.bar.i
470 adding data/foo.i
470 adding data/foo.i
471 2 items added, 0 removed from fncache
471 2 items added, 0 removed from fncache
472
472
473 $ cat .hg/store/fncache | sort
473 $ cat .hg/store/fncache | sort
474 data/.bar.i
474 data/.bar.i
475 data/foo.i
475 data/foo.i
476
476
477 $ cd ..
477 $ cd ..
478
478
479 In repositories that have accumulated a large number of files over time, the
479 In repositories that have accumulated a large number of files over time, the
480 fncache file is going to be large. If we possibly can avoid loading it, so much the better.
480 fncache file is going to be large. If we possibly can avoid loading it, so much the better.
481 The cache should not loaded when committing changes to existing files, or when unbundling
481 The cache should not loaded when committing changes to existing files, or when unbundling
482 changesets that only contain changes to existing files:
482 changesets that only contain changes to existing files:
483
483
484 $ cat > fncacheloadwarn.py << EOF
484 $ cat > fncacheloadwarn.py << EOF
485 > from __future__ import absolute_import
485 > from __future__ import absolute_import
486 > from mercurial import extensions, localrepo
486 > from mercurial import extensions, localrepo
487 >
487 >
488 > def extsetup(ui):
488 > def extsetup(ui):
489 > def wrapstore(orig, requirements, *args):
489 > def wrapstore(orig, requirements, *args):
490 > store = orig(requirements, *args)
490 > store = orig(requirements, *args)
491 > if b'store' in requirements and b'fncache' in requirements:
491 > if b'store' in requirements and b'fncache' in requirements:
492 > instrumentfncachestore(store, ui)
492 > instrumentfncachestore(store, ui)
493 > return store
493 > return store
494 > extensions.wrapfunction(localrepo, 'makestore', wrapstore)
494 > extensions.wrapfunction(localrepo, 'makestore', wrapstore)
495 >
495 >
496 > def instrumentfncachestore(fncachestore, ui):
496 > def instrumentfncachestore(fncachestore, ui):
497 > class instrumentedfncache(type(fncachestore.fncache)):
497 > class instrumentedfncache(type(fncachestore.fncache)):
498 > def _load(self):
498 > def _load(self):
499 > ui.warn(b'fncache load triggered!\n')
499 > ui.warn(b'fncache load triggered!\n')
500 > super(instrumentedfncache, self)._load()
500 > super(instrumentedfncache, self)._load()
501 > fncachestore.fncache.__class__ = instrumentedfncache
501 > fncachestore.fncache.__class__ = instrumentedfncache
502 > EOF
502 > EOF
503
503
504 $ fncachextpath=`pwd`/fncacheloadwarn.py
504 $ fncachextpath=`pwd`/fncacheloadwarn.py
505 $ hg init nofncacheload
505 $ hg init nofncacheload
506 $ cd nofncacheload
506 $ cd nofncacheload
507 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
507 $ printf "[extensions]\nfncacheloadwarn=$fncachextpath\n" >> .hg/hgrc
508
508
509 A new file should trigger a load, as we'd want to update the fncache set in that case:
509 A new file should trigger a load, as we'd want to update the fncache set in that case:
510
510
511 $ touch foo
511 $ touch foo
512 $ hg ci -qAm foo
512 $ hg ci -qAm foo
513 fncache load triggered!
513 fncache load triggered!
514
514
515 But modifying that file should not:
515 But modifying that file should not:
516
516
517 $ echo bar >> foo
517 $ echo bar >> foo
518 $ hg ci -qm foo
518 $ hg ci -qm foo
519
519
520 If a transaction has been aborted, the zero-size truncated index file will
520 If a transaction has been aborted, the zero-size truncated index file will
521 not prevent the fncache from being loaded; rather than actually abort
521 not prevent the fncache from being loaded; rather than actually abort
522 a transaction, we simulate the situation by creating a zero-size index file:
522 a transaction, we simulate the situation by creating a zero-size index file:
523
523
524 $ touch .hg/store/data/bar.i
524 $ touch .hg/store/data/bar.i
525 $ touch bar
525 $ touch bar
526 $ hg ci -qAm bar
526 $ hg ci -qAm bar
527 fncache load triggered!
527 fncache load triggered!
528
528
529 Unbundling should follow the same rules; existing files should not cause a load:
529 Unbundling should follow the same rules; existing files should not cause a load:
530
530
531 $ hg clone -q . tobundle
531 $ hg clone -q . tobundle
532 $ echo 'new line' > tobundle/bar
532 $ echo 'new line' > tobundle/bar
533 $ hg -R tobundle ci -qm bar
533 $ hg -R tobundle ci -qm bar
534 $ hg -R tobundle bundle -q barupdated.hg
534 $ hg -R tobundle bundle -q barupdated.hg
535 $ hg unbundle -q barupdated.hg
535 $ hg unbundle -q barupdated.hg
536
536
537 but adding new files should:
537 but adding new files should:
538
538
539 $ touch tobundle/newfile
539 $ touch tobundle/newfile
540 $ hg -R tobundle ci -qAm newfile
540 $ hg -R tobundle ci -qAm newfile
541 $ hg -R tobundle bundle -q newfile.hg
541 $ hg -R tobundle bundle -q newfile.hg
542 $ hg unbundle -q newfile.hg
542 $ hg unbundle -q newfile.hg
543 fncache load triggered!
543 fncache load triggered!
544
544
545 $ cd ..
545 $ cd ..
@@ -1,556 +1,556 b''
1 $ . "$TESTDIR/histedit-helpers.sh"
1 $ . "$TESTDIR/histedit-helpers.sh"
2
2
3 $ cat >> $HGRCPATH <<EOF
3 $ cat >> $HGRCPATH <<EOF
4 > [extensions]
4 > [extensions]
5 > histedit=
5 > histedit=
6 > strip=
6 > strip=
7 > mockmakedate = $TESTDIR/mockmakedate.py
7 > mockmakedate = $TESTDIR/mockmakedate.py
8 > EOF
8 > EOF
9
9
10 $ initrepo ()
10 $ initrepo ()
11 > {
11 > {
12 > hg init r
12 > hg init r
13 > cd r
13 > cd r
14 > for x in a b c d e f g; do
14 > for x in a b c d e f g; do
15 > echo $x > $x
15 > echo $x > $x
16 > hg add $x
16 > hg add $x
17 > hg ci -m $x
17 > hg ci -m $x
18 > done
18 > done
19 > }
19 > }
20
20
21 $ initrepo
21 $ initrepo
22
22
23 log before edit
23 log before edit
24 $ hg log --graph
24 $ hg log --graph
25 @ changeset: 6:3c6a8ed2ebe8
25 @ changeset: 6:3c6a8ed2ebe8
26 | tag: tip
26 | tag: tip
27 | user: test
27 | user: test
28 | date: Thu Jan 01 00:00:00 1970 +0000
28 | date: Thu Jan 01 00:00:00 1970 +0000
29 | summary: g
29 | summary: g
30 |
30 |
31 o changeset: 5:652413bf663e
31 o changeset: 5:652413bf663e
32 | user: test
32 | user: test
33 | date: Thu Jan 01 00:00:00 1970 +0000
33 | date: Thu Jan 01 00:00:00 1970 +0000
34 | summary: f
34 | summary: f
35 |
35 |
36 o changeset: 4:e860deea161a
36 o changeset: 4:e860deea161a
37 | user: test
37 | user: test
38 | date: Thu Jan 01 00:00:00 1970 +0000
38 | date: Thu Jan 01 00:00:00 1970 +0000
39 | summary: e
39 | summary: e
40 |
40 |
41 o changeset: 3:055a42cdd887
41 o changeset: 3:055a42cdd887
42 | user: test
42 | user: test
43 | date: Thu Jan 01 00:00:00 1970 +0000
43 | date: Thu Jan 01 00:00:00 1970 +0000
44 | summary: d
44 | summary: d
45 |
45 |
46 o changeset: 2:177f92b77385
46 o changeset: 2:177f92b77385
47 | user: test
47 | user: test
48 | date: Thu Jan 01 00:00:00 1970 +0000
48 | date: Thu Jan 01 00:00:00 1970 +0000
49 | summary: c
49 | summary: c
50 |
50 |
51 o changeset: 1:d2ae7f538514
51 o changeset: 1:d2ae7f538514
52 | user: test
52 | user: test
53 | date: Thu Jan 01 00:00:00 1970 +0000
53 | date: Thu Jan 01 00:00:00 1970 +0000
54 | summary: b
54 | summary: b
55 |
55 |
56 o changeset: 0:cb9a9f314b8b
56 o changeset: 0:cb9a9f314b8b
57 user: test
57 user: test
58 date: Thu Jan 01 00:00:00 1970 +0000
58 date: Thu Jan 01 00:00:00 1970 +0000
59 summary: a
59 summary: a
60
60
61 dirty a file
61 dirty a file
62 $ echo a > g
62 $ echo a > g
63 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF
63 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF
64 > EOF
64 > EOF
65 abort: uncommitted changes
65 abort: uncommitted changes
66 [255]
66 [255]
67 $ echo g > g
67 $ echo g > g
68
68
69 edit the history
69 edit the history
70 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF| fixbundle
70 $ hg histedit 177f92b77385 --commands - 2>&1 << EOF| fixbundle
71 > pick 177f92b77385 c
71 > pick 177f92b77385 c
72 > pick 055a42cdd887 d
72 > pick 055a42cdd887 d
73 > edit e860deea161a e
73 > edit e860deea161a e
74 > pick 652413bf663e f
74 > pick 652413bf663e f
75 > pick 3c6a8ed2ebe8 g
75 > pick 3c6a8ed2ebe8 g
76 > EOF
76 > EOF
77 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
77 0 files updated, 0 files merged, 3 files removed, 0 files unresolved
78 Editing (e860deea161a), you may commit or record as needed now.
78 Editing (e860deea161a), you may commit or record as needed now.
79 (hg histedit --continue to resume)
79 (hg histedit --continue to resume)
80
80
81 try to update and get an error
81 try to update and get an error
82 $ hg update tip
82 $ hg update tip
83 abort: histedit in progress
83 abort: histedit in progress
84 (use 'hg histedit --continue' or 'hg histedit --abort')
84 (use 'hg histedit --continue' or 'hg histedit --abort')
85 [255]
85 [255]
86
86
87 edit the plan via the editor
87 edit the plan via the editor
88 $ cat >> $TESTTMP/editplan.sh <<EOF
88 $ cat >> $TESTTMP/editplan.sh <<EOF
89 > cat > \$1 <<EOF2
89 > cat > \$1 <<EOF2
90 > drop e860deea161a e
90 > drop e860deea161a e
91 > drop 652413bf663e f
91 > drop 652413bf663e f
92 > drop 3c6a8ed2ebe8 g
92 > drop 3c6a8ed2ebe8 g
93 > EOF2
93 > EOF2
94 > EOF
94 > EOF
95 $ HGEDITOR="sh $TESTTMP/editplan.sh" hg histedit --edit-plan
95 $ HGEDITOR="sh $TESTTMP/editplan.sh" hg histedit --edit-plan
96 $ cat .hg/histedit-state
96 $ cat .hg/histedit-state
97 v1
97 v1
98 055a42cdd88768532f9cf79daa407fc8d138de9b
98 055a42cdd88768532f9cf79daa407fc8d138de9b
99 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
99 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
100 False
100 False
101 3
101 3
102 drop
102 drop
103 e860deea161a2f77de56603b340ebbb4536308ae
103 e860deea161a2f77de56603b340ebbb4536308ae
104 drop
104 drop
105 652413bf663ef2a641cab26574e46d5f5a64a55a
105 652413bf663ef2a641cab26574e46d5f5a64a55a
106 drop
106 drop
107 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
107 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
108 0
108 0
109 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
109 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
110
110
111 edit the plan via --commands
111 edit the plan via --commands
112 $ hg histedit --edit-plan --commands - 2>&1 << EOF
112 $ hg histedit --edit-plan --commands - 2>&1 << EOF
113 > edit e860deea161a e
113 > edit e860deea161a e
114 > pick 652413bf663e f
114 > pick 652413bf663e f
115 > drop 3c6a8ed2ebe8 g
115 > drop 3c6a8ed2ebe8 g
116 > EOF
116 > EOF
117 $ cat .hg/histedit-state
117 $ cat .hg/histedit-state
118 v1
118 v1
119 055a42cdd88768532f9cf79daa407fc8d138de9b
119 055a42cdd88768532f9cf79daa407fc8d138de9b
120 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
120 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
121 False
121 False
122 3
122 3
123 edit
123 edit
124 e860deea161a2f77de56603b340ebbb4536308ae
124 e860deea161a2f77de56603b340ebbb4536308ae
125 pick
125 pick
126 652413bf663ef2a641cab26574e46d5f5a64a55a
126 652413bf663ef2a641cab26574e46d5f5a64a55a
127 drop
127 drop
128 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
128 3c6a8ed2ebe862cc949d2caa30775dd6f16fb799
129 0
129 0
130 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
130 strip-backup/177f92b77385-0ebe6a8f-histedit.hg
131
131
132 Go at a random point and try to continue
132 Go at a random point and try to continue
133
133
134 $ hg id -n
134 $ hg id -n
135 3+
135 3+
136 $ hg up 0
136 $ hg up 0
137 abort: histedit in progress
137 abort: histedit in progress
138 (use 'hg histedit --continue' or 'hg histedit --abort')
138 (use 'hg histedit --continue' or 'hg histedit --abort')
139 [255]
139 [255]
140
140
141 Try to delete necessary commit
141 Try to delete necessary commit
142 $ hg strip -r 652413b
142 $ hg strip -r 652413b
143 abort: histedit in progress, can't strip 652413bf663e
143 abort: histedit in progress, can't strip 652413bf663e
144 [255]
144 [255]
145
145
146 commit, then edit the revision
146 commit, then edit the revision
147 $ hg ci -m 'wat'
147 $ hg ci -m 'wat'
148 created new head
148 created new head
149 $ echo a > e
149 $ echo a > e
150
150
151 qnew should fail while we're in the middle of the edit step
151 qnew should fail while we're in the middle of the edit step
152
152
153 $ hg --config extensions.mq= qnew please-fail
153 $ hg --config extensions.mq= qnew please-fail
154 abort: histedit in progress
154 abort: histedit in progress
155 (use 'hg histedit --continue' or 'hg histedit --abort')
155 (use 'hg histedit --continue' or 'hg histedit --abort')
156 [255]
156 [255]
157 $ HGEDITOR='echo foobaz > ' hg histedit --continue 2>&1 | fixbundle
157 $ HGEDITOR='echo foobaz > ' hg histedit --continue 2>&1 | fixbundle
158
158
159 $ hg log --graph
159 $ hg log --graph
160 @ changeset: 6:b5f70786f9b0
160 @ changeset: 6:b5f70786f9b0
161 | tag: tip
161 | tag: tip
162 | user: test
162 | user: test
163 | date: Thu Jan 01 00:00:00 1970 +0000
163 | date: Thu Jan 01 00:00:00 1970 +0000
164 | summary: f
164 | summary: f
165 |
165 |
166 o changeset: 5:a5e1ba2f7afb
166 o changeset: 5:a5e1ba2f7afb
167 | user: test
167 | user: test
168 | date: Thu Jan 01 00:00:00 1970 +0000
168 | date: Thu Jan 01 00:00:00 1970 +0000
169 | summary: foobaz
169 | summary: foobaz
170 |
170 |
171 o changeset: 4:1a60820cd1f6
171 o changeset: 4:1a60820cd1f6
172 | user: test
172 | user: test
173 | date: Thu Jan 01 00:00:00 1970 +0000
173 | date: Thu Jan 01 00:00:00 1970 +0000
174 | summary: wat
174 | summary: wat
175 |
175 |
176 o changeset: 3:055a42cdd887
176 o changeset: 3:055a42cdd887
177 | user: test
177 | user: test
178 | date: Thu Jan 01 00:00:00 1970 +0000
178 | date: Thu Jan 01 00:00:00 1970 +0000
179 | summary: d
179 | summary: d
180 |
180 |
181 o changeset: 2:177f92b77385
181 o changeset: 2:177f92b77385
182 | user: test
182 | user: test
183 | date: Thu Jan 01 00:00:00 1970 +0000
183 | date: Thu Jan 01 00:00:00 1970 +0000
184 | summary: c
184 | summary: c
185 |
185 |
186 o changeset: 1:d2ae7f538514
186 o changeset: 1:d2ae7f538514
187 | user: test
187 | user: test
188 | date: Thu Jan 01 00:00:00 1970 +0000
188 | date: Thu Jan 01 00:00:00 1970 +0000
189 | summary: b
189 | summary: b
190 |
190 |
191 o changeset: 0:cb9a9f314b8b
191 o changeset: 0:cb9a9f314b8b
192 user: test
192 user: test
193 date: Thu Jan 01 00:00:00 1970 +0000
193 date: Thu Jan 01 00:00:00 1970 +0000
194 summary: a
194 summary: a
195
195
196
196
197 $ hg cat e
197 $ hg cat e
198 a
198 a
199
199
200 Stripping necessary commits should not break --abort
200 Stripping necessary commits should not break --abort
201
201
202 $ hg histedit 1a60820cd1f6 --commands - 2>&1 << EOF| fixbundle
202 $ hg histedit 1a60820cd1f6 --commands - 2>&1 << EOF| fixbundle
203 > edit 1a60820cd1f6 wat
203 > edit 1a60820cd1f6 wat
204 > pick a5e1ba2f7afb foobaz
204 > pick a5e1ba2f7afb foobaz
205 > pick b5f70786f9b0 g
205 > pick b5f70786f9b0 g
206 > EOF
206 > EOF
207 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
207 0 files updated, 0 files merged, 2 files removed, 0 files unresolved
208 Editing (1a60820cd1f6), you may commit or record as needed now.
208 Editing (1a60820cd1f6), you may commit or record as needed now.
209 (hg histedit --continue to resume)
209 (hg histedit --continue to resume)
210
210
211 $ mv .hg/histedit-state .hg/histedit-state.bak
211 $ mv .hg/histedit-state .hg/histedit-state.bak
212 $ hg strip -q -r b5f70786f9b0
212 $ hg strip -q -r b5f70786f9b0
213 $ mv .hg/histedit-state.bak .hg/histedit-state
213 $ mv .hg/histedit-state.bak .hg/histedit-state
214 $ hg histedit --abort
214 $ hg histedit --abort
215 adding changesets
215 adding changesets
216 adding manifests
216 adding manifests
217 adding file changes
217 adding file changes
218 added 1 changesets with 1 changes to 3 files
218 added 1 changesets with 1 changes to 3 files
219 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
219 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
220 $ hg log -r .
220 $ hg log -r .
221 changeset: 6:b5f70786f9b0
221 changeset: 6:b5f70786f9b0
222 tag: tip
222 tag: tip
223 user: test
223 user: test
224 date: Thu Jan 01 00:00:00 1970 +0000
224 date: Thu Jan 01 00:00:00 1970 +0000
225 summary: f
225 summary: f
226
226
227
227
228 check histedit_source
228 check histedit_source
229
229
230 $ hg log --debug --rev 5
230 $ hg log --debug --rev 5
231 changeset: 5:a5e1ba2f7afb899ef1581cea528fd885d2fca70d
231 changeset: 5:a5e1ba2f7afb899ef1581cea528fd885d2fca70d
232 phase: draft
232 phase: draft
233 parent: 4:1a60820cd1f6004a362aa622ebc47d59bc48eb34
233 parent: 4:1a60820cd1f6004a362aa622ebc47d59bc48eb34
234 parent: -1:0000000000000000000000000000000000000000
234 parent: -1:0000000000000000000000000000000000000000
235 manifest: 5:5ad3be8791f39117565557781f5464363b918a45
235 manifest: 5:5ad3be8791f39117565557781f5464363b918a45
236 user: test
236 user: test
237 date: Thu Jan 01 00:00:00 1970 +0000
237 date: Thu Jan 01 00:00:00 1970 +0000
238 files: e
238 files: e
239 extra: branch=default
239 extra: branch=default
240 extra: histedit_source=e860deea161a2f77de56603b340ebbb4536308ae
240 extra: histedit_source=e860deea161a2f77de56603b340ebbb4536308ae
241 description:
241 description:
242 foobaz
242 foobaz
243
243
244
244
245
245
246 $ hg histedit tip --commands - 2>&1 <<EOF| fixbundle
246 $ hg histedit tip --commands - 2>&1 <<EOF| fixbundle
247 > edit b5f70786f9b0 f
247 > edit b5f70786f9b0 f
248 > EOF
248 > EOF
249 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
249 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
250 Editing (b5f70786f9b0), you may commit or record as needed now.
250 Editing (b5f70786f9b0), you may commit or record as needed now.
251 (hg histedit --continue to resume)
251 (hg histedit --continue to resume)
252 $ hg status
252 $ hg status
253 A f
253 A f
254
254
255 $ hg summary
255 $ hg summary
256 parent: 5:a5e1ba2f7afb
256 parent: 5:a5e1ba2f7afb
257 foobaz
257 foobaz
258 branch: default
258 branch: default
259 commit: 1 added (new branch head)
259 commit: 1 added (new branch head)
260 update: 1 new changesets (update)
260 update: 1 new changesets (update)
261 phases: 7 draft
261 phases: 7 draft
262 hist: 1 remaining (histedit --continue)
262 hist: 1 remaining (histedit --continue)
263
263
264 (test also that editor is invoked if histedit is continued for
264 (test also that editor is invoked if histedit is continued for
265 "edit" action)
265 "edit" action)
266
266
267 $ HGEDITOR='cat' hg histedit --continue
267 $ HGEDITOR='cat' hg histedit --continue
268 f
268 f
269
269
270
270
271 HG: Enter commit message. Lines beginning with 'HG:' are removed.
271 HG: Enter commit message. Lines beginning with 'HG:' are removed.
272 HG: Leave message empty to abort commit.
272 HG: Leave message empty to abort commit.
273 HG: --
273 HG: --
274 HG: user: test
274 HG: user: test
275 HG: branch 'default'
275 HG: branch 'default'
276 HG: added f
276 HG: added f
277 saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg
277 saved backup bundle to $TESTTMP/r/.hg/strip-backup/b5f70786f9b0-c28d9c86-histedit.hg
278
278
279 $ hg status
279 $ hg status
280
280
281 log after edit
281 log after edit
282 $ hg log --limit 1
282 $ hg log --limit 1
283 changeset: 6:a107ee126658
283 changeset: 6:a107ee126658
284 tag: tip
284 tag: tip
285 user: test
285 user: test
286 date: Thu Jan 01 00:00:00 1970 +0000
286 date: Thu Jan 01 00:00:00 1970 +0000
287 summary: f
287 summary: f
288
288
289
289
290 say we'll change the message, but don't.
290 say we'll change the message, but don't.
291 $ cat > ../edit.sh <<EOF
291 $ cat > ../edit.sh <<EOF
292 > cat "\$1" | sed s/pick/mess/ > tmp
292 > cat "\$1" | sed s/pick/mess/ > tmp
293 > mv tmp "\$1"
293 > mv tmp "\$1"
294 > EOF
294 > EOF
295 $ HGEDITOR="sh ../edit.sh" hg histedit tip 2>&1 | fixbundle
295 $ HGEDITOR="sh ../edit.sh" hg histedit tip 2>&1 | fixbundle
296 $ hg status
296 $ hg status
297 $ hg log --limit 1
297 $ hg log --limit 1
298 changeset: 6:1fd3b2fe7754
298 changeset: 6:1fd3b2fe7754
299 tag: tip
299 tag: tip
300 user: test
300 user: test
301 date: Thu Jan 01 00:00:00 1970 +0000
301 date: Thu Jan 01 00:00:00 1970 +0000
302 summary: f
302 summary: f
303
303
304
304
305 modify the message
305 modify the message
306
306
307 check saving last-message.txt, at first
307 check saving last-message.txt, at first
308
308
309 $ cat > $TESTTMP/commitfailure.py <<EOF
309 $ cat > $TESTTMP/commitfailure.py <<EOF
310 > from mercurial import error
310 > from mercurial import error
311 > def reposetup(ui, repo):
311 > def reposetup(ui, repo):
312 > class commitfailure(repo.__class__):
312 > class commitfailure(repo.__class__):
313 > def commit(self, *args, **kwargs):
313 > def commit(self, *args, **kwargs):
314 > raise error.Abort('emulating unexpected abort')
314 > raise error.Abort(b'emulating unexpected abort')
315 > repo.__class__ = commitfailure
315 > repo.__class__ = commitfailure
316 > EOF
316 > EOF
317 $ cat >> .hg/hgrc <<EOF
317 $ cat >> .hg/hgrc <<EOF
318 > [extensions]
318 > [extensions]
319 > # this failure occurs before editor invocation
319 > # this failure occurs before editor invocation
320 > commitfailure = $TESTTMP/commitfailure.py
320 > commitfailure = $TESTTMP/commitfailure.py
321 > EOF
321 > EOF
322
322
323 $ cat > $TESTTMP/editor.sh <<EOF
323 $ cat > $TESTTMP/editor.sh <<EOF
324 > echo "==== before editing"
324 > echo "==== before editing"
325 > cat \$1
325 > cat \$1
326 > echo "===="
326 > echo "===="
327 > echo "check saving last-message.txt" >> \$1
327 > echo "check saving last-message.txt" >> \$1
328 > EOF
328 > EOF
329
329
330 (test that editor is not invoked before transaction starting)
330 (test that editor is not invoked before transaction starting)
331
331
332 $ rm -f .hg/last-message.txt
332 $ rm -f .hg/last-message.txt
333 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF | fixbundle
333 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF | fixbundle
334 > mess 1fd3b2fe7754 f
334 > mess 1fd3b2fe7754 f
335 > EOF
335 > EOF
336 abort: emulating unexpected abort
336 abort: emulating unexpected abort
337 $ test -f .hg/last-message.txt
337 $ test -f .hg/last-message.txt
338 [1]
338 [1]
339
339
340 $ cat >> .hg/hgrc <<EOF
340 $ cat >> .hg/hgrc <<EOF
341 > [extensions]
341 > [extensions]
342 > commitfailure = !
342 > commitfailure = !
343 > EOF
343 > EOF
344 $ hg histedit --abort -q
344 $ hg histedit --abort -q
345
345
346 (test that editor is invoked and commit message is saved into
346 (test that editor is invoked and commit message is saved into
347 "last-message.txt")
347 "last-message.txt")
348
348
349 $ cat >> .hg/hgrc <<EOF
349 $ cat >> .hg/hgrc <<EOF
350 > [hooks]
350 > [hooks]
351 > # this failure occurs after editor invocation
351 > # this failure occurs after editor invocation
352 > pretxncommit.unexpectedabort = false
352 > pretxncommit.unexpectedabort = false
353 > EOF
353 > EOF
354
354
355 $ hg status --rev '1fd3b2fe7754^1' --rev 1fd3b2fe7754
355 $ hg status --rev '1fd3b2fe7754^1' --rev 1fd3b2fe7754
356 A f
356 A f
357
357
358 $ rm -f .hg/last-message.txt
358 $ rm -f .hg/last-message.txt
359 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF
359 $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit tip --commands - 2>&1 << EOF
360 > mess 1fd3b2fe7754 f
360 > mess 1fd3b2fe7754 f
361 > EOF
361 > EOF
362 ==== before editing
362 ==== before editing
363 f
363 f
364
364
365
365
366 HG: Enter commit message. Lines beginning with 'HG:' are removed.
366 HG: Enter commit message. Lines beginning with 'HG:' are removed.
367 HG: Leave message empty to abort commit.
367 HG: Leave message empty to abort commit.
368 HG: --
368 HG: --
369 HG: user: test
369 HG: user: test
370 HG: branch 'default'
370 HG: branch 'default'
371 HG: added f
371 HG: added f
372 ====
372 ====
373 transaction abort!
373 transaction abort!
374 rollback completed
374 rollback completed
375 note: commit message saved in .hg/last-message.txt
375 note: commit message saved in .hg/last-message.txt
376 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
376 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
377 abort: pretxncommit.unexpectedabort hook exited with status 1
377 abort: pretxncommit.unexpectedabort hook exited with status 1
378 [255]
378 [255]
379 $ cat .hg/last-message.txt
379 $ cat .hg/last-message.txt
380 f
380 f
381
381
382
382
383 check saving last-message.txt
383 check saving last-message.txt
384
384
385 (test also that editor is invoked if histedit is continued for "message"
385 (test also that editor is invoked if histedit is continued for "message"
386 action)
386 action)
387
387
388 $ HGEDITOR=cat hg histedit --continue
388 $ HGEDITOR=cat hg histedit --continue
389 f
389 f
390
390
391
391
392 HG: Enter commit message. Lines beginning with 'HG:' are removed.
392 HG: Enter commit message. Lines beginning with 'HG:' are removed.
393 HG: Leave message empty to abort commit.
393 HG: Leave message empty to abort commit.
394 HG: --
394 HG: --
395 HG: user: test
395 HG: user: test
396 HG: branch 'default'
396 HG: branch 'default'
397 HG: added f
397 HG: added f
398 transaction abort!
398 transaction abort!
399 rollback completed
399 rollback completed
400 note: commit message saved in .hg/last-message.txt
400 note: commit message saved in .hg/last-message.txt
401 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
401 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
402 abort: pretxncommit.unexpectedabort hook exited with status 1
402 abort: pretxncommit.unexpectedabort hook exited with status 1
403 [255]
403 [255]
404
404
405 $ cat >> .hg/hgrc <<EOF
405 $ cat >> .hg/hgrc <<EOF
406 > [hooks]
406 > [hooks]
407 > pretxncommit.unexpectedabort =
407 > pretxncommit.unexpectedabort =
408 > EOF
408 > EOF
409 $ hg histedit --abort -q
409 $ hg histedit --abort -q
410
410
411 then, check "modify the message" itself
411 then, check "modify the message" itself
412
412
413 $ hg histedit tip --commands - 2>&1 << EOF | fixbundle
413 $ hg histedit tip --commands - 2>&1 << EOF | fixbundle
414 > mess 1fd3b2fe7754 f
414 > mess 1fd3b2fe7754 f
415 > EOF
415 > EOF
416 $ hg status
416 $ hg status
417 $ hg log --limit 1
417 $ hg log --limit 1
418 changeset: 6:62feedb1200e
418 changeset: 6:62feedb1200e
419 tag: tip
419 tag: tip
420 user: test
420 user: test
421 date: Thu Jan 01 00:00:00 1970 +0000
421 date: Thu Jan 01 00:00:00 1970 +0000
422 summary: f
422 summary: f
423
423
424
424
425 rollback should not work after a histedit
425 rollback should not work after a histedit
426 $ hg rollback
426 $ hg rollback
427 no rollback information available
427 no rollback information available
428 [1]
428 [1]
429
429
430 $ cd ..
430 $ cd ..
431 $ hg clone -qr0 r r0
431 $ hg clone -qr0 r r0
432 $ cd r0
432 $ cd r0
433 $ hg phase -fdr0
433 $ hg phase -fdr0
434 $ hg histedit --commands - 0 2>&1 << EOF
434 $ hg histedit --commands - 0 2>&1 << EOF
435 > edit cb9a9f314b8b a > $EDITED
435 > edit cb9a9f314b8b a > $EDITED
436 > EOF
436 > EOF
437 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
437 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
438 Editing (cb9a9f314b8b), you may commit or record as needed now.
438 Editing (cb9a9f314b8b), you may commit or record as needed now.
439 (hg histedit --continue to resume)
439 (hg histedit --continue to resume)
440 [1]
440 [1]
441 $ HGEDITOR=true hg histedit --continue
441 $ HGEDITOR=true hg histedit --continue
442 saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg
442 saved backup bundle to $TESTTMP/r0/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-histedit.hg
443
443
444 $ hg log -G
444 $ hg log -G
445 @ changeset: 0:0efcea34f18a
445 @ changeset: 0:0efcea34f18a
446 tag: tip
446 tag: tip
447 user: test
447 user: test
448 date: Thu Jan 01 00:00:00 1970 +0000
448 date: Thu Jan 01 00:00:00 1970 +0000
449 summary: a
449 summary: a
450
450
451 $ echo foo >> b
451 $ echo foo >> b
452 $ hg addr
452 $ hg addr
453 adding b
453 adding b
454 $ hg ci -m 'add b'
454 $ hg ci -m 'add b'
455 $ echo foo >> a
455 $ echo foo >> a
456 $ hg ci -m 'extend a'
456 $ hg ci -m 'extend a'
457 $ hg phase --public 1
457 $ hg phase --public 1
458 Attempting to fold a change into a public change should not work:
458 Attempting to fold a change into a public change should not work:
459 $ cat > ../edit.sh <<EOF
459 $ cat > ../edit.sh <<EOF
460 > cat "\$1" | sed s/pick/fold/ > tmp
460 > cat "\$1" | sed s/pick/fold/ > tmp
461 > mv tmp "\$1"
461 > mv tmp "\$1"
462 > EOF
462 > EOF
463 $ HGEDITOR="sh ../edit.sh" hg histedit 2
463 $ HGEDITOR="sh ../edit.sh" hg histedit 2
464 warning: histedit rules saved to: .hg/histedit-last-edit.txt
464 warning: histedit rules saved to: .hg/histedit-last-edit.txt
465 hg: parse error: first changeset cannot use verb "fold"
465 hg: parse error: first changeset cannot use verb "fold"
466 [255]
466 [255]
467 $ cat .hg/histedit-last-edit.txt
467 $ cat .hg/histedit-last-edit.txt
468 fold 0012be4a27ea 2 extend a
468 fold 0012be4a27ea 2 extend a
469
469
470 # Edit history between 0012be4a27ea and 0012be4a27ea
470 # Edit history between 0012be4a27ea and 0012be4a27ea
471 #
471 #
472 # Commits are listed from least to most recent
472 # Commits are listed from least to most recent
473 #
473 #
474 # You can reorder changesets by reordering the lines
474 # You can reorder changesets by reordering the lines
475 #
475 #
476 # Commands:
476 # Commands:
477 #
477 #
478 # e, edit = use commit, but stop for amending
478 # e, edit = use commit, but stop for amending
479 # m, mess = edit commit message without changing commit content
479 # m, mess = edit commit message without changing commit content
480 # p, fold = use commit
480 # p, fold = use commit
481 # b, base = checkout changeset and apply further changesets from there
481 # b, base = checkout changeset and apply further changesets from there
482 # d, drop = remove commit from history
482 # d, drop = remove commit from history
483 # f, fold = use commit, but combine it with the one above
483 # f, fold = use commit, but combine it with the one above
484 # r, roll = like fold, but discard this commit's description and date
484 # r, roll = like fold, but discard this commit's description and date
485 #
485 #
486
486
487 $ cd ..
487 $ cd ..
488
488
489 ============================================
489 ============================================
490 Test update-timestamp config option in mess|
490 Test update-timestamp config option in mess|
491 ============================================
491 ============================================
492
492
493 $ addwithdate ()
493 $ addwithdate ()
494 > {
494 > {
495 > echo $1 > $1
495 > echo $1 > $1
496 > hg add $1
496 > hg add $1
497 > hg ci -m $1 -d "$2 0"
497 > hg ci -m $1 -d "$2 0"
498 > }
498 > }
499
499
500 $ initrepo ()
500 $ initrepo ()
501 > {
501 > {
502 > hg init r2
502 > hg init r2
503 > cd r2
503 > cd r2
504 > addwithdate a 1
504 > addwithdate a 1
505 > addwithdate b 2
505 > addwithdate b 2
506 > addwithdate c 3
506 > addwithdate c 3
507 > addwithdate d 4
507 > addwithdate d 4
508 > addwithdate e 5
508 > addwithdate e 5
509 > addwithdate f 6
509 > addwithdate f 6
510 > }
510 > }
511
511
512 $ initrepo
512 $ initrepo
513
513
514 log before edit
514 log before edit
515
515
516 $ hg log --limit 1
516 $ hg log --limit 1
517 changeset: 5:178e35e0ce73
517 changeset: 5:178e35e0ce73
518 tag: tip
518 tag: tip
519 user: test
519 user: test
520 date: Thu Jan 01 00:00:06 1970 +0000
520 date: Thu Jan 01 00:00:06 1970 +0000
521 summary: f
521 summary: f
522
522
523 $ hg histedit tip --commands - 2>&1 --config rewrite.update-timestamp=True << EOF | fixbundle
523 $ hg histedit tip --commands - 2>&1 --config rewrite.update-timestamp=True << EOF | fixbundle
524 > mess 178e35e0ce73 f
524 > mess 178e35e0ce73 f
525 > EOF
525 > EOF
526
526
527 log after edit
527 log after edit
528
528
529 $ hg log --limit 1
529 $ hg log --limit 1
530 changeset: 5:98bf456d476b
530 changeset: 5:98bf456d476b
531 tag: tip
531 tag: tip
532 user: test
532 user: test
533 date: Thu Jan 01 00:00:00 1970 +0000
533 date: Thu Jan 01 00:00:00 1970 +0000
534 summary: f
534 summary: f
535
535
536
536
537 $ cd ..
537 $ cd ..
538
538
539 warn the user on editing tagged commits
539 warn the user on editing tagged commits
540
540
541 $ hg init issue4017
541 $ hg init issue4017
542 $ cd issue4017
542 $ cd issue4017
543 $ echo > a
543 $ echo > a
544 $ hg ci -Am 'add a'
544 $ hg ci -Am 'add a'
545 adding a
545 adding a
546 $ hg tag a
546 $ hg tag a
547 $ hg tags
547 $ hg tags
548 tip 1:bd7ee4f3939b
548 tip 1:bd7ee4f3939b
549 a 0:a8a82d372bb3
549 a 0:a8a82d372bb3
550 $ hg histedit
550 $ hg histedit
551 warning: tags associated with the given changeset will be lost after histedit.
551 warning: tags associated with the given changeset will be lost after histedit.
552 do you want to continue (yN)? n
552 do you want to continue (yN)? n
553 abort: histedit cancelled
553 abort: histedit cancelled
554
554
555 [255]
555 [255]
556 $ cd ..
556 $ cd ..
@@ -1,439 +1,439 b''
1 $ cat <<EOF > merge
1 $ cat <<EOF > merge
2 > from __future__ import print_function
2 > from __future__ import print_function
3 > import sys, os
3 > import sys, os
4 >
4 >
5 > try:
5 > try:
6 > import msvcrt
6 > import msvcrt
7 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
7 > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
8 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
8 > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
9 > except ImportError:
9 > except ImportError:
10 > pass
10 > pass
11 >
11 >
12 > print("merging for", os.path.basename(sys.argv[1]))
12 > print("merging for", os.path.basename(sys.argv[1]))
13 > EOF
13 > EOF
14 $ HGMERGE="\"$PYTHON\" ../merge"; export HGMERGE
14 $ HGMERGE="\"$PYTHON\" ../merge"; export HGMERGE
15
15
16 $ hg init t
16 $ hg init t
17 $ cd t
17 $ cd t
18 $ echo This is file a1 > a
18 $ echo This is file a1 > a
19 $ hg add a
19 $ hg add a
20 $ hg commit -m "commit #0"
20 $ hg commit -m "commit #0"
21 $ echo This is file b1 > b
21 $ echo This is file b1 > b
22 $ hg add b
22 $ hg add b
23 $ hg commit -m "commit #1"
23 $ hg commit -m "commit #1"
24
24
25 $ hg update 0
25 $ hg update 0
26 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
26 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
27
27
28 Test interrupted updates by having a non-empty dir with the same name as one
28 Test interrupted updates by having a non-empty dir with the same name as one
29 of the files in a commit we're updating to
29 of the files in a commit we're updating to
30
30
31 $ mkdir b && touch b/nonempty
31 $ mkdir b && touch b/nonempty
32 $ hg up
32 $ hg up
33 abort: Unlinking directory not permitted: *$TESTTMP/t/b* (glob) (windows !)
33 abort: Unlinking directory not permitted: *$TESTTMP/t/b* (glob) (windows !)
34 abort: Directory not empty: '?\$TESTTMP/t/b'? (re) (no-windows !)
34 abort: Directory not empty: '?\$TESTTMP/t/b'? (re) (no-windows !)
35 [255]
35 [255]
36 $ hg ci
36 $ hg ci
37 abort: last update was interrupted
37 abort: last update was interrupted
38 (use 'hg update' to get a consistent checkout)
38 (use 'hg update' to get a consistent checkout)
39 [255]
39 [255]
40 $ hg sum
40 $ hg sum
41 parent: 0:538afb845929
41 parent: 0:538afb845929
42 commit #0
42 commit #0
43 branch: default
43 branch: default
44 commit: 1 unknown (interrupted update)
44 commit: 1 unknown (interrupted update)
45 update: 1 new changesets (update)
45 update: 1 new changesets (update)
46 phases: 2 draft
46 phases: 2 draft
47 Detect interrupted update by hg status --verbose
47 Detect interrupted update by hg status --verbose
48 $ hg status -v
48 $ hg status -v
49 ? b/nonempty
49 ? b/nonempty
50 # The repository is in an unfinished *update* state.
50 # The repository is in an unfinished *update* state.
51
51
52 # To continue: hg update .
52 # To continue: hg update .
53
53
54
54
55 $ rm b/nonempty
55 $ rm b/nonempty
56
56
57 $ hg up
57 $ hg up
58 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
59 $ hg sum
59 $ hg sum
60 parent: 1:b8bb4a988f25 tip
60 parent: 1:b8bb4a988f25 tip
61 commit #1
61 commit #1
62 branch: default
62 branch: default
63 commit: (clean)
63 commit: (clean)
64 update: (current)
64 update: (current)
65 phases: 2 draft
65 phases: 2 draft
66
66
67 Prepare a basic merge
67 Prepare a basic merge
68
68
69 $ hg up 0
69 $ hg up 0
70 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
70 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
71 $ echo This is file c1 > c
71 $ echo This is file c1 > c
72 $ hg add c
72 $ hg add c
73 $ hg commit -m "commit #2"
73 $ hg commit -m "commit #2"
74 created new head
74 created new head
75 $ echo This is file b1 > b
75 $ echo This is file b1 > b
76 no merges expected
76 no merges expected
77 $ hg merge -P 1
77 $ hg merge -P 1
78 changeset: 1:b8bb4a988f25
78 changeset: 1:b8bb4a988f25
79 user: test
79 user: test
80 date: Thu Jan 01 00:00:00 1970 +0000
80 date: Thu Jan 01 00:00:00 1970 +0000
81 summary: commit #1
81 summary: commit #1
82
82
83 $ hg merge 1
83 $ hg merge 1
84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
84 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
85 (branch merge, don't forget to commit)
85 (branch merge, don't forget to commit)
86 $ hg diff --nodates
86 $ hg diff --nodates
87 diff -r 49035e18a8e6 b
87 diff -r 49035e18a8e6 b
88 --- /dev/null
88 --- /dev/null
89 +++ b/b
89 +++ b/b
90 @@ -0,0 +1,1 @@
90 @@ -0,0 +1,1 @@
91 +This is file b1
91 +This is file b1
92 $ hg status
92 $ hg status
93 M b
93 M b
94 $ cd ..; rm -r t
94 $ cd ..; rm -r t
95
95
96 $ hg init t
96 $ hg init t
97 $ cd t
97 $ cd t
98 $ echo This is file a1 > a
98 $ echo This is file a1 > a
99 $ hg add a
99 $ hg add a
100 $ hg commit -m "commit #0"
100 $ hg commit -m "commit #0"
101 $ echo This is file b1 > b
101 $ echo This is file b1 > b
102 $ hg add b
102 $ hg add b
103 $ hg commit -m "commit #1"
103 $ hg commit -m "commit #1"
104
104
105 $ hg update 0
105 $ hg update 0
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
106 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
107 $ echo This is file c1 > c
107 $ echo This is file c1 > c
108 $ hg add c
108 $ hg add c
109 $ hg commit -m "commit #2"
109 $ hg commit -m "commit #2"
110 created new head
110 created new head
111 $ echo This is file b2 > b
111 $ echo This is file b2 > b
112 merge should fail
112 merge should fail
113 $ hg merge 1
113 $ hg merge 1
114 b: untracked file differs
114 b: untracked file differs
115 abort: untracked files in working directory differ from files in requested revision
115 abort: untracked files in working directory differ from files in requested revision
116 [255]
116 [255]
117
117
118 #if symlink
118 #if symlink
119 symlinks to directories should be treated as regular files (issue5027)
119 symlinks to directories should be treated as regular files (issue5027)
120 $ rm b
120 $ rm b
121 $ ln -s 'This is file b2' b
121 $ ln -s 'This is file b2' b
122 $ hg merge 1
122 $ hg merge 1
123 b: untracked file differs
123 b: untracked file differs
124 abort: untracked files in working directory differ from files in requested revision
124 abort: untracked files in working directory differ from files in requested revision
125 [255]
125 [255]
126 symlinks shouldn't be followed
126 symlinks shouldn't be followed
127 $ rm b
127 $ rm b
128 $ echo This is file b1 > .hg/b
128 $ echo This is file b1 > .hg/b
129 $ ln -s .hg/b b
129 $ ln -s .hg/b b
130 $ hg merge 1
130 $ hg merge 1
131 b: untracked file differs
131 b: untracked file differs
132 abort: untracked files in working directory differ from files in requested revision
132 abort: untracked files in working directory differ from files in requested revision
133 [255]
133 [255]
134
134
135 $ rm b
135 $ rm b
136 $ echo This is file b2 > b
136 $ echo This is file b2 > b
137 #endif
137 #endif
138
138
139 bad config
139 bad config
140 $ hg merge 1 --config merge.checkunknown=x
140 $ hg merge 1 --config merge.checkunknown=x
141 abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
141 abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn')
142 [255]
142 [255]
143 this merge should fail
143 this merge should fail
144 $ hg merge 1 --config merge.checkunknown=abort
144 $ hg merge 1 --config merge.checkunknown=abort
145 b: untracked file differs
145 b: untracked file differs
146 abort: untracked files in working directory differ from files in requested revision
146 abort: untracked files in working directory differ from files in requested revision
147 [255]
147 [255]
148
148
149 this merge should warn
149 this merge should warn
150 $ hg merge 1 --config merge.checkunknown=warn
150 $ hg merge 1 --config merge.checkunknown=warn
151 b: replacing untracked file
151 b: replacing untracked file
152 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
152 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
153 (branch merge, don't forget to commit)
153 (branch merge, don't forget to commit)
154 $ cat b.orig
154 $ cat b.orig
155 This is file b2
155 This is file b2
156 $ hg up --clean 2
156 $ hg up --clean 2
157 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
157 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
158 $ mv b.orig b
158 $ mv b.orig b
159
159
160 this merge should silently ignore
160 this merge should silently ignore
161 $ cat b
161 $ cat b
162 This is file b2
162 This is file b2
163 $ hg merge 1 --config merge.checkunknown=ignore
163 $ hg merge 1 --config merge.checkunknown=ignore
164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
164 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
165 (branch merge, don't forget to commit)
165 (branch merge, don't forget to commit)
166
166
167 merge.checkignored
167 merge.checkignored
168 $ hg up --clean 1
168 $ hg up --clean 1
169 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
169 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
170 $ cat >> .hgignore << EOF
170 $ cat >> .hgignore << EOF
171 > remoteignored
171 > remoteignored
172 > EOF
172 > EOF
173 $ echo This is file localignored3 > localignored
173 $ echo This is file localignored3 > localignored
174 $ echo This is file remoteignored3 > remoteignored
174 $ echo This is file remoteignored3 > remoteignored
175 $ hg add .hgignore localignored remoteignored
175 $ hg add .hgignore localignored remoteignored
176 $ hg commit -m "commit #3"
176 $ hg commit -m "commit #3"
177
177
178 $ hg up 2
178 $ hg up 2
179 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
179 1 files updated, 0 files merged, 4 files removed, 0 files unresolved
180 $ cat >> .hgignore << EOF
180 $ cat >> .hgignore << EOF
181 > localignored
181 > localignored
182 > EOF
182 > EOF
183 $ hg add .hgignore
183 $ hg add .hgignore
184 $ hg commit -m "commit #4"
184 $ hg commit -m "commit #4"
185
185
186 remote .hgignore shouldn't be used for determining whether a file is ignored
186 remote .hgignore shouldn't be used for determining whether a file is ignored
187 $ echo This is file remoteignored4 > remoteignored
187 $ echo This is file remoteignored4 > remoteignored
188 $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
188 $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort
189 remoteignored: untracked file differs
189 remoteignored: untracked file differs
190 abort: untracked files in working directory differ from files in requested revision
190 abort: untracked files in working directory differ from files in requested revision
191 [255]
191 [255]
192 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
192 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
193 merging .hgignore
193 merging .hgignore
194 merging for .hgignore
194 merging for .hgignore
195 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
195 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
196 (branch merge, don't forget to commit)
196 (branch merge, don't forget to commit)
197 $ cat remoteignored
197 $ cat remoteignored
198 This is file remoteignored3
198 This is file remoteignored3
199 $ cat remoteignored.orig
199 $ cat remoteignored.orig
200 This is file remoteignored4
200 This is file remoteignored4
201 $ rm remoteignored.orig
201 $ rm remoteignored.orig
202
202
203 local .hgignore should be used for that
203 local .hgignore should be used for that
204 $ hg up --clean 4
204 $ hg up --clean 4
205 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
205 1 files updated, 0 files merged, 3 files removed, 0 files unresolved
206 $ echo This is file localignored4 > localignored
206 $ echo This is file localignored4 > localignored
207 also test other conflicting files to see we output the full set of warnings
207 also test other conflicting files to see we output the full set of warnings
208 $ echo This is file b2 > b
208 $ echo This is file b2 > b
209 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort
209 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort
210 b: untracked file differs
210 b: untracked file differs
211 localignored: untracked file differs
211 localignored: untracked file differs
212 abort: untracked files in working directory differ from files in requested revision
212 abort: untracked files in working directory differ from files in requested revision
213 [255]
213 [255]
214 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
214 $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore
215 localignored: untracked file differs
215 localignored: untracked file differs
216 abort: untracked files in working directory differ from files in requested revision
216 abort: untracked files in working directory differ from files in requested revision
217 [255]
217 [255]
218 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
218 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort
219 b: untracked file differs
219 b: untracked file differs
220 abort: untracked files in working directory differ from files in requested revision
220 abort: untracked files in working directory differ from files in requested revision
221 [255]
221 [255]
222 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
222 $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn
223 b: replacing untracked file
223 b: replacing untracked file
224 localignored: replacing untracked file
224 localignored: replacing untracked file
225 merging .hgignore
225 merging .hgignore
226 merging for .hgignore
226 merging for .hgignore
227 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
227 3 files updated, 1 files merged, 0 files removed, 0 files unresolved
228 (branch merge, don't forget to commit)
228 (branch merge, don't forget to commit)
229 $ cat localignored
229 $ cat localignored
230 This is file localignored3
230 This is file localignored3
231 $ cat localignored.orig
231 $ cat localignored.orig
232 This is file localignored4
232 This is file localignored4
233 $ rm localignored.orig
233 $ rm localignored.orig
234
234
235 $ cat b.orig
235 $ cat b.orig
236 This is file b2
236 This is file b2
237 $ hg up --clean 2
237 $ hg up --clean 2
238 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
238 0 files updated, 0 files merged, 4 files removed, 0 files unresolved
239 $ mv b.orig b
239 $ mv b.orig b
240
240
241 this merge of b should work
241 this merge of b should work
242 $ cat b
242 $ cat b
243 This is file b2
243 This is file b2
244 $ hg merge -f 1
244 $ hg merge -f 1
245 merging b
245 merging b
246 merging for b
246 merging for b
247 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
247 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
248 (branch merge, don't forget to commit)
248 (branch merge, don't forget to commit)
249 $ hg diff --nodates
249 $ hg diff --nodates
250 diff -r 49035e18a8e6 b
250 diff -r 49035e18a8e6 b
251 --- /dev/null
251 --- /dev/null
252 +++ b/b
252 +++ b/b
253 @@ -0,0 +1,1 @@
253 @@ -0,0 +1,1 @@
254 +This is file b2
254 +This is file b2
255 $ hg status
255 $ hg status
256 M b
256 M b
257 $ cd ..; rm -r t
257 $ cd ..; rm -r t
258
258
259 $ hg init t
259 $ hg init t
260 $ cd t
260 $ cd t
261 $ echo This is file a1 > a
261 $ echo This is file a1 > a
262 $ hg add a
262 $ hg add a
263 $ hg commit -m "commit #0"
263 $ hg commit -m "commit #0"
264 $ echo This is file b1 > b
264 $ echo This is file b1 > b
265 $ hg add b
265 $ hg add b
266 $ hg commit -m "commit #1"
266 $ hg commit -m "commit #1"
267 $ echo This is file b22 > b
267 $ echo This is file b22 > b
268 $ hg commit -m "commit #2"
268 $ hg commit -m "commit #2"
269 $ hg update 1
269 $ hg update 1
270 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
271 $ echo This is file c1 > c
271 $ echo This is file c1 > c
272 $ hg add c
272 $ hg add c
273 $ hg commit -m "commit #3"
273 $ hg commit -m "commit #3"
274 created new head
274 created new head
275
275
276 Contents of b should be "this is file b1"
276 Contents of b should be "this is file b1"
277 $ cat b
277 $ cat b
278 This is file b1
278 This is file b1
279
279
280 $ echo This is file b22 > b
280 $ echo This is file b22 > b
281 merge fails
281 merge fails
282 $ hg merge 2
282 $ hg merge 2
283 abort: uncommitted changes
283 abort: uncommitted changes
284 (use 'hg status' to list changes)
284 (use 'hg status' to list changes)
285 [255]
285 [255]
286 merge expected!
286 merge expected!
287 $ hg merge -f 2
287 $ hg merge -f 2
288 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
288 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
289 (branch merge, don't forget to commit)
289 (branch merge, don't forget to commit)
290 $ hg diff --nodates
290 $ hg diff --nodates
291 diff -r 85de557015a8 b
291 diff -r 85de557015a8 b
292 --- a/b
292 --- a/b
293 +++ b/b
293 +++ b/b
294 @@ -1,1 +1,1 @@
294 @@ -1,1 +1,1 @@
295 -This is file b1
295 -This is file b1
296 +This is file b22
296 +This is file b22
297 $ hg status
297 $ hg status
298 M b
298 M b
299 $ cd ..; rm -r t
299 $ cd ..; rm -r t
300
300
301 $ hg init t
301 $ hg init t
302 $ cd t
302 $ cd t
303 $ echo This is file a1 > a
303 $ echo This is file a1 > a
304 $ hg add a
304 $ hg add a
305 $ hg commit -m "commit #0"
305 $ hg commit -m "commit #0"
306 $ echo This is file b1 > b
306 $ echo This is file b1 > b
307 $ hg add b
307 $ hg add b
308 $ hg commit -m "commit #1"
308 $ hg commit -m "commit #1"
309 $ echo This is file b22 > b
309 $ echo This is file b22 > b
310 $ hg commit -m "commit #2"
310 $ hg commit -m "commit #2"
311 $ hg update 1
311 $ hg update 1
312 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
312 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
313 $ echo This is file c1 > c
313 $ echo This is file c1 > c
314 $ hg add c
314 $ hg add c
315 $ hg commit -m "commit #3"
315 $ hg commit -m "commit #3"
316 created new head
316 created new head
317 $ echo This is file b33 > b
317 $ echo This is file b33 > b
318 merge of b should fail
318 merge of b should fail
319 $ hg merge 2
319 $ hg merge 2
320 abort: uncommitted changes
320 abort: uncommitted changes
321 (use 'hg status' to list changes)
321 (use 'hg status' to list changes)
322 [255]
322 [255]
323 merge of b expected
323 merge of b expected
324 $ hg merge -f 2
324 $ hg merge -f 2
325 merging b
325 merging b
326 merging for b
326 merging for b
327 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
327 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
328 (branch merge, don't forget to commit)
328 (branch merge, don't forget to commit)
329 $ hg diff --nodates
329 $ hg diff --nodates
330 diff -r 85de557015a8 b
330 diff -r 85de557015a8 b
331 --- a/b
331 --- a/b
332 +++ b/b
332 +++ b/b
333 @@ -1,1 +1,1 @@
333 @@ -1,1 +1,1 @@
334 -This is file b1
334 -This is file b1
335 +This is file b33
335 +This is file b33
336 $ hg status
336 $ hg status
337 M b
337 M b
338
338
339 Test for issue2364
339 Test for issue2364
340
340
341 $ hg up -qC .
341 $ hg up -qC .
342 $ hg rm b
342 $ hg rm b
343 $ hg ci -md
343 $ hg ci -md
344 $ hg revert -r -2 b
344 $ hg revert -r -2 b
345 $ hg up -q -- -2
345 $ hg up -q -- -2
346
346
347 Test that updated files are treated as "modified", when
347 Test that updated files are treated as "modified", when
348 'merge.update()' is aborted before 'merge.recordupdates()' (= parents
348 'merge.update()' is aborted before 'merge.recordupdates()' (= parents
349 aren't changed), even if none of mode, size and timestamp of them
349 aren't changed), even if none of mode, size and timestamp of them
350 isn't changed on the filesystem (see also issue4583).
350 isn't changed on the filesystem (see also issue4583).
351
351
352 $ cat > $TESTTMP/abort.py <<EOF
352 $ cat > $TESTTMP/abort.py <<EOF
353 > from __future__ import absolute_import
353 > from __future__ import absolute_import
354 > # emulate aborting before "recordupdates()". in this case, files
354 > # emulate aborting before "recordupdates()". in this case, files
355 > # are changed without updating dirstate
355 > # are changed without updating dirstate
356 > from mercurial import (
356 > from mercurial import (
357 > error,
357 > error,
358 > extensions,
358 > extensions,
359 > merge,
359 > merge,
360 > )
360 > )
361 > def applyupdates(orig, *args, **kwargs):
361 > def applyupdates(orig, *args, **kwargs):
362 > orig(*args, **kwargs)
362 > orig(*args, **kwargs)
363 > raise error.Abort('intentional aborting')
363 > raise error.Abort(b'intentional aborting')
364 > def extsetup(ui):
364 > def extsetup(ui):
365 > extensions.wrapfunction(merge, "applyupdates", applyupdates)
365 > extensions.wrapfunction(merge, "applyupdates", applyupdates)
366 > EOF
366 > EOF
367
367
368 $ cat >> .hg/hgrc <<EOF
368 $ cat >> .hg/hgrc <<EOF
369 > [fakedirstatewritetime]
369 > [fakedirstatewritetime]
370 > # emulate invoking dirstate.write() via repo.status()
370 > # emulate invoking dirstate.write() via repo.status()
371 > # at 2000-01-01 00:00
371 > # at 2000-01-01 00:00
372 > fakenow = 200001010000
372 > fakenow = 200001010000
373 > EOF
373 > EOF
374
374
375 (file gotten from other revision)
375 (file gotten from other revision)
376
376
377 $ hg update -q -C 2
377 $ hg update -q -C 2
378 $ echo 'THIS IS FILE B5' > b
378 $ echo 'THIS IS FILE B5' > b
379 $ hg commit -m 'commit #5'
379 $ hg commit -m 'commit #5'
380
380
381 $ hg update -q -C 3
381 $ hg update -q -C 3
382 $ cat b
382 $ cat b
383 This is file b1
383 This is file b1
384 $ touch -t 200001010000 b
384 $ touch -t 200001010000 b
385 $ hg debugrebuildstate
385 $ hg debugrebuildstate
386
386
387 $ cat >> .hg/hgrc <<EOF
387 $ cat >> .hg/hgrc <<EOF
388 > [extensions]
388 > [extensions]
389 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
389 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
390 > abort = $TESTTMP/abort.py
390 > abort = $TESTTMP/abort.py
391 > EOF
391 > EOF
392 $ hg merge 5
392 $ hg merge 5
393 abort: intentional aborting
393 abort: intentional aborting
394 [255]
394 [255]
395 $ cat >> .hg/hgrc <<EOF
395 $ cat >> .hg/hgrc <<EOF
396 > [extensions]
396 > [extensions]
397 > fakedirstatewritetime = !
397 > fakedirstatewritetime = !
398 > abort = !
398 > abort = !
399 > EOF
399 > EOF
400
400
401 $ cat b
401 $ cat b
402 THIS IS FILE B5
402 THIS IS FILE B5
403 $ touch -t 200001010000 b
403 $ touch -t 200001010000 b
404 $ hg status -A b
404 $ hg status -A b
405 M b
405 M b
406
406
407 (file merged from other revision)
407 (file merged from other revision)
408
408
409 $ hg update -q -C 3
409 $ hg update -q -C 3
410 $ echo 'this is file b6' > b
410 $ echo 'this is file b6' > b
411 $ hg commit -m 'commit #6'
411 $ hg commit -m 'commit #6'
412 created new head
412 created new head
413
413
414 $ cat b
414 $ cat b
415 this is file b6
415 this is file b6
416 $ touch -t 200001010000 b
416 $ touch -t 200001010000 b
417 $ hg debugrebuildstate
417 $ hg debugrebuildstate
418
418
419 $ cat >> .hg/hgrc <<EOF
419 $ cat >> .hg/hgrc <<EOF
420 > [extensions]
420 > [extensions]
421 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
421 > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py
422 > abort = $TESTTMP/abort.py
422 > abort = $TESTTMP/abort.py
423 > EOF
423 > EOF
424 $ hg merge --tool internal:other 5
424 $ hg merge --tool internal:other 5
425 abort: intentional aborting
425 abort: intentional aborting
426 [255]
426 [255]
427 $ cat >> .hg/hgrc <<EOF
427 $ cat >> .hg/hgrc <<EOF
428 > [extensions]
428 > [extensions]
429 > fakedirstatewritetime = !
429 > fakedirstatewritetime = !
430 > abort = !
430 > abort = !
431 > EOF
431 > EOF
432
432
433 $ cat b
433 $ cat b
434 THIS IS FILE B5
434 THIS IS FILE B5
435 $ touch -t 200001010000 b
435 $ touch -t 200001010000 b
436 $ hg status -A b
436 $ hg status -A b
437 M b
437 M b
438
438
439 $ cd ..
439 $ cd ..
@@ -1,264 +1,264 b''
1 $ cat <<EOF >> $HGRCPATH
1 $ cat <<EOF >> $HGRCPATH
2 > [extensions]
2 > [extensions]
3 > mq =
3 > mq =
4 > [mq]
4 > [mq]
5 > git = keep
5 > git = keep
6 > [diff]
6 > [diff]
7 > nodates = 1
7 > nodates = 1
8 > EOF
8 > EOF
9
9
10 init:
10 init:
11
11
12 $ hg init repo
12 $ hg init repo
13 $ cd repo
13 $ cd repo
14 $ echo a > a
14 $ echo a > a
15 $ hg ci -Am adda
15 $ hg ci -Am adda
16 adding a
16 adding a
17 $ echo a >> a
17 $ echo a >> a
18 $ hg qnew -f p1
18 $ hg qnew -f p1
19 $ echo b >> a
19 $ echo b >> a
20 $ hg qnew -f p2
20 $ hg qnew -f p2
21 $ echo c >> a
21 $ echo c >> a
22 $ hg qnew -f p3
22 $ hg qnew -f p3
23
23
24 Fold in the middle of the queue:
24 Fold in the middle of the queue:
25 (this tests also that editor is not invoked if '--edit' is not
25 (this tests also that editor is not invoked if '--edit' is not
26 specified)
26 specified)
27
27
28 $ hg qpop p1
28 $ hg qpop p1
29 popping p3
29 popping p3
30 popping p2
30 popping p2
31 now at: p1
31 now at: p1
32
32
33 $ hg qdiff
33 $ hg qdiff
34 diff -r 07f494440405 a
34 diff -r 07f494440405 a
35 --- a/a
35 --- a/a
36 +++ b/a
36 +++ b/a
37 @@ -1,1 +1,2 @@
37 @@ -1,1 +1,2 @@
38 a
38 a
39 +a
39 +a
40
40
41 $ HGEDITOR=cat hg qfold p2
41 $ HGEDITOR=cat hg qfold p2
42 $ grep git .hg/patches/p1 && echo 'git patch found!'
42 $ grep git .hg/patches/p1 && echo 'git patch found!'
43 [1]
43 [1]
44
44
45 $ hg qser
45 $ hg qser
46 p1
46 p1
47 p3
47 p3
48
48
49 $ hg qdiff
49 $ hg qdiff
50 diff -r 07f494440405 a
50 diff -r 07f494440405 a
51 --- a/a
51 --- a/a
52 +++ b/a
52 +++ b/a
53 @@ -1,1 +1,3 @@
53 @@ -1,1 +1,3 @@
54 a
54 a
55 +a
55 +a
56 +b
56 +b
57
57
58 Fold with local changes:
58 Fold with local changes:
59
59
60 $ echo d >> a
60 $ echo d >> a
61 $ hg qfold p3
61 $ hg qfold p3
62 abort: local changes found, qrefresh first
62 abort: local changes found, qrefresh first
63 [255]
63 [255]
64
64
65 $ hg diff -c .
65 $ hg diff -c .
66 diff -r 07f494440405 -r ???????????? a (glob)
66 diff -r 07f494440405 -r ???????????? a (glob)
67 --- a/a
67 --- a/a
68 +++ b/a
68 +++ b/a
69 @@ -1,1 +1,3 @@
69 @@ -1,1 +1,3 @@
70 a
70 a
71 +a
71 +a
72 +b
72 +b
73
73
74 $ hg revert -a --no-backup
74 $ hg revert -a --no-backup
75 reverting a
75 reverting a
76
76
77 Fold git patch into a regular patch, expect git patch:
77 Fold git patch into a regular patch, expect git patch:
78
78
79 $ echo a >> a
79 $ echo a >> a
80 $ hg qnew -f regular
80 $ hg qnew -f regular
81 $ hg cp a aa
81 $ hg cp a aa
82 $ hg qnew --git -f git
82 $ hg qnew --git -f git
83
83
84 $ hg qpop
84 $ hg qpop
85 popping git
85 popping git
86 now at: regular
86 now at: regular
87
87
88 $ hg qfold git
88 $ hg qfold git
89
89
90 $ cat .hg/patches/regular
90 $ cat .hg/patches/regular
91 # HG changeset patch
91 # HG changeset patch
92 # Parent ???????????????????????????????????????? (glob)
92 # Parent ???????????????????????????????????????? (glob)
93
93
94 diff --git a/a b/a
94 diff --git a/a b/a
95 --- a/a
95 --- a/a
96 +++ b/a
96 +++ b/a
97 @@ -1,3 +1,4 @@
97 @@ -1,3 +1,4 @@
98 a
98 a
99 a
99 a
100 b
100 b
101 +a
101 +a
102 diff --git a/a b/aa
102 diff --git a/a b/aa
103 copy from a
103 copy from a
104 copy to aa
104 copy to aa
105 --- a/a
105 --- a/a
106 +++ b/aa
106 +++ b/aa
107 @@ -1,3 +1,4 @@
107 @@ -1,3 +1,4 @@
108 a
108 a
109 a
109 a
110 b
110 b
111 +a
111 +a
112
112
113 $ hg qpop
113 $ hg qpop
114 popping regular
114 popping regular
115 now at: p1
115 now at: p1
116
116
117 $ hg qdel regular
117 $ hg qdel regular
118
118
119 Fold regular patch into a git patch, expect git patch:
119 Fold regular patch into a git patch, expect git patch:
120
120
121 $ hg cp a aa
121 $ hg cp a aa
122 $ hg qnew --git -f git
122 $ hg qnew --git -f git
123 $ echo b >> aa
123 $ echo b >> aa
124 $ hg qnew -f regular
124 $ hg qnew -f regular
125
125
126 $ hg qpop
126 $ hg qpop
127 popping regular
127 popping regular
128 now at: git
128 now at: git
129
129
130 $ hg qfold regular
130 $ hg qfold regular
131
131
132 $ cat .hg/patches/git
132 $ cat .hg/patches/git
133 # HG changeset patch
133 # HG changeset patch
134 # Parent ???????????????????????????????????????? (glob)
134 # Parent ???????????????????????????????????????? (glob)
135
135
136 diff --git a/a b/aa
136 diff --git a/a b/aa
137 copy from a
137 copy from a
138 copy to aa
138 copy to aa
139 --- a/a
139 --- a/a
140 +++ b/aa
140 +++ b/aa
141 @@ -1,3 +1,4 @@
141 @@ -1,3 +1,4 @@
142 a
142 a
143 a
143 a
144 b
144 b
145 +b
145 +b
146
146
147 Test saving last-message.txt:
147 Test saving last-message.txt:
148
148
149 $ hg qrefresh -m "original message"
149 $ hg qrefresh -m "original message"
150
150
151 $ cat > $TESTTMP/commitfailure.py <<EOF
151 $ cat > $TESTTMP/commitfailure.py <<EOF
152 > from mercurial import error
152 > from mercurial import error
153 > def reposetup(ui, repo):
153 > def reposetup(ui, repo):
154 > class commitfailure(repo.__class__):
154 > class commitfailure(repo.__class__):
155 > def commit(self, *args, **kwargs):
155 > def commit(self, *args, **kwargs):
156 > raise error.Abort('emulating unexpected abort')
156 > raise error.Abort(b'emulating unexpected abort')
157 > repo.__class__ = commitfailure
157 > repo.__class__ = commitfailure
158 > EOF
158 > EOF
159
159
160 $ cat >> .hg/hgrc <<EOF
160 $ cat >> .hg/hgrc <<EOF
161 > [extensions]
161 > [extensions]
162 > # this failure occurs before editor invocation
162 > # this failure occurs before editor invocation
163 > commitfailure = $TESTTMP/commitfailure.py
163 > commitfailure = $TESTTMP/commitfailure.py
164 > EOF
164 > EOF
165
165
166 $ cat > $TESTTMP/editor.sh << EOF
166 $ cat > $TESTTMP/editor.sh << EOF
167 > echo "==== before editing"
167 > echo "==== before editing"
168 > cat \$1
168 > cat \$1
169 > echo "===="
169 > echo "===="
170 > (echo; echo "test saving last-message.txt") >> \$1
170 > (echo; echo "test saving last-message.txt") >> \$1
171 > EOF
171 > EOF
172
172
173 $ hg qapplied
173 $ hg qapplied
174 p1
174 p1
175 git
175 git
176 $ hg tip --template "{files}\n"
176 $ hg tip --template "{files}\n"
177 aa
177 aa
178
178
179 (test that editor is not invoked before transaction starting,
179 (test that editor is not invoked before transaction starting,
180 and that combination of '--edit' and '--message' doesn't abort execution)
180 and that combination of '--edit' and '--message' doesn't abort execution)
181
181
182 $ rm -f .hg/last-message.txt
182 $ rm -f .hg/last-message.txt
183 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e -m MESSAGE p3
183 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e -m MESSAGE p3
184 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
184 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
185 abort: emulating unexpected abort
185 abort: emulating unexpected abort
186 [255]
186 [255]
187 $ test -f .hg/last-message.txt
187 $ test -f .hg/last-message.txt
188 [1]
188 [1]
189
189
190 (reset applied patches and directory status)
190 (reset applied patches and directory status)
191
191
192 $ cat >> .hg/hgrc <<EOF
192 $ cat >> .hg/hgrc <<EOF
193 > [extensions]
193 > [extensions]
194 > # this failure occurs after editor invocation
194 > # this failure occurs after editor invocation
195 > commitfailure = !
195 > commitfailure = !
196 > EOF
196 > EOF
197
197
198 $ hg qapplied
198 $ hg qapplied
199 p1
199 p1
200 $ hg status -A aa
200 $ hg status -A aa
201 ? aa
201 ? aa
202 $ rm aa
202 $ rm aa
203 $ hg status -m
203 $ hg status -m
204 M a
204 M a
205 $ hg revert --no-backup -q a
205 $ hg revert --no-backup -q a
206 $ hg qpush -q git
206 $ hg qpush -q git
207 now at: git
207 now at: git
208
208
209 (test that editor is invoked and commit message is saved into
209 (test that editor is invoked and commit message is saved into
210 "last-message.txt")
210 "last-message.txt")
211
211
212 $ cat >> .hg/hgrc <<EOF
212 $ cat >> .hg/hgrc <<EOF
213 > [hooks]
213 > [hooks]
214 > # this failure occurs after editor invocation
214 > # this failure occurs after editor invocation
215 > pretxncommit.unexpectedabort = false
215 > pretxncommit.unexpectedabort = false
216 > EOF
216 > EOF
217
217
218 $ rm -f .hg/last-message.txt
218 $ rm -f .hg/last-message.txt
219 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e p3
219 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e p3
220 ==== before editing
220 ==== before editing
221 original message
221 original message
222
222
223
223
224 HG: Enter commit message. Lines beginning with 'HG:' are removed.
224 HG: Enter commit message. Lines beginning with 'HG:' are removed.
225 HG: Leave message empty to use default message.
225 HG: Leave message empty to use default message.
226 HG: --
226 HG: --
227 HG: user: test
227 HG: user: test
228 HG: branch 'default'
228 HG: branch 'default'
229 HG: added aa
229 HG: added aa
230 HG: changed a
230 HG: changed a
231 ====
231 ====
232 note: commit message saved in .hg/last-message.txt
232 note: commit message saved in .hg/last-message.txt
233 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
233 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
234 transaction abort!
234 transaction abort!
235 rollback completed
235 rollback completed
236 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
236 qrefresh interrupted while patch was popped! (revert --all, qpush to recover)
237 abort: pretxncommit.unexpectedabort hook exited with status 1
237 abort: pretxncommit.unexpectedabort hook exited with status 1
238 [255]
238 [255]
239 $ cat .hg/last-message.txt
239 $ cat .hg/last-message.txt
240 original message
240 original message
241
241
242
242
243
243
244 test saving last-message.txt
244 test saving last-message.txt
245
245
246 (confirm whether files listed up in the commit message editing are correct)
246 (confirm whether files listed up in the commit message editing are correct)
247
247
248 $ cat >> .hg/hgrc <<EOF
248 $ cat >> .hg/hgrc <<EOF
249 > [hooks]
249 > [hooks]
250 > pretxncommit.unexpectedabort =
250 > pretxncommit.unexpectedabort =
251 > EOF
251 > EOF
252 $ hg status -u | while read f; do rm ${f}; done
252 $ hg status -u | while read f; do rm ${f}; done
253 $ hg revert --no-backup -q --all
253 $ hg revert --no-backup -q --all
254 $ hg qpush -q git
254 $ hg qpush -q git
255 now at: git
255 now at: git
256 $ hg qpush -q --move p3
256 $ hg qpush -q --move p3
257 now at: p3
257 now at: p3
258
258
259 $ hg status --rev "git^1" --rev . -arm
259 $ hg status --rev "git^1" --rev . -arm
260 M a
260 M a
261 A aa
261 A aa
262
262
263 $ cd ..
263 $ cd ..
264
264
@@ -1,322 +1,322 b''
1
1
2 $ catpatch() {
2 $ catpatch() {
3 > cat $1 | sed -e "s/^\(# Parent \).*/\1/"
3 > cat $1 | sed -e "s/^\(# Parent \).*/\1/"
4 > }
4 > }
5 $ echo "[extensions]" >> $HGRCPATH
5 $ echo "[extensions]" >> $HGRCPATH
6 $ echo "mq=" >> $HGRCPATH
6 $ echo "mq=" >> $HGRCPATH
7 $ runtest() {
7 $ runtest() {
8 > hg init mq
8 > hg init mq
9 > cd mq
9 > cd mq
10 >
10 >
11 > echo a > a
11 > echo a > a
12 > hg ci -Ama
12 > hg ci -Ama
13 >
13 >
14 > echo '% qnew should refuse bad patch names'
14 > echo '% qnew should refuse bad patch names'
15 > hg qnew series
15 > hg qnew series
16 > hg qnew status
16 > hg qnew status
17 > hg qnew guards
17 > hg qnew guards
18 > hg qnew .
18 > hg qnew .
19 > hg qnew ..
19 > hg qnew ..
20 > hg qnew .hgignore
20 > hg qnew .hgignore
21 > hg qnew .mqfoo
21 > hg qnew .mqfoo
22 > hg qnew 'foo#bar'
22 > hg qnew 'foo#bar'
23 > hg qnew 'foo:bar'
23 > hg qnew 'foo:bar'
24 > hg qnew "`echo foo; echo bar`"
24 > hg qnew "`echo foo; echo bar`"
25 > hg qnew ' foo'
25 > hg qnew ' foo'
26 > hg qnew 'foo '
26 > hg qnew 'foo '
27 >
27 >
28 > hg qinit -c
28 > hg qinit -c
29 >
29 >
30 > echo '% qnew with name containing slash'
30 > echo '% qnew with name containing slash'
31 > hg qnew foo/
31 > hg qnew foo/
32 > hg qnew foo/bar.patch
32 > hg qnew foo/bar.patch
33 > hg qnew foo
33 > hg qnew foo
34 > hg qseries
34 > hg qseries
35 > hg qpop
35 > hg qpop
36 > hg qdelete foo/bar.patch
36 > hg qdelete foo/bar.patch
37 >
37 >
38 > echo '% qnew with uncommitted changes'
38 > echo '% qnew with uncommitted changes'
39 > echo a > somefile
39 > echo a > somefile
40 > hg add somefile
40 > hg add somefile
41 > hg qnew uncommitted.patch
41 > hg qnew uncommitted.patch
42 > hg st
42 > hg st
43 > hg qseries
43 > hg qseries
44 >
44 >
45 > echo '% qnew implies add'
45 > echo '% qnew implies add'
46 > hg -R .hg/patches st
46 > hg -R .hg/patches st
47 >
47 >
48 > echo '% qnew missing'
48 > echo '% qnew missing'
49 > hg qnew missing.patch missing
49 > hg qnew missing.patch missing
50 >
50 >
51 > echo '% qnew -m'
51 > echo '% qnew -m'
52 > hg qnew -m 'foo bar' mtest.patch
52 > hg qnew -m 'foo bar' mtest.patch
53 > catpatch .hg/patches/mtest.patch
53 > catpatch .hg/patches/mtest.patch
54 >
54 >
55 > echo '% qnew twice'
55 > echo '% qnew twice'
56 > hg qnew first.patch
56 > hg qnew first.patch
57 > hg qnew first.patch
57 > hg qnew first.patch
58 >
58 >
59 > touch ../first.patch
59 > touch ../first.patch
60 > hg qimport ../first.patch
60 > hg qimport ../first.patch
61 >
61 >
62 > echo '% qnew -f from a subdirectory'
62 > echo '% qnew -f from a subdirectory'
63 > hg qpop -a
63 > hg qpop -a
64 > mkdir d
64 > mkdir d
65 > cd d
65 > cd d
66 > echo b > b
66 > echo b > b
67 > hg ci -Am t
67 > hg ci -Am t
68 > echo b >> b
68 > echo b >> b
69 > hg st
69 > hg st
70 > hg qnew -g -f p
70 > hg qnew -g -f p
71 > catpatch ../.hg/patches/p
71 > catpatch ../.hg/patches/p
72 >
72 >
73 > echo '% qnew -u with no username configured'
73 > echo '% qnew -u with no username configured'
74 > HGUSER= hg qnew -u blue red
74 > HGUSER= hg qnew -u blue red
75 > catpatch ../.hg/patches/red
75 > catpatch ../.hg/patches/red
76 >
76 >
77 > echo '% qnew -e -u with no username configured'
77 > echo '% qnew -e -u with no username configured'
78 > HGUSER= hg qnew -e -u chartreuse fucsia
78 > HGUSER= hg qnew -e -u chartreuse fucsia
79 > catpatch ../.hg/patches/fucsia
79 > catpatch ../.hg/patches/fucsia
80 >
80 >
81 > echo '% fail when trying to import a merge'
81 > echo '% fail when trying to import a merge'
82 > hg init merge
82 > hg init merge
83 > cd merge
83 > cd merge
84 > touch a
84 > touch a
85 > hg ci -Am null
85 > hg ci -Am null
86 > echo a >> a
86 > echo a >> a
87 > hg ci -m a
87 > hg ci -m a
88 > hg up -r 0
88 > hg up -r 0
89 > echo b >> a
89 > echo b >> a
90 > hg ci -m b
90 > hg ci -m b
91 > hg merge -f 1
91 > hg merge -f 1
92 > hg resolve --mark a
92 > hg resolve --mark a
93 > hg qnew -f merge
93 > hg qnew -f merge
94 >
94 >
95 > cd ../../..
95 > cd ../../..
96 > rm -r mq
96 > rm -r mq
97 > }
97 > }
98
98
99 plain headers
99 plain headers
100
100
101 $ echo "[mq]" >> $HGRCPATH
101 $ echo "[mq]" >> $HGRCPATH
102 $ echo "plain=true" >> $HGRCPATH
102 $ echo "plain=true" >> $HGRCPATH
103 $ mkdir sandbox
103 $ mkdir sandbox
104 $ (cd sandbox ; runtest)
104 $ (cd sandbox ; runtest)
105 adding a
105 adding a
106 % qnew should refuse bad patch names
106 % qnew should refuse bad patch names
107 abort: "series" cannot be used as the name of a patch
107 abort: "series" cannot be used as the name of a patch
108 abort: "status" cannot be used as the name of a patch
108 abort: "status" cannot be used as the name of a patch
109 abort: "guards" cannot be used as the name of a patch
109 abort: "guards" cannot be used as the name of a patch
110 abort: "." cannot be used as the name of a patch
110 abort: "." cannot be used as the name of a patch
111 abort: ".." cannot be used as the name of a patch
111 abort: ".." cannot be used as the name of a patch
112 abort: patch name cannot begin with ".hg"
112 abort: patch name cannot begin with ".hg"
113 abort: patch name cannot begin with ".mq"
113 abort: patch name cannot begin with ".mq"
114 abort: '#' cannot be used in the name of a patch
114 abort: '#' cannot be used in the name of a patch
115 abort: ':' cannot be used in the name of a patch
115 abort: ':' cannot be used in the name of a patch
116 abort: '\n' cannot be used in the name of a patch
116 abort: '\n' cannot be used in the name of a patch
117 abort: patch name cannot begin or end with whitespace
117 abort: patch name cannot begin or end with whitespace
118 abort: patch name cannot begin or end with whitespace
118 abort: patch name cannot begin or end with whitespace
119 % qnew with name containing slash
119 % qnew with name containing slash
120 abort: path ends in directory separator: foo/
120 abort: path ends in directory separator: foo/
121 abort: "foo" already exists as a directory
121 abort: "foo" already exists as a directory
122 foo/bar.patch
122 foo/bar.patch
123 popping foo/bar.patch
123 popping foo/bar.patch
124 patch queue now empty
124 patch queue now empty
125 % qnew with uncommitted changes
125 % qnew with uncommitted changes
126 uncommitted.patch
126 uncommitted.patch
127 % qnew implies add
127 % qnew implies add
128 A .hgignore
128 A .hgignore
129 A series
129 A series
130 A uncommitted.patch
130 A uncommitted.patch
131 % qnew missing
131 % qnew missing
132 abort: missing: * (glob)
132 abort: missing: * (glob)
133 % qnew -m
133 % qnew -m
134 foo bar
134 foo bar
135
135
136 % qnew twice
136 % qnew twice
137 abort: patch "first.patch" already exists
137 abort: patch "first.patch" already exists
138 abort: patch "first.patch" already exists
138 abort: patch "first.patch" already exists
139 % qnew -f from a subdirectory
139 % qnew -f from a subdirectory
140 popping first.patch
140 popping first.patch
141 popping mtest.patch
141 popping mtest.patch
142 popping uncommitted.patch
142 popping uncommitted.patch
143 patch queue now empty
143 patch queue now empty
144 adding d/b
144 adding d/b
145 M d/b
145 M d/b
146 diff --git a/d/b b/d/b
146 diff --git a/d/b b/d/b
147 --- a/d/b
147 --- a/d/b
148 +++ b/d/b
148 +++ b/d/b
149 @@ -1,1 +1,2 @@
149 @@ -1,1 +1,2 @@
150 b
150 b
151 +b
151 +b
152 % qnew -u with no username configured
152 % qnew -u with no username configured
153 From: blue
153 From: blue
154
154
155 % qnew -e -u with no username configured
155 % qnew -e -u with no username configured
156 From: chartreuse
156 From: chartreuse
157
157
158 % fail when trying to import a merge
158 % fail when trying to import a merge
159 adding a
159 adding a
160 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
160 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
161 created new head
161 created new head
162 merging a
162 merging a
163 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
163 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
164 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
164 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
165 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
165 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
166 (no more unresolved files)
166 (no more unresolved files)
167 abort: cannot manage merge changesets
167 abort: cannot manage merge changesets
168 $ rm -r sandbox
168 $ rm -r sandbox
169
169
170 hg headers
170 hg headers
171
171
172 $ echo "plain=false" >> $HGRCPATH
172 $ echo "plain=false" >> $HGRCPATH
173 $ mkdir sandbox
173 $ mkdir sandbox
174 $ (cd sandbox ; runtest)
174 $ (cd sandbox ; runtest)
175 adding a
175 adding a
176 % qnew should refuse bad patch names
176 % qnew should refuse bad patch names
177 abort: "series" cannot be used as the name of a patch
177 abort: "series" cannot be used as the name of a patch
178 abort: "status" cannot be used as the name of a patch
178 abort: "status" cannot be used as the name of a patch
179 abort: "guards" cannot be used as the name of a patch
179 abort: "guards" cannot be used as the name of a patch
180 abort: "." cannot be used as the name of a patch
180 abort: "." cannot be used as the name of a patch
181 abort: ".." cannot be used as the name of a patch
181 abort: ".." cannot be used as the name of a patch
182 abort: patch name cannot begin with ".hg"
182 abort: patch name cannot begin with ".hg"
183 abort: patch name cannot begin with ".mq"
183 abort: patch name cannot begin with ".mq"
184 abort: '#' cannot be used in the name of a patch
184 abort: '#' cannot be used in the name of a patch
185 abort: ':' cannot be used in the name of a patch
185 abort: ':' cannot be used in the name of a patch
186 abort: '\n' cannot be used in the name of a patch
186 abort: '\n' cannot be used in the name of a patch
187 abort: patch name cannot begin or end with whitespace
187 abort: patch name cannot begin or end with whitespace
188 abort: patch name cannot begin or end with whitespace
188 abort: patch name cannot begin or end with whitespace
189 % qnew with name containing slash
189 % qnew with name containing slash
190 abort: path ends in directory separator: foo/
190 abort: path ends in directory separator: foo/
191 abort: "foo" already exists as a directory
191 abort: "foo" already exists as a directory
192 foo/bar.patch
192 foo/bar.patch
193 popping foo/bar.patch
193 popping foo/bar.patch
194 patch queue now empty
194 patch queue now empty
195 % qnew with uncommitted changes
195 % qnew with uncommitted changes
196 uncommitted.patch
196 uncommitted.patch
197 % qnew implies add
197 % qnew implies add
198 A .hgignore
198 A .hgignore
199 A series
199 A series
200 A uncommitted.patch
200 A uncommitted.patch
201 % qnew missing
201 % qnew missing
202 abort: missing: * (glob)
202 abort: missing: * (glob)
203 % qnew -m
203 % qnew -m
204 # HG changeset patch
204 # HG changeset patch
205 # Parent
205 # Parent
206 foo bar
206 foo bar
207
207
208 % qnew twice
208 % qnew twice
209 abort: patch "first.patch" already exists
209 abort: patch "first.patch" already exists
210 abort: patch "first.patch" already exists
210 abort: patch "first.patch" already exists
211 % qnew -f from a subdirectory
211 % qnew -f from a subdirectory
212 popping first.patch
212 popping first.patch
213 popping mtest.patch
213 popping mtest.patch
214 popping uncommitted.patch
214 popping uncommitted.patch
215 patch queue now empty
215 patch queue now empty
216 adding d/b
216 adding d/b
217 M d/b
217 M d/b
218 # HG changeset patch
218 # HG changeset patch
219 # Parent
219 # Parent
220
220
221 diff --git a/d/b b/d/b
221 diff --git a/d/b b/d/b
222 --- a/d/b
222 --- a/d/b
223 +++ b/d/b
223 +++ b/d/b
224 @@ -1,1 +1,2 @@
224 @@ -1,1 +1,2 @@
225 b
225 b
226 +b
226 +b
227 % qnew -u with no username configured
227 % qnew -u with no username configured
228 # HG changeset patch
228 # HG changeset patch
229 # User blue
229 # User blue
230 # Parent
230 # Parent
231
231
232 % qnew -e -u with no username configured
232 % qnew -e -u with no username configured
233 # HG changeset patch
233 # HG changeset patch
234 # User chartreuse
234 # User chartreuse
235 # Parent
235 # Parent
236
236
237 % fail when trying to import a merge
237 % fail when trying to import a merge
238 adding a
238 adding a
239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
239 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
240 created new head
240 created new head
241 merging a
241 merging a
242 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
242 warning: conflicts while merging a! (edit, then use 'hg resolve --mark')
243 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
243 0 files updated, 0 files merged, 0 files removed, 1 files unresolved
244 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
244 use 'hg resolve' to retry unresolved file merges or 'hg merge --abort' to abandon
245 (no more unresolved files)
245 (no more unresolved files)
246 abort: cannot manage merge changesets
246 abort: cannot manage merge changesets
247 $ rm -r sandbox
247 $ rm -r sandbox
248
248
249 Test saving last-message.txt
249 Test saving last-message.txt
250
250
251 $ hg init repo
251 $ hg init repo
252 $ cd repo
252 $ cd repo
253
253
254 $ cat > $TESTTMP/commitfailure.py <<EOF
254 $ cat > $TESTTMP/commitfailure.py <<EOF
255 > from mercurial import error
255 > from mercurial import error
256 > def reposetup(ui, repo):
256 > def reposetup(ui, repo):
257 > class commitfailure(repo.__class__):
257 > class commitfailure(repo.__class__):
258 > def commit(self, *args, **kwargs):
258 > def commit(self, *args, **kwargs):
259 > raise error.Abort('emulating unexpected abort')
259 > raise error.Abort(b'emulating unexpected abort')
260 > repo.__class__ = commitfailure
260 > repo.__class__ = commitfailure
261 > EOF
261 > EOF
262 $ cat >> .hg/hgrc <<EOF
262 $ cat >> .hg/hgrc <<EOF
263 > [extensions]
263 > [extensions]
264 > # this failure occurs before editor invocation
264 > # this failure occurs before editor invocation
265 > commitfailure = $TESTTMP/commitfailure.py
265 > commitfailure = $TESTTMP/commitfailure.py
266 > EOF
266 > EOF
267
267
268 $ cat > $TESTTMP/editor.sh << EOF
268 $ cat > $TESTTMP/editor.sh << EOF
269 > echo "==== before editing"
269 > echo "==== before editing"
270 > cat \$1
270 > cat \$1
271 > echo "===="
271 > echo "===="
272 > echo "test saving last-message.txt" >> \$1
272 > echo "test saving last-message.txt" >> \$1
273 > EOF
273 > EOF
274
274
275 (test that editor is not invoked before transaction starting)
275 (test that editor is not invoked before transaction starting)
276
276
277 $ rm -f .hg/last-message.txt
277 $ rm -f .hg/last-message.txt
278 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
278 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
279 abort: emulating unexpected abort
279 abort: emulating unexpected abort
280 [255]
280 [255]
281 $ test -f .hg/last-message.txt
281 $ test -f .hg/last-message.txt
282 [1]
282 [1]
283
283
284 (test that editor is invoked and commit message is saved into
284 (test that editor is invoked and commit message is saved into
285 "last-message.txt")
285 "last-message.txt")
286
286
287 $ cat >> .hg/hgrc <<EOF
287 $ cat >> .hg/hgrc <<EOF
288 > [extensions]
288 > [extensions]
289 > commitfailure = !
289 > commitfailure = !
290 > [hooks]
290 > [hooks]
291 > # this failure occurs after editor invocation
291 > # this failure occurs after editor invocation
292 > pretxncommit.unexpectedabort = false
292 > pretxncommit.unexpectedabort = false
293 > EOF
293 > EOF
294
294
295 $ rm -f .hg/last-message.txt
295 $ rm -f .hg/last-message.txt
296 $ hg status
296 $ hg status
297 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
297 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch
298 ==== before editing
298 ==== before editing
299
299
300
300
301 HG: Enter commit message. Lines beginning with 'HG:' are removed.
301 HG: Enter commit message. Lines beginning with 'HG:' are removed.
302 HG: Leave message empty to use default message.
302 HG: Leave message empty to use default message.
303 HG: --
303 HG: --
304 HG: user: test
304 HG: user: test
305 HG: branch 'default'
305 HG: branch 'default'
306 HG: no files changed
306 HG: no files changed
307 ====
307 ====
308 transaction abort!
308 transaction abort!
309 rollback completed
309 rollback completed
310 note: commit message saved in .hg/last-message.txt
310 note: commit message saved in .hg/last-message.txt
311 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
311 note: use 'hg commit --logfile .hg/last-message.txt --edit' to reuse it
312 abort: pretxncommit.unexpectedabort hook exited with status 1
312 abort: pretxncommit.unexpectedabort hook exited with status 1
313 [255]
313 [255]
314 $ cat .hg/last-message.txt
314 $ cat .hg/last-message.txt
315
315
316
316
317 test saving last-message.txt
317 test saving last-message.txt
318
318
319 $ cat >> .hg/hgrc <<EOF
319 $ cat >> .hg/hgrc <<EOF
320 > [hooks]
320 > [hooks]
321 > pretxncommit.unexpectedabort =
321 > pretxncommit.unexpectedabort =
322 > EOF
322 > EOF
@@ -1,293 +1,293 b''
1 #testcases vfs svfs
1 #testcases vfs svfs
2 #testcases safe normal
2 #testcases safe normal
3
3
4 #if safe
4 #if safe
5 $ echo "[format]" >> $HGRCPATH
5 $ echo "[format]" >> $HGRCPATH
6 $ echo "exp-share-safe = True" >> $HGRCPATH
6 $ echo "exp-share-safe = True" >> $HGRCPATH
7 #endif
7 #endif
8
8
9 $ echo "[extensions]" >> $HGRCPATH
9 $ echo "[extensions]" >> $HGRCPATH
10 $ echo "share = " >> $HGRCPATH
10 $ echo "share = " >> $HGRCPATH
11
11
12 #if svfs
12 #if svfs
13 $ echo "[format]" >> $HGRCPATH
13 $ echo "[format]" >> $HGRCPATH
14 $ echo "bookmarks-in-store = yes " >> $HGRCPATH
14 $ echo "bookmarks-in-store = yes " >> $HGRCPATH
15 #endif
15 #endif
16
16
17 prepare repo1
17 prepare repo1
18
18
19 $ hg init repo1
19 $ hg init repo1
20 $ cd repo1
20 $ cd repo1
21 $ echo a > a
21 $ echo a > a
22 $ hg commit -A -m'init'
22 $ hg commit -A -m'init'
23 adding a
23 adding a
24 $ echo a >> a
24 $ echo a >> a
25 $ hg commit -m'change in shared clone'
25 $ hg commit -m'change in shared clone'
26 $ echo b > b
26 $ echo b > b
27 $ hg commit -A -m'another file'
27 $ hg commit -A -m'another file'
28 adding b
28 adding b
29
29
30 share it
30 share it
31
31
32 $ cd ..
32 $ cd ..
33 $ hg share repo1 repo2
33 $ hg share repo1 repo2
34 updating working directory
34 updating working directory
35 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
35 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
36
36
37 test sharing bookmarks
37 test sharing bookmarks
38
38
39 $ hg share -B repo1 repo3
39 $ hg share -B repo1 repo3
40 updating working directory
40 updating working directory
41 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
41 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
42 $ cd repo1
42 $ cd repo1
43 $ hg bookmark bm1
43 $ hg bookmark bm1
44 $ hg bookmarks
44 $ hg bookmarks
45 * bm1 2:c2e0ac586386
45 * bm1 2:c2e0ac586386
46 $ cd ../repo2
46 $ cd ../repo2
47 $ hg book bm2
47 $ hg book bm2
48 $ hg bookmarks
48 $ hg bookmarks
49 bm1 2:c2e0ac586386 (svfs !)
49 bm1 2:c2e0ac586386 (svfs !)
50 * bm2 2:c2e0ac586386
50 * bm2 2:c2e0ac586386
51 $ cd ../repo3
51 $ cd ../repo3
52 $ hg bookmarks
52 $ hg bookmarks
53 bm1 2:c2e0ac586386
53 bm1 2:c2e0ac586386
54 bm2 2:c2e0ac586386 (svfs !)
54 bm2 2:c2e0ac586386 (svfs !)
55 $ hg book bm3
55 $ hg book bm3
56 $ hg bookmarks
56 $ hg bookmarks
57 bm1 2:c2e0ac586386
57 bm1 2:c2e0ac586386
58 bm2 2:c2e0ac586386 (svfs !)
58 bm2 2:c2e0ac586386 (svfs !)
59 * bm3 2:c2e0ac586386
59 * bm3 2:c2e0ac586386
60 $ cd ../repo1
60 $ cd ../repo1
61 $ hg bookmarks
61 $ hg bookmarks
62 * bm1 2:c2e0ac586386
62 * bm1 2:c2e0ac586386
63 bm2 2:c2e0ac586386 (svfs !)
63 bm2 2:c2e0ac586386 (svfs !)
64 bm3 2:c2e0ac586386
64 bm3 2:c2e0ac586386
65
65
66 check whether HG_PENDING makes pending changes only in relatd
66 check whether HG_PENDING makes pending changes only in relatd
67 repositories visible to an external hook.
67 repositories visible to an external hook.
68
68
69 In "hg share" case, another transaction can't run in other
69 In "hg share" case, another transaction can't run in other
70 repositories sharing same source repository, because starting
70 repositories sharing same source repository, because starting
71 transaction requires locking store of source repository.
71 transaction requires locking store of source repository.
72
72
73 Therefore, this test scenario ignores checking visibility of
73 Therefore, this test scenario ignores checking visibility of
74 .hg/bookmarks.pending in repo2, which shares repo1 without bookmarks.
74 .hg/bookmarks.pending in repo2, which shares repo1 without bookmarks.
75
75
76 $ cat > $TESTTMP/checkbookmarks.sh <<EOF
76 $ cat > $TESTTMP/checkbookmarks.sh <<EOF
77 > echo "@repo1"
77 > echo "@repo1"
78 > hg -R "$TESTTMP/repo1" bookmarks
78 > hg -R "$TESTTMP/repo1" bookmarks
79 > echo "@repo2"
79 > echo "@repo2"
80 > hg -R "$TESTTMP/repo2" bookmarks
80 > hg -R "$TESTTMP/repo2" bookmarks
81 > echo "@repo3"
81 > echo "@repo3"
82 > hg -R "$TESTTMP/repo3" bookmarks
82 > hg -R "$TESTTMP/repo3" bookmarks
83 > exit 1 # to avoid adding new bookmark for subsequent tests
83 > exit 1 # to avoid adding new bookmark for subsequent tests
84 > EOF
84 > EOF
85
85
86 $ cd ../repo1
86 $ cd ../repo1
87 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
87 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
88 @repo1
88 @repo1
89 bm1 2:c2e0ac586386
89 bm1 2:c2e0ac586386
90 bm2 2:c2e0ac586386 (svfs !)
90 bm2 2:c2e0ac586386 (svfs !)
91 bm3 2:c2e0ac586386
91 bm3 2:c2e0ac586386
92 * bmX 2:c2e0ac586386
92 * bmX 2:c2e0ac586386
93 @repo2
93 @repo2
94 bm1 2:c2e0ac586386 (svfs !)
94 bm1 2:c2e0ac586386 (svfs !)
95 * bm2 2:c2e0ac586386
95 * bm2 2:c2e0ac586386
96 bm3 2:c2e0ac586386 (svfs !)
96 bm3 2:c2e0ac586386 (svfs !)
97 @repo3
97 @repo3
98 bm1 2:c2e0ac586386
98 bm1 2:c2e0ac586386
99 bm2 2:c2e0ac586386 (svfs !)
99 bm2 2:c2e0ac586386 (svfs !)
100 * bm3 2:c2e0ac586386
100 * bm3 2:c2e0ac586386
101 bmX 2:c2e0ac586386 (vfs !)
101 bmX 2:c2e0ac586386 (vfs !)
102 transaction abort!
102 transaction abort!
103 rollback completed
103 rollback completed
104 abort: pretxnclose hook exited with status 1
104 abort: pretxnclose hook exited with status 1
105 [255]
105 [255]
106 $ hg book bm1
106 $ hg book bm1
107
107
108 FYI, in contrast to above test, bmX is invisible in repo1 (= shared
108 FYI, in contrast to above test, bmX is invisible in repo1 (= shared
109 src), because (1) HG_PENDING refers only repo3 and (2)
109 src), because (1) HG_PENDING refers only repo3 and (2)
110 "bookmarks.pending" is written only into repo3.
110 "bookmarks.pending" is written only into repo3.
111
111
112 $ cd ../repo3
112 $ cd ../repo3
113 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
113 $ hg --config hooks.pretxnclose="sh $TESTTMP/checkbookmarks.sh" -q book bmX
114 @repo1
114 @repo1
115 * bm1 2:c2e0ac586386
115 * bm1 2:c2e0ac586386
116 bm2 2:c2e0ac586386 (svfs !)
116 bm2 2:c2e0ac586386 (svfs !)
117 bm3 2:c2e0ac586386
117 bm3 2:c2e0ac586386
118 @repo2
118 @repo2
119 bm1 2:c2e0ac586386 (svfs !)
119 bm1 2:c2e0ac586386 (svfs !)
120 * bm2 2:c2e0ac586386
120 * bm2 2:c2e0ac586386
121 bm3 2:c2e0ac586386 (svfs !)
121 bm3 2:c2e0ac586386 (svfs !)
122 @repo3
122 @repo3
123 bm1 2:c2e0ac586386
123 bm1 2:c2e0ac586386
124 bm2 2:c2e0ac586386 (svfs !)
124 bm2 2:c2e0ac586386 (svfs !)
125 bm3 2:c2e0ac586386
125 bm3 2:c2e0ac586386
126 * bmX 2:c2e0ac586386
126 * bmX 2:c2e0ac586386
127 transaction abort!
127 transaction abort!
128 rollback completed
128 rollback completed
129 abort: pretxnclose hook exited with status 1
129 abort: pretxnclose hook exited with status 1
130 [255]
130 [255]
131 $ hg book bm3
131 $ hg book bm3
132
132
133 clean up bm2 since it's uninteresting (not shared in the vfs case and
133 clean up bm2 since it's uninteresting (not shared in the vfs case and
134 same as bm3 in the svfs case)
134 same as bm3 in the svfs case)
135 $ cd ../repo2
135 $ cd ../repo2
136 $ hg book -d bm2
136 $ hg book -d bm2
137
137
138 $ cd ../repo1
138 $ cd ../repo1
139
139
140 test that commits work
140 test that commits work
141
141
142 $ echo 'shared bookmarks' > a
142 $ echo 'shared bookmarks' > a
143 $ hg commit -m 'testing shared bookmarks'
143 $ hg commit -m 'testing shared bookmarks'
144 $ hg bookmarks
144 $ hg bookmarks
145 * bm1 3:b87954705719
145 * bm1 3:b87954705719
146 bm3 2:c2e0ac586386
146 bm3 2:c2e0ac586386
147 $ cd ../repo3
147 $ cd ../repo3
148 $ hg bookmarks
148 $ hg bookmarks
149 bm1 3:b87954705719
149 bm1 3:b87954705719
150 * bm3 2:c2e0ac586386
150 * bm3 2:c2e0ac586386
151 $ echo 'more shared bookmarks' > a
151 $ echo 'more shared bookmarks' > a
152 $ hg commit -m 'testing shared bookmarks'
152 $ hg commit -m 'testing shared bookmarks'
153 created new head
153 created new head
154 $ hg bookmarks
154 $ hg bookmarks
155 bm1 3:b87954705719
155 bm1 3:b87954705719
156 * bm3 4:62f4ded848e4
156 * bm3 4:62f4ded848e4
157 $ cd ../repo1
157 $ cd ../repo1
158 $ hg bookmarks
158 $ hg bookmarks
159 * bm1 3:b87954705719
159 * bm1 3:b87954705719
160 bm3 4:62f4ded848e4
160 bm3 4:62f4ded848e4
161 $ cd ..
161 $ cd ..
162
162
163 test pushing bookmarks works
163 test pushing bookmarks works
164
164
165 $ hg clone repo3 repo4
165 $ hg clone repo3 repo4
166 updating to branch default
166 updating to branch default
167 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
167 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 $ cd repo4
168 $ cd repo4
169 $ hg boo bm4
169 $ hg boo bm4
170 $ echo foo > b
170 $ echo foo > b
171 $ hg commit -m 'foo in b'
171 $ hg commit -m 'foo in b'
172 $ hg boo
172 $ hg boo
173 bm1 3:b87954705719
173 bm1 3:b87954705719
174 bm3 4:62f4ded848e4
174 bm3 4:62f4ded848e4
175 * bm4 5:92793bfc8cad
175 * bm4 5:92793bfc8cad
176 $ hg push -B bm4
176 $ hg push -B bm4
177 pushing to $TESTTMP/repo3
177 pushing to $TESTTMP/repo3
178 searching for changes
178 searching for changes
179 adding changesets
179 adding changesets
180 adding manifests
180 adding manifests
181 adding file changes
181 adding file changes
182 added 1 changesets with 1 changes to 1 files
182 added 1 changesets with 1 changes to 1 files
183 exporting bookmark bm4
183 exporting bookmark bm4
184 $ cd ../repo1
184 $ cd ../repo1
185 $ hg bookmarks
185 $ hg bookmarks
186 * bm1 3:b87954705719
186 * bm1 3:b87954705719
187 bm3 4:62f4ded848e4
187 bm3 4:62f4ded848e4
188 bm4 5:92793bfc8cad
188 bm4 5:92793bfc8cad
189 $ cd ../repo3
189 $ cd ../repo3
190 $ hg bookmarks
190 $ hg bookmarks
191 bm1 3:b87954705719
191 bm1 3:b87954705719
192 * bm3 4:62f4ded848e4
192 * bm3 4:62f4ded848e4
193 bm4 5:92793bfc8cad
193 bm4 5:92793bfc8cad
194 $ cd ..
194 $ cd ..
195
195
196 test behavior when sharing a shared repo
196 test behavior when sharing a shared repo
197
197
198 $ hg share -B repo3 missingdir/repo5
198 $ hg share -B repo3 missingdir/repo5
199 updating working directory
199 updating working directory
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
200 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
201 $ cd missingdir/repo5
201 $ cd missingdir/repo5
202 $ hg book
202 $ hg book
203 bm1 3:b87954705719
203 bm1 3:b87954705719
204 bm3 4:62f4ded848e4
204 bm3 4:62f4ded848e4
205 bm4 5:92793bfc8cad
205 bm4 5:92793bfc8cad
206 $ cd ../..
206 $ cd ../..
207
207
208 test what happens when an active bookmark is deleted
208 test what happens when an active bookmark is deleted
209
209
210 $ cd repo1
210 $ cd repo1
211 $ hg boo -d bm3
211 $ hg boo -d bm3
212 $ hg boo
212 $ hg boo
213 * bm1 3:b87954705719
213 * bm1 3:b87954705719
214 bm4 5:92793bfc8cad
214 bm4 5:92793bfc8cad
215 $ cd ../repo3
215 $ cd ../repo3
216 $ hg boo
216 $ hg boo
217 bm1 3:b87954705719
217 bm1 3:b87954705719
218 bm4 5:92793bfc8cad
218 bm4 5:92793bfc8cad
219 $ cd ..
219 $ cd ..
220
220
221 verify that bookmarks are not written on failed transaction
221 verify that bookmarks are not written on failed transaction
222
222
223 $ cat > failpullbookmarks.py << EOF
223 $ cat > failpullbookmarks.py << EOF
224 > """A small extension that makes bookmark pulls fail, for testing"""
224 > """A small extension that makes bookmark pulls fail, for testing"""
225 > from __future__ import absolute_import
225 > from __future__ import absolute_import
226 > from mercurial import (
226 > from mercurial import (
227 > error,
227 > error,
228 > exchange,
228 > exchange,
229 > extensions,
229 > extensions,
230 > )
230 > )
231 > def _pullbookmarks(orig, pullop):
231 > def _pullbookmarks(orig, pullop):
232 > orig(pullop)
232 > orig(pullop)
233 > raise error.HookAbort('forced failure by extension')
233 > raise error.HookAbort(b'forced failure by extension')
234 > def extsetup(ui):
234 > def extsetup(ui):
235 > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
235 > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks)
236 > EOF
236 > EOF
237 $ cd repo4
237 $ cd repo4
238 $ hg boo
238 $ hg boo
239 bm1 3:b87954705719
239 bm1 3:b87954705719
240 bm3 4:62f4ded848e4
240 bm3 4:62f4ded848e4
241 * bm4 5:92793bfc8cad
241 * bm4 5:92793bfc8cad
242 $ cd ../repo3
242 $ cd ../repo3
243 $ hg boo
243 $ hg boo
244 bm1 3:b87954705719
244 bm1 3:b87954705719
245 bm4 5:92793bfc8cad
245 bm4 5:92793bfc8cad
246 $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
246 $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4
247 pulling from $TESTTMP/repo4
247 pulling from $TESTTMP/repo4
248 searching for changes
248 searching for changes
249 no changes found
249 no changes found
250 adding remote bookmark bm3
250 adding remote bookmark bm3
251 abort: forced failure by extension
251 abort: forced failure by extension
252 [255]
252 [255]
253 $ hg boo
253 $ hg boo
254 bm1 3:b87954705719
254 bm1 3:b87954705719
255 bm4 5:92793bfc8cad
255 bm4 5:92793bfc8cad
256 $ hg pull $TESTTMP/repo4
256 $ hg pull $TESTTMP/repo4
257 pulling from $TESTTMP/repo4
257 pulling from $TESTTMP/repo4
258 searching for changes
258 searching for changes
259 no changes found
259 no changes found
260 adding remote bookmark bm3
260 adding remote bookmark bm3
261 1 local changesets published
261 1 local changesets published
262 $ hg boo
262 $ hg boo
263 bm1 3:b87954705719
263 bm1 3:b87954705719
264 * bm3 4:62f4ded848e4
264 * bm3 4:62f4ded848e4
265 bm4 5:92793bfc8cad
265 bm4 5:92793bfc8cad
266 $ cd ..
266 $ cd ..
267
267
268 verify bookmark behavior after unshare
268 verify bookmark behavior after unshare
269
269
270 $ cd repo3
270 $ cd repo3
271 $ hg unshare
271 $ hg unshare
272 $ hg boo
272 $ hg boo
273 bm1 3:b87954705719
273 bm1 3:b87954705719
274 * bm3 4:62f4ded848e4
274 * bm3 4:62f4ded848e4
275 bm4 5:92793bfc8cad
275 bm4 5:92793bfc8cad
276 $ hg boo -d bm4
276 $ hg boo -d bm4
277 $ hg boo bm5
277 $ hg boo bm5
278 $ hg boo
278 $ hg boo
279 bm1 3:b87954705719
279 bm1 3:b87954705719
280 bm3 4:62f4ded848e4
280 bm3 4:62f4ded848e4
281 * bm5 4:62f4ded848e4
281 * bm5 4:62f4ded848e4
282 $ cd ../repo1
282 $ cd ../repo1
283 $ hg boo
283 $ hg boo
284 * bm1 3:b87954705719
284 * bm1 3:b87954705719
285 bm3 4:62f4ded848e4
285 bm3 4:62f4ded848e4
286 bm4 5:92793bfc8cad
286 bm4 5:92793bfc8cad
287 $ cd ..
287 $ cd ..
288
288
289 Test that if store is disabled, we drop the bookmarksinstore requirement
289 Test that if store is disabled, we drop the bookmarksinstore requirement
290
290
291 $ hg init brokenrepo --config format.bookmarks-in-store=True --config format.usestore=false
291 $ hg init brokenrepo --config format.bookmarks-in-store=True --config format.usestore=false
292 ignoring enabled 'format.bookmarks-in-store' config beacuse it is incompatible with disabled 'format.usestore' config
292 ignoring enabled 'format.bookmarks-in-store' config beacuse it is incompatible with disabled 'format.usestore' config
293 ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
293 ignoring enabled 'format.exp-share-safe' config because it is incompatible with disabled 'format.usestore' config (safe !)
@@ -1,184 +1,184 b''
1 test sparse
1 test sparse
2
2
3 $ hg init myrepo
3 $ hg init myrepo
4 $ cd myrepo
4 $ cd myrepo
5 $ cat >> $HGRCPATH <<EOF
5 $ cat >> $HGRCPATH <<EOF
6 > [extensions]
6 > [extensions]
7 > sparse=
7 > sparse=
8 > purge=
8 > purge=
9 > strip=
9 > strip=
10 > rebase=
10 > rebase=
11 > EOF
11 > EOF
12
12
13 $ echo a > index.html
13 $ echo a > index.html
14 $ echo x > data.py
14 $ echo x > data.py
15 $ echo z > readme.txt
15 $ echo z > readme.txt
16 $ cat > base.sparse <<EOF
16 $ cat > base.sparse <<EOF
17 > [include]
17 > [include]
18 > *.sparse
18 > *.sparse
19 > EOF
19 > EOF
20 $ hg ci -Aqm 'initial'
20 $ hg ci -Aqm 'initial'
21 $ cat > webpage.sparse <<EOF
21 $ cat > webpage.sparse <<EOF
22 > %include base.sparse
22 > %include base.sparse
23 > [include]
23 > [include]
24 > *.html
24 > *.html
25 > EOF
25 > EOF
26 $ hg ci -Aqm 'initial'
26 $ hg ci -Aqm 'initial'
27
27
28 Import a rules file against a 'blank' sparse profile
28 Import a rules file against a 'blank' sparse profile
29
29
30 $ cat > $TESTTMP/rules_to_import <<EOF
30 $ cat > $TESTTMP/rules_to_import <<EOF
31 > [include]
31 > [include]
32 > *.py
32 > *.py
33 > EOF
33 > EOF
34 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
34 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
35 $ ls -A
35 $ ls -A
36 .hg
36 .hg
37 data.py
37 data.py
38
38
39 $ hg debugsparse --reset
39 $ hg debugsparse --reset
40 $ rm .hg/sparse
40 $ rm .hg/sparse
41
41
42 $ cat > $TESTTMP/rules_to_import <<EOF
42 $ cat > $TESTTMP/rules_to_import <<EOF
43 > %include base.sparse
43 > %include base.sparse
44 > [include]
44 > [include]
45 > *.py
45 > *.py
46 > EOF
46 > EOF
47 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
47 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
48 $ ls -A
48 $ ls -A
49 .hg
49 .hg
50 base.sparse
50 base.sparse
51 data.py
51 data.py
52 webpage.sparse
52 webpage.sparse
53
53
54 $ hg debugsparse --reset
54 $ hg debugsparse --reset
55 $ rm .hg/sparse
55 $ rm .hg/sparse
56
56
57 Start against an existing profile; rules *already active* should be ignored
57 Start against an existing profile; rules *already active* should be ignored
58
58
59 $ hg debugsparse --enable-profile webpage.sparse
59 $ hg debugsparse --enable-profile webpage.sparse
60 $ hg debugsparse --include *.py
60 $ hg debugsparse --include *.py
61 $ cat > $TESTTMP/rules_to_import <<EOF
61 $ cat > $TESTTMP/rules_to_import <<EOF
62 > %include base.sparse
62 > %include base.sparse
63 > [include]
63 > [include]
64 > *.html
64 > *.html
65 > *.txt
65 > *.txt
66 > [exclude]
66 > [exclude]
67 > *.py
67 > *.py
68 > EOF
68 > EOF
69 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
69 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
70 $ ls -A
70 $ ls -A
71 .hg
71 .hg
72 base.sparse
72 base.sparse
73 index.html
73 index.html
74 readme.txt
74 readme.txt
75 webpage.sparse
75 webpage.sparse
76 $ cat .hg/sparse
76 $ cat .hg/sparse
77 %include webpage.sparse
77 %include webpage.sparse
78 [include]
78 [include]
79 *.py
79 *.py
80 *.txt
80 *.txt
81 [exclude]
81 [exclude]
82 *.py
82 *.py
83
83
84 $ hg debugsparse --reset
84 $ hg debugsparse --reset
85 $ rm .hg/sparse
85 $ rm .hg/sparse
86
86
87 Same tests, with -Tjson enabled to output summaries
87 Same tests, with -Tjson enabled to output summaries
88
88
89 $ cat > $TESTTMP/rules_to_import <<EOF
89 $ cat > $TESTTMP/rules_to_import <<EOF
90 > [include]
90 > [include]
91 > *.py
91 > *.py
92 > EOF
92 > EOF
93 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
93 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
94 [
94 [
95 {
95 {
96 "exclude_rules_added": 0,
96 "exclude_rules_added": 0,
97 "files_added": 0,
97 "files_added": 0,
98 "files_conflicting": 0,
98 "files_conflicting": 0,
99 "files_dropped": 4,
99 "files_dropped": 4,
100 "include_rules_added": 1,
100 "include_rules_added": 1,
101 "profiles_added": 0
101 "profiles_added": 0
102 }
102 }
103 ]
103 ]
104
104
105 $ hg debugsparse --reset
105 $ hg debugsparse --reset
106 $ rm .hg/sparse
106 $ rm .hg/sparse
107
107
108 $ cat > $TESTTMP/rules_to_import <<EOF
108 $ cat > $TESTTMP/rules_to_import <<EOF
109 > %include base.sparse
109 > %include base.sparse
110 > [include]
110 > [include]
111 > *.py
111 > *.py
112 > EOF
112 > EOF
113 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
113 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
114 [
114 [
115 {
115 {
116 "exclude_rules_added": 0,
116 "exclude_rules_added": 0,
117 "files_added": 0,
117 "files_added": 0,
118 "files_conflicting": 0,
118 "files_conflicting": 0,
119 "files_dropped": 2,
119 "files_dropped": 2,
120 "include_rules_added": 1,
120 "include_rules_added": 1,
121 "profiles_added": 1
121 "profiles_added": 1
122 }
122 }
123 ]
123 ]
124
124
125 $ hg debugsparse --reset
125 $ hg debugsparse --reset
126 $ rm .hg/sparse
126 $ rm .hg/sparse
127
127
128 $ hg debugsparse --enable-profile webpage.sparse
128 $ hg debugsparse --enable-profile webpage.sparse
129 $ hg debugsparse --include *.py
129 $ hg debugsparse --include *.py
130 $ cat > $TESTTMP/rules_to_import <<EOF
130 $ cat > $TESTTMP/rules_to_import <<EOF
131 > %include base.sparse
131 > %include base.sparse
132 > [include]
132 > [include]
133 > *.html
133 > *.html
134 > *.txt
134 > *.txt
135 > [exclude]
135 > [exclude]
136 > *.py
136 > *.py
137 > EOF
137 > EOF
138 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
138 $ hg debugsparse --import-rules $TESTTMP/rules_to_import -Tjson
139 [
139 [
140 {
140 {
141 "exclude_rules_added": 1,
141 "exclude_rules_added": 1,
142 "files_added": 1,
142 "files_added": 1,
143 "files_conflicting": 0,
143 "files_conflicting": 0,
144 "files_dropped": 1,
144 "files_dropped": 1,
145 "include_rules_added": 1,
145 "include_rules_added": 1,
146 "profiles_added": 0
146 "profiles_added": 0
147 }
147 }
148 ]
148 ]
149
149
150 If importing results in no new rules being added, no refresh should take place!
150 If importing results in no new rules being added, no refresh should take place!
151
151
152 $ cat > $TESTTMP/trap_sparse_refresh.py <<EOF
152 $ cat > $TESTTMP/trap_sparse_refresh.py <<EOF
153 > from mercurial import error, sparse
153 > from mercurial import error, sparse
154 > def extsetup(ui):
154 > def extsetup(ui):
155 > def abort_refresh(*args, **kwargs):
155 > def abort_refresh(*args, **kwargs):
156 > raise error.Abort('sparse._refresh called!')
156 > raise error.Abort(b'sparse._refresh called!')
157 > sparse.refreshwdir = abort_refresh
157 > sparse.refreshwdir = abort_refresh
158 > EOF
158 > EOF
159 $ cat >> $HGRCPATH <<EOF
159 $ cat >> $HGRCPATH <<EOF
160 > [extensions]
160 > [extensions]
161 > trap_sparse_refresh=$TESTTMP/trap_sparse_refresh.py
161 > trap_sparse_refresh=$TESTTMP/trap_sparse_refresh.py
162 > EOF
162 > EOF
163 $ cat > $TESTTMP/rules_to_import <<EOF
163 $ cat > $TESTTMP/rules_to_import <<EOF
164 > [include]
164 > [include]
165 > *.py
165 > *.py
166 > EOF
166 > EOF
167 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
167 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
168
168
169 If an exception is raised during refresh, restore the existing rules again.
169 If an exception is raised during refresh, restore the existing rules again.
170
170
171 $ cat > $TESTTMP/rules_to_import <<EOF
171 $ cat > $TESTTMP/rules_to_import <<EOF
172 > [exclude]
172 > [exclude]
173 > *.html
173 > *.html
174 > EOF
174 > EOF
175 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
175 $ hg debugsparse --import-rules $TESTTMP/rules_to_import
176 abort: sparse._refresh called!
176 abort: sparse._refresh called!
177 [255]
177 [255]
178 $ cat .hg/sparse
178 $ cat .hg/sparse
179 %include webpage.sparse
179 %include webpage.sparse
180 [include]
180 [include]
181 *.py
181 *.py
182 *.txt
182 *.txt
183 [exclude]
183 [exclude]
184 *.py
184 *.py
General Comments 0
You need to be logged in to leave comments. Login now