push: add --new-branch option to allow initial push of new branches...
Sune Foldager
r11211:e43c23d1 default
@@ -1,2851 +1,2851 @@
# mq.py - patch queues for mercurial
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''manage a stack of patches

This extension lets you work with a stack of patches in a Mercurial
repository. It manages two stacks of patches - all known patches, and
applied patches (subset of known patches).

Known patches are represented as patch files in the .hg/patches
directory. Applied patches are both patch files and changesets.

Common tasks (use :hg:`help command` for more details)::

  create new patch                          qnew
  import existing patch                     qimport

  print patch series                        qseries
  print applied patches                     qapplied

  add known patch to applied stack          qpush
  remove patch from applied stack           qpop
  refresh contents of top applied patch     qrefresh

By default, mq will automatically use git patches when required to
avoid losing file mode changes, copy records, binary files or empty
files creations or deletions. This behaviour can be configured with::

  [mq]
  git = auto/keep/yes/no

If set to 'keep', mq will obey the [diff] section configuration while
preserving existing git patches upon qrefresh. If set to 'yes' or
'no', mq will override the [diff] section and always generate git or
regular patches, possibly losing data in the second case.
'''
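
# Editorial note (illustrative, not part of upstream mq.py): a minimal mq
# session using the commands listed in the docstring above might look like:
#
#   hg qnew fix-typo.patch    # start a new patch on top of the stack
#   (edit files)
#   hg qrefresh               # capture working-directory changes into it
#   hg qpop                   # unapply the patch again
#   hg qpush                  # reapply it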

from mercurial.i18n import _
from mercurial.node import bin, hex, short, nullid, nullrev
from mercurial.lock import release
from mercurial import commands, cmdutil, hg, patch, util
from mercurial import repair, extensions, url, error
import os, sys, re, errno

commands.norepo += " qclone"

# Patch names look like unix-file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath

class statusentry(object):
    def __init__(self, node, name):
        self.node, self.name = node, name

    def __str__(self):
        return hex(self.node) + ':' + self.name

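# Editorial example (not part of upstream mq.py): each line of
# .hg/patches/status is the str() of a statusentry above, i.e. the 40-digit
# hex changeset hash, a colon, and the patch name, for example:
#
#   0123456789abcdef0123456789abcdef01234567:fix-typo.patch
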
class patchheader(object):
    def __init__(self, pf, plainmode=False):
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            while lines:
                if not lines[-1].strip():
                    del lines[-1]
                else:
                    break

        message = []
        comments = []
        user = None
        date = None
        parent = None
        format = None
        subject = None
        diffstart = 0

        for line in file(pf):
            line = line.rstrip()
            if (line.startswith('diff --git')
                or (diffstart and line.startswith('+++ '))):
                diffstart = 2
                break
            diffstart = 0 # reset
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif line.startswith("# Parent "):
                    parent = line[9:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                message = []
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("Date: ") or
                                           line.startswith("date: "))):
                date = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)

        self.message = message
        self.comments = comments
        self.user = user
        self.date = date
        self.parent = parent
        self.haspatch = diffstart > 1
        self.plainmode = plainmode

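    # Editorial example (not part of upstream mq.py): __init__ above accepts
    # either the "# HG changeset patch" header produced by `hg export`:
    #
    #   # HG changeset patch
    #   # User Ada Lovelace <ada@example.org>   (hypothetical values)
    #   # Date 1270000000 0
    #   commit message...
    #
    # or plain mail-style tags ("From: ", "Subject: ", "Date: "), which end
    # at the first blank line.
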
    def setuser(self, user):
        if not self.updateheader(['From: ', '# User '], user):
            try:
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# User ' + user)
            except ValueError:
                if self.plainmode or self._hasheader(['Date: ']):
                    self.comments = ['From: ' + user] + self.comments
                else:
                    tmp = ['# HG changeset patch', '# User ' + user, '']
                    self.comments = tmp + self.comments
        self.user = user

    def setdate(self, date):
        if not self.updateheader(['Date: ', '# Date '], date):
            try:
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# Date ' + date)
            except ValueError:
                if self.plainmode or self._hasheader(['From: ']):
                    self.comments = ['Date: ' + date] + self.comments
                else:
                    tmp = ['# HG changeset patch', '# Date ' + date, '']
                    self.comments = tmp + self.comments
        self.date = date

    def setparent(self, parent):
        if not self.updateheader(['# Parent '], parent):
            try:
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
            except ValueError:
                pass
        self.parent = parent

    def setmessage(self, message):
        if self.comments:
            self._delmsg()
        self.message = [message]
        self.comments += self.message

    def updateheader(self, prefixes, new):
        '''Update all references to a field in the patch header.
        Return whether the field is present.'''
        res = False
        for prefix in prefixes:
            for i in xrange(len(self.comments)):
                if self.comments[i].startswith(prefix):
                    self.comments[i] = prefix + new
                    res = True
                    break
        return res

    def _hasheader(self, prefixes):
        '''Check if a header starts with any of the given prefixes.'''
        for prefix in prefixes:
            for comment in self.comments:
                if comment.startswith(prefix):
                    return True
        return False

    def __str__(self):
        if not self.comments:
            return ''
        return '\n'.join(self.comments) + '\n\n'

    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    self.message = self.message[2:]
                    break
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]

class queue(object):
    def __init__(self, ui, path, patchdir=None):
        self.basepath = path
        self.path = patchdir or os.path.join(path, "patches")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.applied_dirty = 0
        self.series_dirty = 0
        self.series_path = "series"
        self.status_path = "status"
        self.guards_path = "guards"
        self.active_guards = None
        self.guards_dirty = False
        # Handle mq.git as a bool with extended values
        try:
            gitmode = ui.configbool('mq', 'git', None)
            if gitmode is None:
                raise error.ConfigError()
            self.gitmode = gitmode and 'yes' or 'no'
        except error.ConfigError:
            self.gitmode = ui.config('mq', 'git', 'auto').lower()
        self.plainmode = ui.configbool('mq', 'plain', False)

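    # Editorial example (not part of upstream mq.py): the mq.git handling
    # above means all of the following hgrc settings are understood; boolean
    # values are mapped to 'yes'/'no', anything else falls through to the
    # string values:
    #
    #   [mq]
    #   git = keep      ; or auto / yes / no / true / false
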
    @util.propertycache
    def applied(self):
        if os.path.exists(self.join(self.status_path)):
            def parse(l):
                n, name = l.split(':', 1)
                return statusentry(bin(n), name)
            lines = self.opener(self.status_path).read().splitlines()
            return [parse(l) for l in lines]
        return []

    @util.propertycache
    def full_series(self):
        if os.path.exists(self.join(self.series_path)):
            return self.opener(self.series_path).read().splitlines()
        return []

    @util.propertycache
    def series(self):
        self.parse_series()
        return self.series

    @util.propertycache
    def series_guards(self):
        self.parse_series()
        return self.series_guards

    def invalidate(self):
        for a in 'applied full_series series series_guards'.split():
            if a in self.__dict__:
                delattr(self, a)
        self.applied_dirty = 0
        self.series_dirty = 0
        self.guards_dirty = False
        self.active_guards = None

    def diffopts(self, opts={}, patchfn=None):
        diffopts = patch.diffopts(self.ui, opts)
        if self.gitmode == 'auto':
            diffopts.upgrade = True
        elif self.gitmode == 'keep':
            pass
        elif self.gitmode in ('yes', 'no'):
            diffopts.git = self.gitmode == 'yes'
        else:
            raise util.Abort(_('mq.git option can be auto/keep/yes/no'
                               ' got %s') % self.gitmode)
        if patchfn:
            diffopts = self.patchopts(diffopts, patchfn)
        return diffopts

    def patchopts(self, diffopts, *patches):
        """Return a copy of input diff options with git set to true if
        referenced patch is a git patch and should be preserved as such.
        """
        diffopts = diffopts.copy()
        if not diffopts.git and self.gitmode == 'keep':
            for patchfn in patches:
                patchf = self.opener(patchfn, 'r')
                # if the patch was a git patch, refresh it as a git patch
                for line in patchf:
                    if line.startswith('diff --git'):
                        diffopts.git = True
                        break
                patchf.close()
        return diffopts

    def join(self, *p):
        return os.path.join(self.path, *p)

    def find_series(self, patch):
        def matchpatch(l):
            l = l.split('#', 1)[0]
            return l.strip() == patch
        for index, l in enumerate(self.full_series):
            if matchpatch(l):
                return index
        return None

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def parse_series(self):
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.series_path)))
                self.series.append(patch)
                self.series_guards.append(self.guard_re.findall(comment))

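    # Editorial example (not part of upstream mq.py): a series file parsed by
    # parse_series() above may annotate patches with guards after a '#', e.g.:
    #
    #   fix-typo.patch
    #   experimental.patch  #+experimental
    #   windows-only.patch  #-posix
    #
    # guard_re extracts the '+experimental' / '-posix' tokens into
    # series_guards, one list per patch.
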
    def check_guard(self, guard):
        if not guard:
            return _('guard cannot be an empty string')
        bad_chars = '# \t\r\n\f'
        first = guard[0]
        if first in '-+':
            return (_('guard %r starts with invalid character: %r') %
                    (guard, first))
        for c in bad_chars:
            if c in guard:
                return _('invalid character in guard %r: %r') % (guard, c)

    def set_active(self, guards):
        for guard in guards:
            bad = self.check_guard(guard)
            if bad:
                raise util.Abort(bad)
        guards = sorted(set(guards))
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.active_guards = guards
        self.guards_dirty = True

    def active(self):
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards

    def set_guards(self, idx, guards):
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True

    def pushable(self, idx):
        if isinstance(idx, str):
            idx = self.series.index(idx)
        patchguards = self.series_guards[idx]
        if not patchguards:
            return True, None
        guards = self.active()
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
        if exactneg:
            return False, exactneg[0]
        pos = [g for g in patchguards if g[0] == '+']
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, exactpos[0]
            return False, pos
        return True, ''

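    # Editorial example (not part of upstream mq.py): with active guards
    # ['experimental'], pushable() above evaluates roughly as follows:
    #
    #   patch guards        result
    #   (none)              (True, None)    - unguarded patches always push
    #   ['-experimental']   (False, '-experimental')
    #   ['+experimental']   (True, '+experimental')
    #   ['+other']          (False, ['+other'])  - positive guard not selected
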
    def explain_pushable(self, idx, all_patches=False):
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])

    def save_dirty(self):
        def write_list(items, path):
            fp = self.opener(path, 'w')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applied_dirty:
            write_list(map(str, self.applied), self.status_path)
        if self.series_dirty:
            write_list(self.full_series, self.series_path)
        if self.guards_dirty:
            write_list(self.active_guards, self.guards_path)

    def removeundo(self, repo):
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn(_('error removing undo: %s\n') % str(inst))

    def printdiff(self, repo, diffopts, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        stat = opts.get('stat')
        m = cmdutil.match(repo, files, opts)
        cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
                               changes, stat, fp)

    def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
        # first try just applying the patch
        (err, n) = self.apply(repo, [patch], update_status=False,
                              strict=True, merge=rev)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn(_("patch didn't work out, merging %s\n") % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head)
        self.strip(repo, n, update=False, backup='strip')

        ctx = repo[rev]
        ret = hg.merge(repo, rev)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        n = repo.commit(ctx.description(), ctx.user(), force=True)
        if n is None:
            raise util.Abort(_("repo commit failed"))
        try:
            ph = patchheader(mergeq.join(patch), self.plainmode)
        except:
            raise util.Abort(_("unable to read %s") % patch)

        diffopts = self.patchopts(diffopts, patch)
        patchf = self.opener(patch, "w")
        comments = str(ph)
        if comments:
            patchf.write(comments)
        self.printdiff(repo, diffopts, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)

    def qparents(self, repo, rev=None):
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1

    def mergepatch(self, repo, mergeq, series, diffopts):
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit('[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        self.save_dirty()
        return (0, head)

    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file'''
        files = {}
        try:
            fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
                               files=files, eolmode=None)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            return (False, files, False)

        return (True, files, fuzz)

    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None):
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files)
                tr.close()
                self.save_dirty()
                return ret
            except:
                try:
                    tr.abort()
                finally:
                    repo.invalidate()
                    repo.dirstate.invalidate()
                raise
        finally:
            del tr
            release(lock, wlock)
            self.removeundo(repo)

    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None):
        '''returns (error, hash)
        error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except:
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.exists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)

            files = patch.updatedir(self.ui, repo, files)
            match = cmdutil.matchfiles(repo, files or [])
            n = repo.commit(message, ph.user, ph.date, match=match, force=True)

            if n is None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)

    def _cleanup(self, patches, numrevs, keep=False):
        if not keep:
            r = self.qrepo()
            if r:
                r.remove(patches, True)
            else:
                for p in patches:
                    os.unlink(self.join(p))

        if numrevs:
            del self.applied[:numrevs]
            self.applied_dirty = 1

        for i in sorted([self.find_series(p) for p in patches], reverse=True):
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1

    def _revpatches(self, repo, revs):
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise util.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise util.Abort(msg % rev)

            patch = self.applied[i].name
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches

    def finish(self, repo, revs):
        patches = self._revpatches(repo, sorted(revs))
        self._cleanup(patches, len(patches))

    def delete(self, repo, patches, opts):
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))

    def check_toppatch(self, repo):
        if self.applied:
            top = self.applied[-1].node
            patch = self.applied[-1].name
            pp = repo.dirstate.parents()
            if top not in pp:
                raise util.Abort(_("working directory revision is not qtip"))
            return top, patch
        return None, None

    def check_localchanges(self, repo, force=False, refresh=True):
        m, a, r, d = repo.status()[:4]
        if (m or a or r or d) and not force:
            if refresh:
                raise util.Abort(_("local changes found, refresh first"))
            else:
                raise util.Abort(_("local changes found"))
        return m, a, r, d

    _reserved = ('series', 'status', 'guards')
    def check_reserved_name(self, name):
        if (name in self._reserved or name.startswith('.hg')
            or name.startswith('.mq') or '#' in name or ':' in name):
            raise util.Abort(_('"%s" cannot be used as the name of a patch')
                             % name)

    def new(self, repo, patchfn, *pats, **opts):
        """options:
        msg: a string or a no-argument function returning a string
        """
        msg = opts.get('msg')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        self.check_reserved_name(patchfn)
        if os.path.exists(self.join(patchfn)):
            raise util.Abort(_('patch "%s" already exists') % patchfn)
        if opts.get('include') or opts.get('exclude') or pats:
            match = cmdutil.match(repo, pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            m, a, r, d = repo.status(match=match)[:4]
        else:
            m, a, r, d = self.check_localchanges(repo, force=True)
            match = cmdutil.matchfiles(repo, m + a + r)
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot manage merge changesets'))
        commitfiles = m + a + r
        self.check_toppatch(repo)
        insert = self.full_series_end()
        wlock = repo.wlock()
        try:
            # if patch file write fails, abort early
            p = self.opener(patchfn, "w")
            try:
                if self.plainmode:
                    if user:
                        p.write("From: " + user + "\n")
                        if not date:
                            p.write("\n")
                    if date:
                        p.write("Date: %d %d\n\n" % date)
                else:
                    p.write("# HG changeset patch\n")
                    p.write("# Parent "
                            + hex(repo[None].parents()[0].node()) + "\n")
                    if user:
                        p.write("# User " + user + "\n")
                    if date:
                        p.write("# Date %s %s\n\n" % date)
                if hasattr(msg, '__call__'):
                    msg = msg()
                commitmsg = msg and msg or ("[mq]: %s" % patchfn)
                n = repo.commit(commitmsg, user, date, match=match, force=True)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    self.full_series[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parse_series()
                    self.series_dirty = 1
                    self.applied_dirty = 1
                    if msg:
                        msg = msg + "\n\n"
                        p.write(msg)
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        chunks = patch.diff(repo, node1=parent, node2=n,
                                            match=match, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    wlock.release()
                    wlock = None
                    r = self.qrepo()
                    if r:
                        r.add([patchfn])
                except:
                    repo.rollback()
                    raise
            except Exception:
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)

    def strip(self, repo, rev, update=True, backup="all", force=None):
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.check_localchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, rev)
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            release(lock, wlock)

    def isapplied(self, patch):
        """returns (index, rev, patch)"""
        for i, a in enumerate(self.applied):
            if a.name == patch:
                return (i, a.node, a.name)
        return None

909 # if the exact patch name does not exist, we try a few
909 # if the exact patch name does not exist, we try a few
910 # variations. If strict is passed, we try only #1
910 # variations. If strict is passed, we try only #1
911 #
911 #
912 # 1) a number to indicate an offset in the series file
912 # 1) a number to indicate an offset in the series file
913 # 2) a unique substring of the patch name was given
913 # 2) a unique substring of the patch name was given
914 # 3) patchname[-+]num to indicate an offset in the series file
914 # 3) patchname[-+]num to indicate an offset in the series file
915 def lookup(self, patch, strict=False):
915 def lookup(self, patch, strict=False):
916 patch = patch and str(patch)
916 patch = patch and str(patch)
917
917
918 def partial_name(s):
918 def partial_name(s):
919 if s in self.series:
919 if s in self.series:
920 return s
920 return s
921 matches = [x for x in self.series if s in x]
921 matches = [x for x in self.series if s in x]
922 if len(matches) > 1:
922 if len(matches) > 1:
923 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
923 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
924 for m in matches:
924 for m in matches:
925 self.ui.warn(' %s\n' % m)
925 self.ui.warn(' %s\n' % m)
926 return None
926 return None
927 if matches:
927 if matches:
928 return matches[0]
928 return matches[0]
929 if self.series and self.applied:
929 if self.series and self.applied:
930 if s == 'qtip':
930 if s == 'qtip':
931 return self.series[self.series_end(True)-1]
931 return self.series[self.series_end(True)-1]
932 if s == 'qbase':
932 if s == 'qbase':
933 return self.series[0]
933 return self.series[0]
934 return None
934 return None
935
935
936 if patch is None:
936 if patch is None:
937 return None
937 return None
938 if patch in self.series:
938 if patch in self.series:
939 return patch
939 return patch
940
940
941 if not os.path.isfile(self.join(patch)):
941 if not os.path.isfile(self.join(patch)):
942 try:
942 try:
943 sno = int(patch)
943 sno = int(patch)
944 except (ValueError, OverflowError):
944 except (ValueError, OverflowError):
945 pass
945 pass
946 else:
946 else:
947 if -len(self.series) <= sno < len(self.series):
947 if -len(self.series) <= sno < len(self.series):
948 return self.series[sno]
948 return self.series[sno]
949
949
950 if not strict:
950 if not strict:
951 res = partial_name(patch)
951 res = partial_name(patch)
952 if res:
952 if res:
953 return res
953 return res
954 minus = patch.rfind('-')
954 minus = patch.rfind('-')
955 if minus >= 0:
955 if minus >= 0:
956 res = partial_name(patch[:minus])
956 res = partial_name(patch[:minus])
957 if res:
957 if res:
958 i = self.series.index(res)
958 i = self.series.index(res)
959 try:
959 try:
960 off = int(patch[minus + 1:] or 1)
960 off = int(patch[minus + 1:] or 1)
961 except (ValueError, OverflowError):
961 except (ValueError, OverflowError):
962 pass
962 pass
963 else:
963 else:
964 if i - off >= 0:
964 if i - off >= 0:
965 return self.series[i - off]
965 return self.series[i - off]
966 plus = patch.rfind('+')
966 plus = patch.rfind('+')
967 if plus >= 0:
967 if plus >= 0:
968 res = partial_name(patch[:plus])
968 res = partial_name(patch[:plus])
969 if res:
969 if res:
970 i = self.series.index(res)
970 i = self.series.index(res)
971 try:
971 try:
972 off = int(patch[plus + 1:] or 1)
972 off = int(patch[plus + 1:] or 1)
973 except (ValueError, OverflowError):
973 except (ValueError, OverflowError):
974 pass
974 pass
975 else:
975 else:
976 if i + off < len(self.series):
976 if i + off < len(self.series):
977 return self.series[i + off]
977 return self.series[i + off]
978 raise util.Abort(_("patch %s not in series") % patch)
978 raise util.Abort(_("patch %s not in series") % patch)
979
979
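# A small illustrative sketch (not part of the original mq.py): the name
# resolution rules listed above lookup(), restated as a self-contained
# function over a plain list of patch names. Exact names, numeric indexes,
# unique substrings and the patchname[-+]num offset form are modelled;
# guards, the qtip/qbase aliases and on-disk lookups are ignored.
# `resolve_patch_ref` is a hypothetical helper name, not mq API.
def resolve_patch_ref(series, ref):
    def unique_match(s):
        if s in series:                     # exact name wins
            return s
        matches = [x for x in series if s in x]
        if len(matches) == 1:
            return matches[0]
        return None

    try:                                    # 1) numeric offset into the series
        idx = int(ref)
    except ValueError:
        pass
    else:
        if -len(series) <= idx < len(series):
            return series[idx]
    found = unique_match(ref)               # 2) exact name or unique substring
    if found:
        return found
    sep = max(ref.rfind('-'), ref.rfind('+'))
    if sep > 0:                             # 3) patchname[-+]num offset form
        base = unique_match(ref[:sep])
        if base is not None:
            try:
                off = int(ref[sep + 1:] or 1)
            except ValueError:
                return None
            i = series.index(base)
            if ref[sep] == '-':
                i -= off
            else:
                i += off
            if 0 <= i < len(series):
                return series[i]
    return None

# resolve_patch_ref(['fix-io', 'add-tests', 'docs'], 'tests')  -> 'add-tests'
# resolve_patch_ref(['fix-io', 'add-tests', 'docs'], 'fix+2')  -> 'docs'
# resolve_patch_ref(['fix-io', 'add-tests', 'docs'], '-1')     -> 'docs'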
980 def push(self, repo, patch=None, force=False, list=False,
980 def push(self, repo, patch=None, force=False, list=False,
981 mergeq=None, all=False, move=False):
981 mergeq=None, all=False, move=False):
982 diffopts = self.diffopts()
982 diffopts = self.diffopts()
983 wlock = repo.wlock()
983 wlock = repo.wlock()
984 try:
984 try:
985 heads = []
985 heads = []
986 for b, ls in repo.branchmap().iteritems():
986 for b, ls in repo.branchmap().iteritems():
987 heads += ls
987 heads += ls
988 if not heads:
988 if not heads:
989 heads = [nullid]
989 heads = [nullid]
990 if repo.dirstate.parents()[0] not in heads:
990 if repo.dirstate.parents()[0] not in heads:
991 self.ui.status(_("(working directory not at a head)\n"))
991 self.ui.status(_("(working directory not at a head)\n"))
992
992
993 if not self.series:
993 if not self.series:
994 self.ui.warn(_('no patches in series\n'))
994 self.ui.warn(_('no patches in series\n'))
995 return 0
995 return 0
996
996
997 patch = self.lookup(patch)
997 patch = self.lookup(patch)
998 # Suppose our series file is: A B C and the current 'top'
998 # Suppose our series file is: A B C and the current 'top'
999 # patch is B. qpush C should be performed (moving forward),
999 # patch is B. qpush C should be performed (moving forward),
1000 # qpush B is a NOP (no change), and qpush A is an error
1000 # qpush B is a NOP (no change), and qpush A is an error
1001 # (can't go backwards with qpush).
1001 # (can't go backwards with qpush).
1002 if patch:
1002 if patch:
1003 info = self.isapplied(patch)
1003 info = self.isapplied(patch)
1004 if info:
1004 if info:
1005 if info[0] < len(self.applied) - 1:
1005 if info[0] < len(self.applied) - 1:
1006 raise util.Abort(
1006 raise util.Abort(
1007 _("cannot push to a previous patch: %s") % patch)
1007 _("cannot push to a previous patch: %s") % patch)
1008 self.ui.warn(
1008 self.ui.warn(
1009 _('qpush: %s is already at the top\n') % patch)
1009 _('qpush: %s is already at the top\n') % patch)
1010 return
1010 return
1011 pushable, reason = self.pushable(patch)
1011 pushable, reason = self.pushable(patch)
1012 if not pushable:
1012 if not pushable:
1013 if reason:
1013 if reason:
1014 reason = _('guarded by %r') % reason
1014 reason = _('guarded by %r') % reason
1015 else:
1015 else:
1016 reason = _('no matching guards')
1016 reason = _('no matching guards')
1017 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1017 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1018 return 1
1018 return 1
1019 elif all:
1019 elif all:
1020 patch = self.series[-1]
1020 patch = self.series[-1]
1021 if self.isapplied(patch):
1021 if self.isapplied(patch):
1022 self.ui.warn(_('all patches are currently applied\n'))
1022 self.ui.warn(_('all patches are currently applied\n'))
1023 return 0
1023 return 0
1024
1024
1025 # Following the above example, starting at 'top' of B:
1025 # Following the above example, starting at 'top' of B:
1026 # qpush should be performed (pushes C), but a subsequent
1026 # qpush should be performed (pushes C), but a subsequent
1027 # qpush without an argument is an error (nothing to
1027 # qpush without an argument is an error (nothing to
1028 # apply). This allows a loop of "...while hg qpush..." to
1028 # apply). This allows a loop of "...while hg qpush..." to
1029 # work, as it detects an error when done.
1029 # work, as it detects an error when done.
1030 start = self.series_end()
1030 start = self.series_end()
1031 if start == len(self.series):
1031 if start == len(self.series):
1032 self.ui.warn(_('patch series already fully applied\n'))
1032 self.ui.warn(_('patch series already fully applied\n'))
1033 return 1
1033 return 1
1034 if not force:
1034 if not force:
1035 self.check_localchanges(repo)
1035 self.check_localchanges(repo)
1036
1036
1037 if move:
1037 if move:
1038 try:
1038 try:
1039 del self.full_series[self.full_series.index(patch, start)]
1039 del self.full_series[self.full_series.index(patch, start)]
1040 except ValueError:
1040 except ValueError:
1041 raise util.Abort(_("patch '%s' not found") % patch)
1041 raise util.Abort(_("patch '%s' not found") % patch)
1042 self.full_series.insert(start, patch)
1042 self.full_series.insert(start, patch)
1043 self.parse_series()
1043 self.parse_series()
1044 self.series_dirty = 1
1044 self.series_dirty = 1
1045
1045
1046 self.applied_dirty = 1
1046 self.applied_dirty = 1
1047 if start > 0:
1047 if start > 0:
1048 self.check_toppatch(repo)
1048 self.check_toppatch(repo)
1049 if not patch:
1049 if not patch:
1050 patch = self.series[start]
1050 patch = self.series[start]
1051 end = start + 1
1051 end = start + 1
1052 else:
1052 else:
1053 end = self.series.index(patch, start) + 1
1053 end = self.series.index(patch, start) + 1
1054
1054
1055 s = self.series[start:end]
1055 s = self.series[start:end]
1056 all_files = set()
1056 all_files = set()
1057 try:
1057 try:
1058 if mergeq:
1058 if mergeq:
1059 ret = self.mergepatch(repo, mergeq, s, diffopts)
1059 ret = self.mergepatch(repo, mergeq, s, diffopts)
1060 else:
1060 else:
1061 ret = self.apply(repo, s, list, all_files=all_files)
1061 ret = self.apply(repo, s, list, all_files=all_files)
1062 except:
1062 except:
1063 self.ui.warn(_('cleaning up working directory...'))
1063 self.ui.warn(_('cleaning up working directory...'))
1064 node = repo.dirstate.parents()[0]
1064 node = repo.dirstate.parents()[0]
1065 hg.revert(repo, node, None)
1065 hg.revert(repo, node, None)
1066 # only remove unknown files that we know we touched or
1066 # only remove unknown files that we know we touched or
1067 # created while patching
1067 # created while patching
1068 for f in all_files:
1068 for f in all_files:
1069 if f not in repo.dirstate:
1069 if f not in repo.dirstate:
1070 try:
1070 try:
1071 util.unlink(repo.wjoin(f))
1071 util.unlink(repo.wjoin(f))
1072 except OSError, inst:
1072 except OSError, inst:
1073 if inst.errno != errno.ENOENT:
1073 if inst.errno != errno.ENOENT:
1074 raise
1074 raise
1075 self.ui.warn(_('done\n'))
1075 self.ui.warn(_('done\n'))
1076 raise
1076 raise
1077
1077
1078 if not self.applied:
1078 if not self.applied:
1079 return ret[0]
1079 return ret[0]
1080 top = self.applied[-1].name
1080 top = self.applied[-1].name
1081 if ret[0] and ret[0] > 1:
1081 if ret[0] and ret[0] > 1:
1082 msg = _("errors during apply, please fix and refresh %s\n")
1082 msg = _("errors during apply, please fix and refresh %s\n")
1083 self.ui.write(msg % top)
1083 self.ui.write(msg % top)
1084 else:
1084 else:
1085 self.ui.write(_("now at: %s\n") % top)
1085 self.ui.write(_("now at: %s\n") % top)
1086 return ret[0]
1086 return ret[0]
1087
1087
1088 finally:
1088 finally:
1089 wlock.release()
1089 wlock.release()
1090
1090
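# A toy illustrative model (not part of the original mq.py) of the qpush
# rules spelled out in the comments inside push(): pushing a target behind
# the current top is an error, pushing the current top is a no-op, and a
# plain qpush with the series fully applied fails so that
# "while hg qpush; do ...; done" style loops terminate. The function name
# `plan_qpush` and its return values are hypothetical; guards and the
# working-directory checks are ignored.
def plan_qpush(series, applied, target=None):
    """Return the list of patch names a qpush would apply, or raise."""
    if target is None:
        if len(applied) == len(series):
            raise ValueError('patch series already fully applied')
        return [series[len(applied)]]       # push exactly one patch
    if target in applied:
        if target != applied[-1]:
            raise ValueError('cannot push to a previous patch: %s' % target)
        return []                           # already at the top: no-op
    end = series.index(target) + 1
    return series[len(applied):end]         # everything up to the target

# series = ['A', 'B', 'C']
# plan_qpush(series, ['A', 'B'], 'C')  -> ['C']      (moving forward)
# plan_qpush(series, ['A', 'B'], 'B')  -> []         (no change)
# plan_qpush(series, ['A', 'B'], 'A')  -> ValueError (cannot go backwards)
# plan_qpush(series, ['A', 'B', 'C'])  -> ValueError (nothing left to apply)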
1091 def pop(self, repo, patch=None, force=False, update=True, all=False):
1091 def pop(self, repo, patch=None, force=False, update=True, all=False):
1092 wlock = repo.wlock()
1092 wlock = repo.wlock()
1093 try:
1093 try:
1094 if patch:
1094 if patch:
1095 # index, rev, patch
1095 # index, rev, patch
1096 info = self.isapplied(patch)
1096 info = self.isapplied(patch)
1097 if not info:
1097 if not info:
1098 patch = self.lookup(patch)
1098 patch = self.lookup(patch)
1099 info = self.isapplied(patch)
1099 info = self.isapplied(patch)
1100 if not info:
1100 if not info:
1101 raise util.Abort(_("patch %s is not applied") % patch)
1101 raise util.Abort(_("patch %s is not applied") % patch)
1102
1102
1103 if not self.applied:
1103 if not self.applied:
1104 # Allow qpop -a to work repeatedly,
1104 # Allow qpop -a to work repeatedly,
1105 # but not qpop without an argument
1105 # but not qpop without an argument
1106 self.ui.warn(_("no patches applied\n"))
1106 self.ui.warn(_("no patches applied\n"))
1107 return not all
1107 return not all
1108
1108
1109 if all:
1109 if all:
1110 start = 0
1110 start = 0
1111 elif patch:
1111 elif patch:
1112 start = info[0] + 1
1112 start = info[0] + 1
1113 else:
1113 else:
1114 start = len(self.applied) - 1
1114 start = len(self.applied) - 1
1115
1115
1116 if start >= len(self.applied):
1116 if start >= len(self.applied):
1117 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1117 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1118 return
1118 return
1119
1119
1120 if not update:
1120 if not update:
1121 parents = repo.dirstate.parents()
1121 parents = repo.dirstate.parents()
1122 rr = [x.node for x in self.applied]
1122 rr = [x.node for x in self.applied]
1123 for p in parents:
1123 for p in parents:
1124 if p in rr:
1124 if p in rr:
1125 self.ui.warn(_("qpop: forcing dirstate update\n"))
1125 self.ui.warn(_("qpop: forcing dirstate update\n"))
1126 update = True
1126 update = True
1127 else:
1127 else:
1128 parents = [p.node() for p in repo[None].parents()]
1128 parents = [p.node() for p in repo[None].parents()]
1129 needupdate = False
1129 needupdate = False
1130 for entry in self.applied[start:]:
1130 for entry in self.applied[start:]:
1131 if entry.node in parents:
1131 if entry.node in parents:
1132 needupdate = True
1132 needupdate = True
1133 break
1133 break
1134 update = needupdate
1134 update = needupdate
1135
1135
1136 if not force and update:
1136 if not force and update:
1137 self.check_localchanges(repo)
1137 self.check_localchanges(repo)
1138
1138
1139 self.applied_dirty = 1
1139 self.applied_dirty = 1
1140 end = len(self.applied)
1140 end = len(self.applied)
1141 rev = self.applied[start].node
1141 rev = self.applied[start].node
1142 if update:
1142 if update:
1143 top = self.check_toppatch(repo)[0]
1143 top = self.check_toppatch(repo)[0]
1144
1144
1145 try:
1145 try:
1146 heads = repo.changelog.heads(rev)
1146 heads = repo.changelog.heads(rev)
1147 except error.LookupError:
1147 except error.LookupError:
1148 node = short(rev)
1148 node = short(rev)
1149 raise util.Abort(_('trying to pop unknown node %s') % node)
1149 raise util.Abort(_('trying to pop unknown node %s') % node)
1150
1150
1151 if heads != [self.applied[-1].node]:
1151 if heads != [self.applied[-1].node]:
1152 raise util.Abort(_("popping would remove a revision not "
1152 raise util.Abort(_("popping would remove a revision not "
1153 "managed by this patch queue"))
1153 "managed by this patch queue"))
1154
1154
1155 # we know there are no local changes, so we can make a simplified
1155 # we know there are no local changes, so we can make a simplified
1156 # form of hg.update.
1156 # form of hg.update.
1157 if update:
1157 if update:
1158 qp = self.qparents(repo, rev)
1158 qp = self.qparents(repo, rev)
1159 ctx = repo[qp]
1159 ctx = repo[qp]
1160 m, a, r, d = repo.status(qp, top)[:4]
1160 m, a, r, d = repo.status(qp, top)[:4]
1161 if d:
1161 if d:
1162 raise util.Abort(_("deletions found between repo revs"))
1162 raise util.Abort(_("deletions found between repo revs"))
1163 for f in a:
1163 for f in a:
1164 try:
1164 try:
1165 util.unlink(repo.wjoin(f))
1165 util.unlink(repo.wjoin(f))
1166 except OSError, e:
1166 except OSError, e:
1167 if e.errno != errno.ENOENT:
1167 if e.errno != errno.ENOENT:
1168 raise
1168 raise
1169 repo.dirstate.forget(f)
1169 repo.dirstate.forget(f)
1170 for f in m + r:
1170 for f in m + r:
1171 fctx = ctx[f]
1171 fctx = ctx[f]
1172 repo.wwrite(f, fctx.data(), fctx.flags())
1172 repo.wwrite(f, fctx.data(), fctx.flags())
1173 repo.dirstate.normal(f)
1173 repo.dirstate.normal(f)
1174 repo.dirstate.setparents(qp, nullid)
1174 repo.dirstate.setparents(qp, nullid)
1175 for patch in reversed(self.applied[start:end]):
1175 for patch in reversed(self.applied[start:end]):
1176 self.ui.status(_("popping %s\n") % patch.name)
1176 self.ui.status(_("popping %s\n") % patch.name)
1177 del self.applied[start:end]
1177 del self.applied[start:end]
1178 self.strip(repo, rev, update=False, backup='strip')
1178 self.strip(repo, rev, update=False, backup='strip')
1179 if self.applied:
1179 if self.applied:
1180 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1180 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1181 else:
1181 else:
1182 self.ui.write(_("patch queue now empty\n"))
1182 self.ui.write(_("patch queue now empty\n"))
1183 finally:
1183 finally:
1184 wlock.release()
1184 wlock.release()
1185
1185
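# A small illustrative sketch (not part of the original mq.py) of how pop()
# chooses which applied patches to drop: plain qpop removes only the top
# patch, "qpop NAME" removes everything applied after NAME (leaving NAME as
# the new top), and qpop -a removes the whole stack and may be repeated
# without error. `plan_qpop` is a hypothetical helper; guards, dirstate
# handling and the strip itself are ignored here.
def plan_qpop(applied, target=None, all=False):
    """Return (patches_to_pop, exit_code), bottom-most first."""
    if not applied:
        # qpop -a keeps succeeding on an empty stack, plain qpop does not
        return [], 0 if all else 1
    if all:
        start = 0
    elif target is not None:
        start = applied.index(target) + 1
    else:
        start = len(applied) - 1
    if start >= len(applied):
        return [], 0                        # target is already the top
    return applied[start:], 0

# plan_qpop(['A', 'B', 'C'])              -> (['C'], 0)
# plan_qpop(['A', 'B', 'C'], target='A')  -> (['B', 'C'], 0)
# plan_qpop(['A', 'B', 'C'], all=True)    -> (['A', 'B', 'C'], 0)
# plan_qpop([], all=True)                 -> ([], 0)   # repeatable
# plan_qpop([])                           -> ([], 1)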
1186 def diff(self, repo, pats, opts):
1186 def diff(self, repo, pats, opts):
1187 top, patch = self.check_toppatch(repo)
1187 top, patch = self.check_toppatch(repo)
1188 if not top:
1188 if not top:
1189 self.ui.write(_("no patches applied\n"))
1189 self.ui.write(_("no patches applied\n"))
1190 return
1190 return
1191 qp = self.qparents(repo, top)
1191 qp = self.qparents(repo, top)
1192 if opts.get('reverse'):
1192 if opts.get('reverse'):
1193 node1, node2 = None, qp
1193 node1, node2 = None, qp
1194 else:
1194 else:
1195 node1, node2 = qp, None
1195 node1, node2 = qp, None
1196 diffopts = self.diffopts(opts, patch)
1196 diffopts = self.diffopts(opts, patch)
1197 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1197 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1198
1198
1199 def refresh(self, repo, pats=None, **opts):
1199 def refresh(self, repo, pats=None, **opts):
1200 if not self.applied:
1200 if not self.applied:
1201 self.ui.write(_("no patches applied\n"))
1201 self.ui.write(_("no patches applied\n"))
1202 return 1
1202 return 1
1203 msg = opts.get('msg', '').rstrip()
1203 msg = opts.get('msg', '').rstrip()
1204 newuser = opts.get('user')
1204 newuser = opts.get('user')
1205 newdate = opts.get('date')
1205 newdate = opts.get('date')
1206 if newdate:
1206 if newdate:
1207 newdate = '%d %d' % util.parsedate(newdate)
1207 newdate = '%d %d' % util.parsedate(newdate)
1208 wlock = repo.wlock()
1208 wlock = repo.wlock()
1209
1209
1210 try:
1210 try:
1211 self.check_toppatch(repo)
1211 self.check_toppatch(repo)
1212 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1212 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1213 if repo.changelog.heads(top) != [top]:
1213 if repo.changelog.heads(top) != [top]:
1214 raise util.Abort(_("cannot refresh a revision with children"))
1214 raise util.Abort(_("cannot refresh a revision with children"))
1215
1215
1216 cparents = repo.changelog.parents(top)
1216 cparents = repo.changelog.parents(top)
1217 patchparent = self.qparents(repo, top)
1217 patchparent = self.qparents(repo, top)
1218 ph = patchheader(self.join(patchfn), self.plainmode)
1218 ph = patchheader(self.join(patchfn), self.plainmode)
1219 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1219 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1220 if msg:
1220 if msg:
1221 ph.setmessage(msg)
1221 ph.setmessage(msg)
1222 if newuser:
1222 if newuser:
1223 ph.setuser(newuser)
1223 ph.setuser(newuser)
1224 if newdate:
1224 if newdate:
1225 ph.setdate(newdate)
1225 ph.setdate(newdate)
1226 ph.setparent(hex(patchparent))
1226 ph.setparent(hex(patchparent))
1227
1227
1228 # only commit new patch when write is complete
1228 # only commit new patch when write is complete
1229 patchf = self.opener(patchfn, 'w', atomictemp=True)
1229 patchf = self.opener(patchfn, 'w', atomictemp=True)
1230
1230
1231 comments = str(ph)
1231 comments = str(ph)
1232 if comments:
1232 if comments:
1233 patchf.write(comments)
1233 patchf.write(comments)
1234
1234
1235 # update the dirstate in place, strip off the qtip commit
1235 # update the dirstate in place, strip off the qtip commit
1236 # and then commit.
1236 # and then commit.
1237 #
1237 #
1238 # this should really read:
1238 # this should really read:
1239 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1239 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1240 # but we do it backwards to take advantage of manifest/chlog
1240 # but we do it backwards to take advantage of manifest/chlog
1241 # caching against the next repo.status call
1241 # caching against the next repo.status call
1242 mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
1242 mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
1243 changes = repo.changelog.read(top)
1243 changes = repo.changelog.read(top)
1244 man = repo.manifest.read(changes[0])
1244 man = repo.manifest.read(changes[0])
1245 aaa = aa[:]
1245 aaa = aa[:]
1246 matchfn = cmdutil.match(repo, pats, opts)
1246 matchfn = cmdutil.match(repo, pats, opts)
1247 # in short mode, we only diff the files included in the
1247 # in short mode, we only diff the files included in the
1248 # patch already plus specified files
1248 # patch already plus specified files
1249 if opts.get('short'):
1249 if opts.get('short'):
1250 # if amending a patch, we start with existing
1250 # if amending a patch, we start with existing
1251 # files plus specified files - unfiltered
1251 # files plus specified files - unfiltered
1252 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1252 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1253 # filter with include/exclude options
1253 # filter with include/exclude options
1254 matchfn = cmdutil.match(repo, opts=opts)
1254 matchfn = cmdutil.match(repo, opts=opts)
1255 else:
1255 else:
1256 match = cmdutil.matchall(repo)
1256 match = cmdutil.matchall(repo)
1257 m, a, r, d = repo.status(match=match)[:4]
1257 m, a, r, d = repo.status(match=match)[:4]
1258
1258
1259 # we might end up with files that were added between
1259 # we might end up with files that were added between
1260 # qtip and the dirstate parent, but then changed in the
1260 # qtip and the dirstate parent, but then changed in the
1261 # local dirstate. In this case, we want them to only
1261 # local dirstate. In this case, we want them to only
1262 # show up in the added section
1262 # show up in the added section
1263 for x in m:
1263 for x in m:
1264 if x not in aa:
1264 if x not in aa:
1265 mm.append(x)
1265 mm.append(x)
1266 # we might end up with files added by the local dirstate that
1266 # we might end up with files added by the local dirstate that
1267 # were deleted by the patch. In this case, they should only
1267 # were deleted by the patch. In this case, they should only
1268 # show up in the changed section.
1268 # show up in the changed section.
1269 for x in a:
1269 for x in a:
1270 if x in dd:
1270 if x in dd:
1271 del dd[dd.index(x)]
1271 del dd[dd.index(x)]
1272 mm.append(x)
1272 mm.append(x)
1273 else:
1273 else:
1274 aa.append(x)
1274 aa.append(x)
1275 # make sure any files deleted in the local dirstate
1275 # make sure any files deleted in the local dirstate
1276 # are not in the add or change column of the patch
1276 # are not in the add or change column of the patch
1277 forget = []
1277 forget = []
1278 for x in d + r:
1278 for x in d + r:
1279 if x in aa:
1279 if x in aa:
1280 del aa[aa.index(x)]
1280 del aa[aa.index(x)]
1281 forget.append(x)
1281 forget.append(x)
1282 continue
1282 continue
1283 elif x in mm:
1283 elif x in mm:
1284 del mm[mm.index(x)]
1284 del mm[mm.index(x)]
1285 dd.append(x)
1285 dd.append(x)
1286
1286
1287 m = list(set(mm))
1287 m = list(set(mm))
1288 r = list(set(dd))
1288 r = list(set(dd))
1289 a = list(set(aa))
1289 a = list(set(aa))
1290 c = [filter(matchfn, l) for l in (m, a, r)]
1290 c = [filter(matchfn, l) for l in (m, a, r)]
1291 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1291 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1292 chunks = patch.diff(repo, patchparent, match=match,
1292 chunks = patch.diff(repo, patchparent, match=match,
1293 changes=c, opts=diffopts)
1293 changes=c, opts=diffopts)
1294 for chunk in chunks:
1294 for chunk in chunks:
1295 patchf.write(chunk)
1295 patchf.write(chunk)
1296
1296
1297 try:
1297 try:
1298 if diffopts.git or diffopts.upgrade:
1298 if diffopts.git or diffopts.upgrade:
1299 copies = {}
1299 copies = {}
1300 for dst in a:
1300 for dst in a:
1301 src = repo.dirstate.copied(dst)
1301 src = repo.dirstate.copied(dst)
1302 # during qfold, the source file for copies may
1302 # during qfold, the source file for copies may
1303 # be removed. Treat this as a simple add.
1303 # be removed. Treat this as a simple add.
1304 if src is not None and src in repo.dirstate:
1304 if src is not None and src in repo.dirstate:
1305 copies.setdefault(src, []).append(dst)
1305 copies.setdefault(src, []).append(dst)
1306 repo.dirstate.add(dst)
1306 repo.dirstate.add(dst)
1307 # remember the copies between patchparent and qtip
1307 # remember the copies between patchparent and qtip
1308 for dst in aaa:
1308 for dst in aaa:
1309 f = repo.file(dst)
1309 f = repo.file(dst)
1310 src = f.renamed(man[dst])
1310 src = f.renamed(man[dst])
1311 if src:
1311 if src:
1312 copies.setdefault(src[0], []).extend(
1312 copies.setdefault(src[0], []).extend(
1313 copies.get(dst, []))
1313 copies.get(dst, []))
1314 if dst in a:
1314 if dst in a:
1315 copies[src[0]].append(dst)
1315 copies[src[0]].append(dst)
1316 # we can't copy a file created by the patch itself
1316 # we can't copy a file created by the patch itself
1317 if dst in copies:
1317 if dst in copies:
1318 del copies[dst]
1318 del copies[dst]
1319 for src, dsts in copies.iteritems():
1319 for src, dsts in copies.iteritems():
1320 for dst in dsts:
1320 for dst in dsts:
1321 repo.dirstate.copy(src, dst)
1321 repo.dirstate.copy(src, dst)
1322 else:
1322 else:
1323 for dst in a:
1323 for dst in a:
1324 repo.dirstate.add(dst)
1324 repo.dirstate.add(dst)
1325 # Drop useless copy information
1325 # Drop useless copy information
1326 for f in list(repo.dirstate.copies()):
1326 for f in list(repo.dirstate.copies()):
1327 repo.dirstate.copy(None, f)
1327 repo.dirstate.copy(None, f)
1328 for f in r:
1328 for f in r:
1329 repo.dirstate.remove(f)
1329 repo.dirstate.remove(f)
1330 # if the patch excludes a modified file, mark that
1330 # if the patch excludes a modified file, mark that
1331 # file with mtime=0 so status can see it.
1331 # file with mtime=0 so status can see it.
1332 mm = []
1332 mm = []
1333 for i in xrange(len(m)-1, -1, -1):
1333 for i in xrange(len(m)-1, -1, -1):
1334 if not matchfn(m[i]):
1334 if not matchfn(m[i]):
1335 mm.append(m[i])
1335 mm.append(m[i])
1336 del m[i]
1336 del m[i]
1337 for f in m:
1337 for f in m:
1338 repo.dirstate.normal(f)
1338 repo.dirstate.normal(f)
1339 for f in mm:
1339 for f in mm:
1340 repo.dirstate.normallookup(f)
1340 repo.dirstate.normallookup(f)
1341 for f in forget:
1341 for f in forget:
1342 repo.dirstate.forget(f)
1342 repo.dirstate.forget(f)
1343
1343
1344 if not msg:
1344 if not msg:
1345 if not ph.message:
1345 if not ph.message:
1346 message = "[mq]: %s\n" % patchfn
1346 message = "[mq]: %s\n" % patchfn
1347 else:
1347 else:
1348 message = "\n".join(ph.message)
1348 message = "\n".join(ph.message)
1349 else:
1349 else:
1350 message = msg
1350 message = msg
1351
1351
1352 user = ph.user or changes[1]
1352 user = ph.user or changes[1]
1353
1353
1354 # assumes strip can roll itself back if interrupted
1354 # assumes strip can roll itself back if interrupted
1355 repo.dirstate.setparents(*cparents)
1355 repo.dirstate.setparents(*cparents)
1356 self.applied.pop()
1356 self.applied.pop()
1357 self.applied_dirty = 1
1357 self.applied_dirty = 1
1358 self.strip(repo, top, update=False,
1358 self.strip(repo, top, update=False,
1359 backup='strip')
1359 backup='strip')
1360 except:
1360 except:
1361 repo.dirstate.invalidate()
1361 repo.dirstate.invalidate()
1362 raise
1362 raise
1363
1363
1364 try:
1364 try:
1365 # might be nice to attempt to roll back strip after this
1365 # might be nice to attempt to roll back strip after this
1366 patchf.rename()
1366 patchf.rename()
1367 n = repo.commit(message, user, ph.date, match=match,
1367 n = repo.commit(message, user, ph.date, match=match,
1368 force=True)
1368 force=True)
1369 self.applied.append(statusentry(n, patchfn))
1369 self.applied.append(statusentry(n, patchfn))
1370 except:
1370 except:
1371 ctx = repo[cparents[0]]
1371 ctx = repo[cparents[0]]
1372 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1372 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1373 self.save_dirty()
1373 self.save_dirty()
1374 self.ui.warn(_('refresh interrupted while patch was popped! '
1374 self.ui.warn(_('refresh interrupted while patch was popped! '
1375 '(revert --all, qpush to recover)\n'))
1375 '(revert --all, qpush to recover)\n'))
1376 raise
1376 raise
1377 finally:
1377 finally:
1378 wlock.release()
1378 wlock.release()
1379 self.removeundo(repo)
1379 self.removeundo(repo)
1380
1380
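# An illustrative sketch (not part of the original mq.py): the list surgery
# in refresh() above merges two status results, what the top patch already
# records against its parent (mm/aa/dd) and what the working directory has
# changed on top of it (m/a/r). This is a minimal set-based restatement of
# those rules; the helper name `merge_patch_status` is hypothetical, and
# guards, copy records and include/exclude filtering are ignored.
def merge_patch_status(patch_status, wd_status):
    """patch_status and wd_status are (modified, added, removed) triples;
    return the (modified, added, removed, forget) lists the refreshed
    patch should record."""
    mm, aa, dd = (set(x) for x in patch_status)
    m, a, r = (set(x) for x in wd_status)
    # working-dir edits to files the patch did not add stay "modified"
    mm |= m - aa
    # files the patch removed but the working dir re-added are really
    # modifications; anything else re-added is a plain addition
    readd = a & dd
    mm |= readd
    dd -= readd
    aa |= a - readd
    # files deleted in the working dir leave the patch's added column
    # (and get forgotten), or move from modified to removed
    forget = r & aa
    aa -= forget
    gone = (r & mm) - forget
    mm -= gone
    dd |= gone
    return sorted(mm), sorted(aa), sorted(dd), sorted(forget)

# patch adds X and modifies Y; the working dir then edits Z and deletes X:
# merge_patch_status((['Y'], ['X'], []), (['Z'], [], ['X']))
#   -> (['Y', 'Z'], [], [], ['X'])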
1381 def init(self, repo, create=False):
1381 def init(self, repo, create=False):
1382 if not create and os.path.isdir(self.path):
1382 if not create and os.path.isdir(self.path):
1383 raise util.Abort(_("patch queue directory already exists"))
1383 raise util.Abort(_("patch queue directory already exists"))
1384 try:
1384 try:
1385 os.mkdir(self.path)
1385 os.mkdir(self.path)
1386 except OSError, inst:
1386 except OSError, inst:
1387 if inst.errno != errno.EEXIST or not create:
1387 if inst.errno != errno.EEXIST or not create:
1388 raise
1388 raise
1389 if create:
1389 if create:
1390 return self.qrepo(create=True)
1390 return self.qrepo(create=True)
1391
1391
1392 def unapplied(self, repo, patch=None):
1392 def unapplied(self, repo, patch=None):
1393 if patch and patch not in self.series:
1393 if patch and patch not in self.series:
1394 raise util.Abort(_("patch %s is not in series file") % patch)
1394 raise util.Abort(_("patch %s is not in series file") % patch)
1395 if not patch:
1395 if not patch:
1396 start = self.series_end()
1396 start = self.series_end()
1397 else:
1397 else:
1398 start = self.series.index(patch) + 1
1398 start = self.series.index(patch) + 1
1399 unapplied = []
1399 unapplied = []
1400 for i in xrange(start, len(self.series)):
1400 for i in xrange(start, len(self.series)):
1401 pushable, reason = self.pushable(i)
1401 pushable, reason = self.pushable(i)
1402 if pushable:
1402 if pushable:
1403 unapplied.append((i, self.series[i]))
1403 unapplied.append((i, self.series[i]))
1404 self.explain_pushable(i)
1404 self.explain_pushable(i)
1405 return unapplied
1405 return unapplied
1406
1406
1407 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1407 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1408 summary=False):
1408 summary=False):
1409 def displayname(pfx, patchname, state):
1409 def displayname(pfx, patchname, state):
1410 if pfx:
1410 if pfx:
1411 self.ui.write(pfx)
1411 self.ui.write(pfx)
1412 if summary:
1412 if summary:
1413 ph = patchheader(self.join(patchname), self.plainmode)
1413 ph = patchheader(self.join(patchname), self.plainmode)
1414 msg = ph.message and ph.message[0] or ''
1414 msg = ph.message and ph.message[0] or ''
1415 if not self.ui.plain():
1415 if not self.ui.plain():
1416 width = util.termwidth() - len(pfx) - len(patchname) - 2
1416 width = util.termwidth() - len(pfx) - len(patchname) - 2
1417 if width > 0:
1417 if width > 0:
1418 msg = util.ellipsis(msg, width)
1418 msg = util.ellipsis(msg, width)
1419 else:
1419 else:
1420 msg = ''
1420 msg = ''
1421 self.ui.write(patchname, label='qseries.' + state)
1421 self.ui.write(patchname, label='qseries.' + state)
1422 self.ui.write(': ')
1422 self.ui.write(': ')
1423 self.ui.write(msg, label='qseries.message.' + state)
1423 self.ui.write(msg, label='qseries.message.' + state)
1424 else:
1424 else:
1425 self.ui.write(patchname, label='qseries.' + state)
1425 self.ui.write(patchname, label='qseries.' + state)
1426 self.ui.write('\n')
1426 self.ui.write('\n')
1427
1427
1428 applied = set([p.name for p in self.applied])
1428 applied = set([p.name for p in self.applied])
1429 if length is None:
1429 if length is None:
1430 length = len(self.series) - start
1430 length = len(self.series) - start
1431 if not missing:
1431 if not missing:
1432 if self.ui.verbose:
1432 if self.ui.verbose:
1433 idxwidth = len(str(start + length - 1))
1433 idxwidth = len(str(start + length - 1))
1434 for i in xrange(start, start + length):
1434 for i in xrange(start, start + length):
1435 patch = self.series[i]
1435 patch = self.series[i]
1436 if patch in applied:
1436 if patch in applied:
1437 char, state = 'A', 'applied'
1437 char, state = 'A', 'applied'
1438 elif self.pushable(i)[0]:
1438 elif self.pushable(i)[0]:
1439 char, state = 'U', 'unapplied'
1439 char, state = 'U', 'unapplied'
1440 else:
1440 else:
1441 char, state = 'G', 'guarded'
1441 char, state = 'G', 'guarded'
1442 pfx = ''
1442 pfx = ''
1443 if self.ui.verbose:
1443 if self.ui.verbose:
1444 pfx = '%*d %s ' % (idxwidth, i, char)
1444 pfx = '%*d %s ' % (idxwidth, i, char)
1445 elif status and status != char:
1445 elif status and status != char:
1446 continue
1446 continue
1447 displayname(pfx, patch, state)
1447 displayname(pfx, patch, state)
1448 else:
1448 else:
1449 msng_list = []
1449 msng_list = []
1450 for root, dirs, files in os.walk(self.path):
1450 for root, dirs, files in os.walk(self.path):
1451 d = root[len(self.path) + 1:]
1451 d = root[len(self.path) + 1:]
1452 for f in files:
1452 for f in files:
1453 fl = os.path.join(d, f)
1453 fl = os.path.join(d, f)
1454 if (fl not in self.series and
1454 if (fl not in self.series and
1455 fl not in (self.status_path, self.series_path,
1455 fl not in (self.status_path, self.series_path,
1456 self.guards_path)
1456 self.guards_path)
1457 and not fl.startswith('.')):
1457 and not fl.startswith('.')):
1458 msng_list.append(fl)
1458 msng_list.append(fl)
1459 for x in sorted(msng_list):
1459 for x in sorted(msng_list):
1460 pfx = self.ui.verbose and ('D ') or ''
1460 pfx = self.ui.verbose and ('D ') or ''
1461 displayname(pfx, x, 'missing')
1461 displayname(pfx, x, 'missing')
1462
1462
1463 def issaveline(self, l):
1463 def issaveline(self, l):
1464 if l.name == '.hg.patches.save.line':
1464 if l.name == '.hg.patches.save.line':
1465 return True
1465 return True
1466
1466
1467 def qrepo(self, create=False):
1467 def qrepo(self, create=False):
1468 if create or os.path.isdir(self.join(".hg")):
1468 if create or os.path.isdir(self.join(".hg")):
1469 return hg.repository(self.ui, path=self.path, create=create)
1469 return hg.repository(self.ui, path=self.path, create=create)
1470
1470
1471 def restore(self, repo, rev, delete=None, qupdate=None):
1471 def restore(self, repo, rev, delete=None, qupdate=None):
1472 desc = repo[rev].description().strip()
1472 desc = repo[rev].description().strip()
1473 lines = desc.splitlines()
1473 lines = desc.splitlines()
1474 i = 0
1474 i = 0
1475 datastart = None
1475 datastart = None
1476 series = []
1476 series = []
1477 applied = []
1477 applied = []
1478 qpp = None
1478 qpp = None
1479 for i, line in enumerate(lines):
1479 for i, line in enumerate(lines):
1480 if line == 'Patch Data:':
1480 if line == 'Patch Data:':
1481 datastart = i + 1
1481 datastart = i + 1
1482 elif line.startswith('Dirstate:'):
1482 elif line.startswith('Dirstate:'):
1483 l = line.rstrip()
1483 l = line.rstrip()
1484 l = l[10:].split(' ')
1484 l = l[10:].split(' ')
1485 qpp = [bin(x) for x in l]
1485 qpp = [bin(x) for x in l]
1486 elif datastart is not None:
1486 elif datastart is not None:
1487 l = line.rstrip()
1487 l = line.rstrip()
1488 n, name = l.split(':', 1)
1488 n, name = l.split(':', 1)
1489 if n:
1489 if n:
1490 applied.append(statusentry(bin(n), name))
1490 applied.append(statusentry(bin(n), name))
1491 else:
1491 else:
1492 series.append(l)
1492 series.append(l)
1493 if datastart is None:
1493 if datastart is None:
1494 self.ui.warn(_("No saved patch data found\n"))
1494 self.ui.warn(_("No saved patch data found\n"))
1495 return 1
1495 return 1
1496 self.ui.warn(_("restoring status: %s\n") % lines[0])
1496 self.ui.warn(_("restoring status: %s\n") % lines[0])
1497 self.full_series = series
1497 self.full_series = series
1498 self.applied = applied
1498 self.applied = applied
1499 self.parse_series()
1499 self.parse_series()
1500 self.series_dirty = 1
1500 self.series_dirty = 1
1501 self.applied_dirty = 1
1501 self.applied_dirty = 1
1502 heads = repo.changelog.heads()
1502 heads = repo.changelog.heads()
1503 if delete:
1503 if delete:
1504 if rev not in heads:
1504 if rev not in heads:
1505 self.ui.warn(_("save entry has children, leaving it alone\n"))
1505 self.ui.warn(_("save entry has children, leaving it alone\n"))
1506 else:
1506 else:
1507 self.ui.warn(_("removing save entry %s\n") % short(rev))
1507 self.ui.warn(_("removing save entry %s\n") % short(rev))
1508 pp = repo.dirstate.parents()
1508 pp = repo.dirstate.parents()
1509 if rev in pp:
1509 if rev in pp:
1510 update = True
1510 update = True
1511 else:
1511 else:
1512 update = False
1512 update = False
1513 self.strip(repo, rev, update=update, backup='strip')
1513 self.strip(repo, rev, update=update, backup='strip')
1514 if qpp:
1514 if qpp:
1515 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1515 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1516 (short(qpp[0]), short(qpp[1])))
1516 (short(qpp[0]), short(qpp[1])))
1517 if qupdate:
1517 if qupdate:
1518 self.ui.status(_("queue directory updating\n"))
1518 self.ui.status(_("queue directory updating\n"))
1519 r = self.qrepo()
1519 r = self.qrepo()
1520 if not r:
1520 if not r:
1521 self.ui.warn(_("Unable to load queue repository\n"))
1521 self.ui.warn(_("Unable to load queue repository\n"))
1522 return 1
1522 return 1
1523 hg.clean(r, qpp[0])
1523 hg.clean(r, qpp[0])
1524
1524
1525 def save(self, repo, msg=None):
1525 def save(self, repo, msg=None):
1526 if not self.applied:
1526 if not self.applied:
1527 self.ui.warn(_("save: no patches applied, exiting\n"))
1527 self.ui.warn(_("save: no patches applied, exiting\n"))
1528 return 1
1528 return 1
1529 if self.issaveline(self.applied[-1]):
1529 if self.issaveline(self.applied[-1]):
1530 self.ui.warn(_("status is already saved\n"))
1530 self.ui.warn(_("status is already saved\n"))
1531 return 1
1531 return 1
1532
1532
1533 if not msg:
1533 if not msg:
1534 msg = _("hg patches saved state")
1534 msg = _("hg patches saved state")
1535 else:
1535 else:
1536 msg = "hg patches: " + msg.rstrip('\r\n')
1536 msg = "hg patches: " + msg.rstrip('\r\n')
1537 r = self.qrepo()
1537 r = self.qrepo()
1538 if r:
1538 if r:
1539 pp = r.dirstate.parents()
1539 pp = r.dirstate.parents()
1540 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1540 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1541 msg += "\n\nPatch Data:\n"
1541 msg += "\n\nPatch Data:\n"
1542 msg += ''.join('%s\n' % x for x in self.applied)
1542 msg += ''.join('%s\n' % x for x in self.applied)
1543 msg += ''.join(':%s\n' % x for x in self.full_series)
1543 msg += ''.join(':%s\n' % x for x in self.full_series)
1544 n = repo.commit(msg, force=True)
1544 n = repo.commit(msg, force=True)
1545 if not n:
1545 if not n:
1546 self.ui.warn(_("repo commit failed\n"))
1546 self.ui.warn(_("repo commit failed\n"))
1547 return 1
1547 return 1
1548 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1548 self.applied.append(statusentry(n, '.hg.patches.save.line'))
1549 self.applied_dirty = 1
1549 self.applied_dirty = 1
1550 self.removeundo(repo)
1550 self.removeundo(repo)
1551
1551
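# An illustrative sketch (not part of the original mq.py) of the text
# format that save() writes and restore() reads back from a commit
# description: an optional "Dirstate: <p1> <p2>" line, a "Patch Data:"
# marker, then one "<node>:<name>" line per applied patch and one
# ":<seriesline>" entry per series line. The encode/decode helper names
# below are hypothetical and the node values are plain strings here.
def encode_save_state(message, dirstate_parents, applied, series):
    lines = [message]
    if dirstate_parents:
        lines.append('Dirstate: %s %s' % dirstate_parents)
    lines.append('')
    lines.append('Patch Data:')
    lines.extend('%s:%s' % (node, name) for node, name in applied)
    lines.extend(':%s' % l for l in series)
    return '\n'.join(lines)

def decode_save_state(text):
    applied, series, parents, datastart = [], [], None, None
    for i, line in enumerate(text.splitlines()):
        if line == 'Patch Data:':
            datastart = i + 1
        elif line.startswith('Dirstate:'):
            parents = tuple(line[10:].split(' '))
        elif datastart is not None:
            node, name = line.split(':', 1)
            if node:
                applied.append((node, name))
            else:
                series.append(name)
    return parents, applied, series

# text = encode_save_state('hg patches saved state', ('abc1', 'def2'),
#                          [('1234', 'fix-io')], ['fix-io', 'add-tests'])
# decode_save_state(text)
#   -> (('abc1', 'def2'), [('1234', 'fix-io')], ['fix-io', 'add-tests'])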
1552 def full_series_end(self):
1552 def full_series_end(self):
1553 if self.applied:
1553 if self.applied:
1554 p = self.applied[-1].name
1554 p = self.applied[-1].name
1555 end = self.find_series(p)
1555 end = self.find_series(p)
1556 if end is None:
1556 if end is None:
1557 return len(self.full_series)
1557 return len(self.full_series)
1558 return end + 1
1558 return end + 1
1559 return 0
1559 return 0
1560
1560
1561 def series_end(self, all_patches=False):
1561 def series_end(self, all_patches=False):
1562 """If all_patches is False, return the index of the next pushable patch
1562 """If all_patches is False, return the index of the next pushable patch
1563 in the series, or the series length. If all_patches is True, return the
1563 in the series, or the series length. If all_patches is True, return the
1564 index of the first patch past the last applied one.
1564 index of the first patch past the last applied one.
1565 """
1565 """
1566 end = 0
1566 end = 0
1567 def next(start):
1567 def next(start):
1568 if all_patches or start >= len(self.series):
1568 if all_patches or start >= len(self.series):
1569 return start
1569 return start
1570 for i in xrange(start, len(self.series)):
1570 for i in xrange(start, len(self.series)):
1571 p, reason = self.pushable(i)
1571 p, reason = self.pushable(i)
1572 if p:
1572 if p:
1573 break
1573 break
1574 self.explain_pushable(i)
1574 self.explain_pushable(i)
1575 return i
1575 return i
1576 if self.applied:
1576 if self.applied:
1577 p = self.applied[-1].name
1577 p = self.applied[-1].name
1578 try:
1578 try:
1579 end = self.series.index(p)
1579 end = self.series.index(p)
1580 except ValueError:
1580 except ValueError:
1581 return 0
1581 return 0
1582 return next(end + 1)
1582 return next(end + 1)
1583 return next(end)
1583 return next(end)
1584
1584
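# An illustrative sketch (not part of the original mq.py) of the docstring
# above as one self-contained function: start just past the last applied
# patch, then either stop there (all_patches=True) or skip forward over
# guarded patches to the next pushable one. `is_pushable` stands in for the
# guard machinery and is a hypothetical parameter of this sketch.
def series_end_sketch(series, applied, is_pushable, all_patches=False):
    if applied:
        try:
            start = series.index(applied[-1]) + 1
        except ValueError:
            return 0
    else:
        start = 0
    if all_patches or start >= len(series):
        return start
    for i in range(start, len(series)):
        if is_pushable(series[i]):
            return i
    return len(series)

# series = ['A', 'B', 'C'];  only 'A' applied, 'B' guarded off:
# series_end_sketch(series, ['A'], lambda p: p != 'B')        -> 2
# series_end_sketch(series, ['A'], lambda p: p != 'B', True)  -> 1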
1585 def appliedname(self, index):
1585 def appliedname(self, index):
1586 pname = self.applied[index].name
1586 pname = self.applied[index].name
1587 if not self.ui.verbose:
1587 if not self.ui.verbose:
1588 p = pname
1588 p = pname
1589 else:
1589 else:
1590 p = str(self.series.index(pname)) + " " + pname
1590 p = str(self.series.index(pname)) + " " + pname
1591 return p
1591 return p
1592
1592
1593 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1593 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1594 force=None, git=False):
1594 force=None, git=False):
1595 def checkseries(patchname):
1595 def checkseries(patchname):
1596 if patchname in self.series:
1596 if patchname in self.series:
1597 raise util.Abort(_('patch %s is already in the series file')
1597 raise util.Abort(_('patch %s is already in the series file')
1598 % patchname)
1598 % patchname)
1599 def checkfile(patchname):
1599 def checkfile(patchname):
1600 if not force and os.path.exists(self.join(patchname)):
1600 if not force and os.path.exists(self.join(patchname)):
1601 raise util.Abort(_('patch "%s" already exists')
1601 raise util.Abort(_('patch "%s" already exists')
1602 % patchname)
1602 % patchname)
1603
1603
1604 if rev:
1604 if rev:
1605 if files:
1605 if files:
1606 raise util.Abort(_('option "-r" not valid when importing '
1606 raise util.Abort(_('option "-r" not valid when importing '
1607 'files'))
1607 'files'))
1608 rev = cmdutil.revrange(repo, rev)
1608 rev = cmdutil.revrange(repo, rev)
1609 rev.sort(reverse=True)
1609 rev.sort(reverse=True)
1610 if (len(files) > 1 or len(rev) > 1) and patchname:
1610 if (len(files) > 1 or len(rev) > 1) and patchname:
1611 raise util.Abort(_('option "-n" not valid when importing multiple '
1611 raise util.Abort(_('option "-n" not valid when importing multiple '
1612 'patches'))
1612 'patches'))
1613 added = []
1613 added = []
1614 if rev:
1614 if rev:
1615 # If mq patches are applied, we can only import revisions
1615 # If mq patches are applied, we can only import revisions
1616 # that form a linear path to qbase.
1616 # that form a linear path to qbase.
1617 # Otherwise, they should form a linear path to a head.
1617 # Otherwise, they should form a linear path to a head.
1618 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1618 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1619 if len(heads) > 1:
1619 if len(heads) > 1:
1620 raise util.Abort(_('revision %d is the root of more than one '
1620 raise util.Abort(_('revision %d is the root of more than one '
1621 'branch') % rev[-1])
1621 'branch') % rev[-1])
1622 if self.applied:
1622 if self.applied:
1623 base = repo.changelog.node(rev[0])
1623 base = repo.changelog.node(rev[0])
1624 if base in [n.node for n in self.applied]:
1624 if base in [n.node for n in self.applied]:
1625 raise util.Abort(_('revision %d is already managed')
1625 raise util.Abort(_('revision %d is already managed')
1626 % rev[0])
1626 % rev[0])
1627 if heads != [self.applied[-1].node]:
1627 if heads != [self.applied[-1].node]:
1628 raise util.Abort(_('revision %d is not the parent of '
1628 raise util.Abort(_('revision %d is not the parent of '
1629 'the queue') % rev[0])
1629 'the queue') % rev[0])
1630 base = repo.changelog.rev(self.applied[0].node)
1630 base = repo.changelog.rev(self.applied[0].node)
1631 lastparent = repo.changelog.parentrevs(base)[0]
1631 lastparent = repo.changelog.parentrevs(base)[0]
1632 else:
1632 else:
1633 if heads != [repo.changelog.node(rev[0])]:
1633 if heads != [repo.changelog.node(rev[0])]:
1634 raise util.Abort(_('revision %d has unmanaged children')
1634 raise util.Abort(_('revision %d has unmanaged children')
1635 % rev[0])
1635 % rev[0])
1636 lastparent = None
1636 lastparent = None
1637
1637
1638 diffopts = self.diffopts({'git': git})
1638 diffopts = self.diffopts({'git': git})
1639 for r in rev:
1639 for r in rev:
1640 p1, p2 = repo.changelog.parentrevs(r)
1640 p1, p2 = repo.changelog.parentrevs(r)
1641 n = repo.changelog.node(r)
1641 n = repo.changelog.node(r)
1642 if p2 != nullrev:
1642 if p2 != nullrev:
1643 raise util.Abort(_('cannot import merge revision %d') % r)
1643 raise util.Abort(_('cannot import merge revision %d') % r)
1644 if lastparent and lastparent != r:
1644 if lastparent and lastparent != r:
1645 raise util.Abort(_('revision %d is not the parent of %d')
1645 raise util.Abort(_('revision %d is not the parent of %d')
1646 % (r, lastparent))
1646 % (r, lastparent))
1647 lastparent = p1
1647 lastparent = p1
1648
1648
1649 if not patchname:
1649 if not patchname:
1650 patchname = normname('%d.diff' % r)
1650 patchname = normname('%d.diff' % r)
1651 self.check_reserved_name(patchname)
1651 self.check_reserved_name(patchname)
1652 checkseries(patchname)
1652 checkseries(patchname)
1653 checkfile(patchname)
1653 checkfile(patchname)
1654 self.full_series.insert(0, patchname)
1654 self.full_series.insert(0, patchname)
1655
1655
1656 patchf = self.opener(patchname, "w")
1656 patchf = self.opener(patchname, "w")
1657 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1657 cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
1658 patchf.close()
1658 patchf.close()
1659
1659
1660 se = statusentry(n, patchname)
1660 se = statusentry(n, patchname)
1661 self.applied.insert(0, se)
1661 self.applied.insert(0, se)
1662
1662
1663 added.append(patchname)
1663 added.append(patchname)
1664 patchname = None
1664 patchname = None
1665 self.parse_series()
1665 self.parse_series()
1666 self.applied_dirty = 1
1666 self.applied_dirty = 1
1667
1667
1668 for i, filename in enumerate(files):
1668 for i, filename in enumerate(files):
1669 if existing:
1669 if existing:
1670 if filename == '-':
1670 if filename == '-':
1671 raise util.Abort(_('-e is incompatible with import from -'))
1671 raise util.Abort(_('-e is incompatible with import from -'))
1672 if not patchname:
1672 if not patchname:
1673 patchname = normname(filename)
1673 patchname = normname(filename)
1674 self.check_reserved_name(patchname)
1674 self.check_reserved_name(patchname)
1675 if not os.path.isfile(self.join(patchname)):
1675 if not os.path.isfile(self.join(patchname)):
1676 raise util.Abort(_("patch %s does not exist") % patchname)
1676 raise util.Abort(_("patch %s does not exist") % patchname)
1677 else:
1677 else:
1678 try:
1678 try:
1679 if filename == '-':
1679 if filename == '-':
1680 if not patchname:
1680 if not patchname:
1681 raise util.Abort(
1681 raise util.Abort(
1682 _('need --name to import a patch from -'))
1682 _('need --name to import a patch from -'))
1683 text = sys.stdin.read()
1683 text = sys.stdin.read()
1684 else:
1684 else:
1685 text = url.open(self.ui, filename).read()
1685 text = url.open(self.ui, filename).read()
1686 except (OSError, IOError):
1686 except (OSError, IOError):
1687 raise util.Abort(_("unable to read %s") % filename)
1687 raise util.Abort(_("unable to read %s") % filename)
1688 if not patchname:
1688 if not patchname:
1689 patchname = normname(os.path.basename(filename))
1689 patchname = normname(os.path.basename(filename))
1690 self.check_reserved_name(patchname)
1690 self.check_reserved_name(patchname)
1691 checkfile(patchname)
1691 checkfile(patchname)
1692 patchf = self.opener(patchname, "w")
1692 patchf = self.opener(patchname, "w")
1693 patchf.write(text)
1693 patchf.write(text)
1694 if not force:
1694 if not force:
1695 checkseries(patchname)
1695 checkseries(patchname)
1696 if patchname not in self.series:
1696 if patchname not in self.series:
1697 index = self.full_series_end() + i
1697 index = self.full_series_end() + i
1698 self.full_series[index:index] = [patchname]
1698 self.full_series[index:index] = [patchname]
1699 self.parse_series()
1699 self.parse_series()
1700 self.ui.warn(_("adding %s to series file\n") % patchname)
1700 self.ui.warn(_("adding %s to series file\n") % patchname)
1701 added.append(patchname)
1701 added.append(patchname)
1702 patchname = None
1702 patchname = None
1703 self.series_dirty = 1
1703 self.series_dirty = 1
1704 qrepo = self.qrepo()
1704 qrepo = self.qrepo()
1705 if qrepo:
1705 if qrepo:
1706 qrepo.add(added)
1706 qrepo.add(added)
1707
1707
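# An illustrative sketch (not part of the original mq.py): qimport --rev
# above walks the requested revisions from newest to oldest and insists
# they form a single, merge-free first-parent chain. This is a standalone
# check of that property on a toy DAG given as {rev: (p1, p2)};
# `is_linear_chain` and the -1 "null" parent are assumptions of this
# sketch, not mq API.
def is_linear_chain(parents, revs):
    """revs must be sorted newest first, as qimport sorts them."""
    lastparent = None
    for r in revs:
        p1, p2 = parents[r]
        if p2 != -1:
            return False            # merge revisions cannot be imported
        if lastparent is not None and lastparent != r:
            return False            # gap or branch in the requested range
        lastparent = p1
    return True

# parents = {3: (2, -1), 2: (1, -1), 1: (0, -1), 4: (1, -1)}
# is_linear_chain(parents, [3, 2, 1])  -> True
# is_linear_chain(parents, [4, 2])     -> False   (4's parent is 1, not 2)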
1708 def delete(ui, repo, *patches, **opts):
1708 def delete(ui, repo, *patches, **opts):
1709 """remove patches from queue
1709 """remove patches from queue
1710
1710
1711 The patches must not be applied, and at least one patch is required. With
1711 The patches must not be applied, and at least one patch is required. With
1712 -k/--keep, the patch files are preserved in the patch directory.
1712 -k/--keep, the patch files are preserved in the patch directory.
1713
1713
1714 To stop managing a patch and move it into permanent history,
1714 To stop managing a patch and move it into permanent history,
1715 use the qfinish command."""
1715 use the qfinish command."""
1716 q = repo.mq
1716 q = repo.mq
1717 q.delete(repo, patches, opts)
1717 q.delete(repo, patches, opts)
1718 q.save_dirty()
1718 q.save_dirty()
1719 return 0
1719 return 0
1720
1720
1721 def applied(ui, repo, patch=None, **opts):
1721 def applied(ui, repo, patch=None, **opts):
1722 """print the patches already applied"""
1722 """print the patches already applied"""
1723
1723
1724 q = repo.mq
1724 q = repo.mq
1725 l = len(q.applied)
1725 l = len(q.applied)
1726
1726
1727 if patch:
1727 if patch:
1728 if patch not in q.series:
1728 if patch not in q.series:
1729 raise util.Abort(_("patch %s is not in series file") % patch)
1729 raise util.Abort(_("patch %s is not in series file") % patch)
1730 end = q.series.index(patch) + 1
1730 end = q.series.index(patch) + 1
1731 else:
1731 else:
1732 end = q.series_end(True)
1732 end = q.series_end(True)
1733
1733
1734 if opts.get('last') and not end:
1734 if opts.get('last') and not end:
1735 ui.write(_("no patches applied\n"))
1735 ui.write(_("no patches applied\n"))
1736 return 1
1736 return 1
1737 elif opts.get('last') and end == 1:
1737 elif opts.get('last') and end == 1:
1738 ui.write(_("only one patch applied\n"))
1738 ui.write(_("only one patch applied\n"))
1739 return 1
1739 return 1
1740 elif opts.get('last'):
1740 elif opts.get('last'):
1741 start = end - 2
1741 start = end - 2
1742 end = 1
1742 end = 1
1743 else:
1743 else:
1744 start = 0
1744 start = 0
1745
1745
1746 return q.qseries(repo, length=end, start=start, status='A',
1746 return q.qseries(repo, length=end, start=start, status='A',
1747 summary=opts.get('summary'))
1747 summary=opts.get('summary'))
1748
1748
1749 def unapplied(ui, repo, patch=None, **opts):
1749 def unapplied(ui, repo, patch=None, **opts):
1750 """print the patches not yet applied"""
1750 """print the patches not yet applied"""
1751
1751
1752 q = repo.mq
1752 q = repo.mq
1753 if patch:
1753 if patch:
1754 if patch not in q.series:
1754 if patch not in q.series:
1755 raise util.Abort(_("patch %s is not in series file") % patch)
1755 raise util.Abort(_("patch %s is not in series file") % patch)
1756 start = q.series.index(patch) + 1
1756 start = q.series.index(patch) + 1
1757 else:
1757 else:
1758 start = q.series_end(True)
1758 start = q.series_end(True)
1759
1759
1760 if start == len(q.series) and opts.get('first'):
1760 if start == len(q.series) and opts.get('first'):
1761 ui.write(_("all patches applied\n"))
1761 ui.write(_("all patches applied\n"))
1762 return 1
1762 return 1
1763
1763
1764 length = opts.get('first') and 1 or None
1764 length = opts.get('first') and 1 or None
1765 return q.qseries(repo, start=start, length=length, status='U',
1765 return q.qseries(repo, start=start, length=length, status='U',
1766 summary=opts.get('summary'))
1766 summary=opts.get('summary'))
1767
1767
1768 def qimport(ui, repo, *filename, **opts):
1768 def qimport(ui, repo, *filename, **opts):
1769 """import a patch
1769 """import a patch
1770
1770
1771 The patch is inserted into the series after the last applied
1771 The patch is inserted into the series after the last applied
1772 patch. If no patches have been applied, qimport prepends the patch
1772 patch. If no patches have been applied, qimport prepends the patch
1773 to the series.
1773 to the series.
1774
1774
1775 The patch will have the same name as its source file unless you
1775 The patch will have the same name as its source file unless you
1776 give it a new one with -n/--name.
1776 give it a new one with -n/--name.
1777
1777
1778 You can register an existing patch inside the patch directory with
1778 You can register an existing patch inside the patch directory with
1779 the -e/--existing flag.
1779 the -e/--existing flag.
1780
1780
1781 With -f/--force, an existing patch of the same name will be
1781 With -f/--force, an existing patch of the same name will be
1782 overwritten.
1782 overwritten.
1783
1783
1784 An existing changeset may be placed under mq control with -r/--rev
1784 An existing changeset may be placed under mq control with -r/--rev
1785 (e.g. qimport --rev tip -n patch will place tip under mq control).
1785 (e.g. qimport --rev tip -n patch will place tip under mq control).
1786 With -g/--git, patches imported with --rev will use the git diff
1786 With -g/--git, patches imported with --rev will use the git diff
1787 format. See the diffs help topic for information on why this is
1787 format. See the diffs help topic for information on why this is
1788 important for preserving rename/copy information and permission
1788 important for preserving rename/copy information and permission
1789 changes.
1789 changes.
1790
1790
1791 To import a patch from standard input, pass - as the patch file.
1791 To import a patch from standard input, pass - as the patch file.
1792 When importing from standard input, a patch name must be specified
1792 When importing from standard input, a patch name must be specified
1793 using the --name flag.
1793 using the --name flag.
1794 """
1794 """
1795 q = repo.mq
1795 q = repo.mq
1796 q.qimport(repo, filename, patchname=opts['name'],
1796 q.qimport(repo, filename, patchname=opts['name'],
1797 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1797 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1798 git=opts['git'])
1798 git=opts['git'])
1799 q.save_dirty()
1799 q.save_dirty()
1800
1800
1801 if opts.get('push') and not opts.get('rev'):
1801 if opts.get('push') and not opts.get('rev'):
1802 return q.push(repo, None)
1802 return q.push(repo, None)
1803 return 0
1803 return 0
1804
1804
1805 def qinit(ui, repo, create):
1805 def qinit(ui, repo, create):
1806 """initialize a new queue repository
1806 """initialize a new queue repository
1807
1807
1808 This command also creates a series file for ordering patches, and
1808 This command also creates a series file for ordering patches, and
1809 an mq-specific .hgignore file in the queue repository, to exclude
1809 an mq-specific .hgignore file in the queue repository, to exclude
1810 the status and guards files (these contain mostly transient state)."""
1810 the status and guards files (these contain mostly transient state)."""
1811 q = repo.mq
1811 q = repo.mq
1812 r = q.init(repo, create)
1812 r = q.init(repo, create)
1813 q.save_dirty()
1813 q.save_dirty()
1814 if r:
1814 if r:
1815 if not os.path.exists(r.wjoin('.hgignore')):
1815 if not os.path.exists(r.wjoin('.hgignore')):
1816 fp = r.wopener('.hgignore', 'w')
1816 fp = r.wopener('.hgignore', 'w')
1817 fp.write('^\\.hg\n')
1817 fp.write('^\\.hg\n')
1818 fp.write('^\\.mq\n')
1818 fp.write('^\\.mq\n')
1819 fp.write('syntax: glob\n')
1819 fp.write('syntax: glob\n')
1820 fp.write('status\n')
1820 fp.write('status\n')
1821 fp.write('guards\n')
1821 fp.write('guards\n')
1822 fp.close()
1822 fp.close()
1823 if not os.path.exists(r.wjoin('series')):
1823 if not os.path.exists(r.wjoin('series')):
1824 r.wopener('series', 'w').close()
1824 r.wopener('series', 'w').close()
1825 r.add(['.hgignore', 'series'])
1825 r.add(['.hgignore', 'series'])
1826 commands.add(ui, r)
1826 commands.add(ui, r)
1827 return 0
1827 return 0
1828
1828
1829 def init(ui, repo, **opts):
1829 def init(ui, repo, **opts):
1830 """init a new queue repository (DEPRECATED)
1830 """init a new queue repository (DEPRECATED)
1831
1831
1832 The queue repository is unversioned by default. If
1832 The queue repository is unversioned by default. If
1833 -c/--create-repo is specified, qinit will create a separate nested
1833 -c/--create-repo is specified, qinit will create a separate nested
1834 repository for patches (qinit -c may also be run later to convert
1834 repository for patches (qinit -c may also be run later to convert
1835 an unversioned patch repository into a versioned one). You can use
1835 an unversioned patch repository into a versioned one). You can use
1836 qcommit to commit changes to this queue repository.
1836 qcommit to commit changes to this queue repository.
1837
1837
1838 This command is deprecated. Without -c, it's implied by other relevant
1838 This command is deprecated. Without -c, it's implied by other relevant
1839 commands. With -c, use :hg:`init --mq` instead."""
1839 commands. With -c, use :hg:`init --mq` instead."""
1840 return qinit(ui, repo, create=opts['create_repo'])
1840 return qinit(ui, repo, create=opts['create_repo'])
1841
1841
1842 def clone(ui, source, dest=None, **opts):
1842 def clone(ui, source, dest=None, **opts):
1843 '''clone main and patch repository at same time
1843 '''clone main and patch repository at same time
1844
1844
1845 If source is local, destination will have no patches applied. If
1845 If source is local, destination will have no patches applied. If
1846 source is remote, this command cannot check whether patches are
1846 source is remote, this command cannot check whether patches are
1847 applied in source, so it cannot guarantee that patches are not
1847 applied in source, so it cannot guarantee that patches are not
1848 applied in destination. If you clone a remote repository, make
1848 applied in destination. If you clone a remote repository, make
1849 sure it has no patches applied before you do so.
1849 sure it has no patches applied before you do so.
1850
1850
1851 The source patch repository is looked for in <src>/.hg/patches by
1851 The source patch repository is looked for in <src>/.hg/patches by
1852 default. Use -p <url> to change it.
1852 default. Use -p <url> to change it.
1853
1853
1854 The patch directory must be a nested Mercurial repository, as
1854 The patch directory must be a nested Mercurial repository, as
1855 would be created by :hg:`init --mq`.
1855 would be created by :hg:`init --mq`.
1856 '''
1856 '''
1857 def patchdir(repo):
1857 def patchdir(repo):
1858 url = repo.url()
1858 url = repo.url()
1859 if url.endswith('/'):
1859 if url.endswith('/'):
1860 url = url[:-1]
1860 url = url[:-1]
1861 return url + '/.hg/patches'
1861 return url + '/.hg/patches'
1862 if dest is None:
1862 if dest is None:
1863 dest = hg.defaultdest(source)
1863 dest = hg.defaultdest(source)
1864 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1864 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1865 if opts['patches']:
1865 if opts['patches']:
1866 patchespath = ui.expandpath(opts['patches'])
1866 patchespath = ui.expandpath(opts['patches'])
1867 else:
1867 else:
1868 patchespath = patchdir(sr)
1868 patchespath = patchdir(sr)
1869 try:
1869 try:
1870 hg.repository(ui, patchespath)
1870 hg.repository(ui, patchespath)
1871 except error.RepoError:
1871 except error.RepoError:
1872 raise util.Abort(_('versioned patch repository not found'
1872 raise util.Abort(_('versioned patch repository not found'
1873 ' (see init --mq)'))
1873 ' (see init --mq)'))
1874 qbase, destrev = None, None
1874 qbase, destrev = None, None
1875 if sr.local():
1875 if sr.local():
1876 if sr.mq.applied:
1876 if sr.mq.applied:
1877 qbase = sr.mq.applied[0].node
1877 qbase = sr.mq.applied[0].node
1878 if not hg.islocal(dest):
1878 if not hg.islocal(dest):
1879 heads = set(sr.heads())
1879 heads = set(sr.heads())
1880 destrev = list(heads.difference(sr.heads(qbase)))
1880 destrev = list(heads.difference(sr.heads(qbase)))
1881 destrev.append(sr.changelog.parents(qbase)[0])
1881 destrev.append(sr.changelog.parents(qbase)[0])
1882 elif sr.capable('lookup'):
1882 elif sr.capable('lookup'):
1883 try:
1883 try:
1884 qbase = sr.lookup('qbase')
1884 qbase = sr.lookup('qbase')
1885 except error.RepoError:
1885 except error.RepoError:
1886 pass
1886 pass
1887 ui.note(_('cloning main repository\n'))
1887 ui.note(_('cloning main repository\n'))
1888 sr, dr = hg.clone(ui, sr.url(), dest,
1888 sr, dr = hg.clone(ui, sr.url(), dest,
1889 pull=opts['pull'],
1889 pull=opts['pull'],
1890 rev=destrev,
1890 rev=destrev,
1891 update=False,
1891 update=False,
1892 stream=opts['uncompressed'])
1892 stream=opts['uncompressed'])
1893 ui.note(_('cloning patch repository\n'))
1893 ui.note(_('cloning patch repository\n'))
1894 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1894 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1895 pull=opts['pull'], update=not opts['noupdate'],
1895 pull=opts['pull'], update=not opts['noupdate'],
1896 stream=opts['uncompressed'])
1896 stream=opts['uncompressed'])
1897 if dr.local():
1897 if dr.local():
1898 if qbase:
1898 if qbase:
1899 ui.note(_('stripping applied patches from destination '
1899 ui.note(_('stripping applied patches from destination '
1900 'repository\n'))
1900 'repository\n'))
1901 dr.mq.strip(dr, qbase, update=False, backup=None)
1901 dr.mq.strip(dr, qbase, update=False, backup=None)
1902 if not opts['noupdate']:
1902 if not opts['noupdate']:
1903 ui.note(_('updating destination repository\n'))
1903 ui.note(_('updating destination repository\n'))
1904 hg.update(dr, dr.changelog.tip())
1904 hg.update(dr, dr.changelog.tip())
1905
1905
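# Illustration (not part of mq.py): a minimal standalone sketch of how qclone
# locates the nested patch repository, mirroring the patchdir() helper above.
# The URLs in the asserts are invented for the example.
def _patch_repo_location(repo_url):
    # strip a single trailing slash, then append the conventional
    # .hg/patches path of the nested patch repository
    if repo_url.endswith('/'):
        repo_url = repo_url[:-1]
    return repo_url + '/.hg/patches'

assert _patch_repo_location('http://example.com/repo/') == 'http://example.com/repo/.hg/patches'
assert _patch_repo_location('/home/user/repo') == '/home/user/repo/.hg/patches'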
1906 def commit(ui, repo, *pats, **opts):
1906 def commit(ui, repo, *pats, **opts):
1907 """commit changes in the queue repository (DEPRECATED)
1907 """commit changes in the queue repository (DEPRECATED)
1908
1908
1909 This command is deprecated; use :hg:`commit --mq` instead."""
1909 This command is deprecated; use :hg:`commit --mq` instead."""
1910 q = repo.mq
1910 q = repo.mq
1911 r = q.qrepo()
1911 r = q.qrepo()
1912 if not r:
1912 if not r:
1913 raise util.Abort('no queue repository')
1913 raise util.Abort('no queue repository')
1914 commands.commit(r.ui, r, *pats, **opts)
1914 commands.commit(r.ui, r, *pats, **opts)
1915
1915
1916 def series(ui, repo, **opts):
1916 def series(ui, repo, **opts):
1917 """print the entire series file"""
1917 """print the entire series file"""
1918 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1918 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1919 return 0
1919 return 0
1920
1920
1921 def top(ui, repo, **opts):
1921 def top(ui, repo, **opts):
1922 """print the name of the current patch"""
1922 """print the name of the current patch"""
1923 q = repo.mq
1923 q = repo.mq
1924 t = q.applied and q.series_end(True) or 0
1924 t = q.applied and q.series_end(True) or 0
1925 if t:
1925 if t:
1926 return q.qseries(repo, start=t - 1, length=1, status='A',
1926 return q.qseries(repo, start=t - 1, length=1, status='A',
1927 summary=opts.get('summary'))
1927 summary=opts.get('summary'))
1928 else:
1928 else:
1929 ui.write(_("no patches applied\n"))
1929 ui.write(_("no patches applied\n"))
1930 return 1
1930 return 1
1931
1931
1932 def next(ui, repo, **opts):
1932 def next(ui, repo, **opts):
1933 """print the name of the next patch"""
1933 """print the name of the next patch"""
1934 q = repo.mq
1934 q = repo.mq
1935 end = q.series_end()
1935 end = q.series_end()
1936 if end == len(q.series):
1936 if end == len(q.series):
1937 ui.write(_("all patches applied\n"))
1937 ui.write(_("all patches applied\n"))
1938 return 1
1938 return 1
1939 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1939 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1940
1940
1941 def prev(ui, repo, **opts):
1941 def prev(ui, repo, **opts):
1942 """print the name of the previous patch"""
1942 """print the name of the previous patch"""
1943 q = repo.mq
1943 q = repo.mq
1944 l = len(q.applied)
1944 l = len(q.applied)
1945 if l == 1:
1945 if l == 1:
1946 ui.write(_("only one patch applied\n"))
1946 ui.write(_("only one patch applied\n"))
1947 return 1
1947 return 1
1948 if not l:
1948 if not l:
1949 ui.write(_("no patches applied\n"))
1949 ui.write(_("no patches applied\n"))
1950 return 1
1950 return 1
1951 return q.qseries(repo, start=l - 2, length=1, status='A',
1951 return q.qseries(repo, start=l - 2, length=1, status='A',
1952 summary=opts.get('summary'))
1952 summary=opts.get('summary'))
1953
1953
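# Illustration (not part of mq.py): a simplified standalone sketch of the index
# arithmetic behind qtop/qnext/qprev above, ignoring guards (the real code uses
# q.series_end() to skip guarded patches). The series below is invented.
def _top_next_prev(series, applied_count):
    top = series[applied_count - 1] if applied_count else None
    nxt = series[applied_count] if applied_count < len(series) else None
    prev = series[applied_count - 2] if applied_count > 1 else None
    return top, nxt, prev

assert _top_next_prev(['a.patch', 'b.patch', 'c.patch'], 2) == ('b.patch', 'c.patch', 'a.patch')
assert _top_next_prev(['a.patch', 'b.patch', 'c.patch'], 0) == (None, 'a.patch', None)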
1954 def setupheaderopts(ui, opts):
1954 def setupheaderopts(ui, opts):
1955 if not opts.get('user') and opts.get('currentuser'):
1955 if not opts.get('user') and opts.get('currentuser'):
1956 opts['user'] = ui.username()
1956 opts['user'] = ui.username()
1957 if not opts.get('date') and opts.get('currentdate'):
1957 if not opts.get('date') and opts.get('currentdate'):
1958 opts['date'] = "%d %d" % util.makedate()
1958 opts['date'] = "%d %d" % util.makedate()
1959
1959
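# Illustration (not part of mq.py): a standalone sketch of the option handling
# in setupheaderopts() above -- -U/--currentuser and -D/--currentdate only fill
# in 'user' and 'date' when no explicit value was given. getpass and time stand
# in here for Mercurial's ui.username() and util.makedate().
import getpass
import time

def _fill_header_opts(opts):
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = getpass.getuser()
    if not opts.get('date') and opts.get('currentdate'):
        # mq stores a date as "unixtime timezone-offset"
        opts['date'] = '%d %d' % (int(time.time()), time.timezone)
    return opts

print(_fill_header_opts({'currentuser': True, 'currentdate': True}))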
1960 def new(ui, repo, patch, *args, **opts):
1960 def new(ui, repo, patch, *args, **opts):
1961 """create a new patch
1961 """create a new patch
1962
1962
1963 qnew creates a new patch on top of the currently-applied patch (if
1963 qnew creates a new patch on top of the currently-applied patch (if
1964 any). The patch will be initialized with any outstanding changes
1964 any). The patch will be initialized with any outstanding changes
1965 in the working directory. You may also use -I/--include,
1965 in the working directory. You may also use -I/--include,
1966 -X/--exclude, and/or a list of files after the patch name to add
1966 -X/--exclude, and/or a list of files after the patch name to add
1967 only changes to matching files to the new patch, leaving the rest
1967 only changes to matching files to the new patch, leaving the rest
1968 as uncommitted modifications.
1968 as uncommitted modifications.
1969
1969
1970 -u/--user and -d/--date can be used to set the (given) user and
1970 -u/--user and -d/--date can be used to set the (given) user and
1971 date, respectively. -U/--currentuser and -D/--currentdate set user
1971 date, respectively. -U/--currentuser and -D/--currentdate set user
1972 to current user and date to current date.
1972 to current user and date to current date.
1973
1973
1974 -e/--edit, -m/--message or -l/--logfile set the patch header as
1974 -e/--edit, -m/--message or -l/--logfile set the patch header as
1975 well as the commit message. If none is specified, the header is
1975 well as the commit message. If none is specified, the header is
1976 empty and the commit message is '[mq]: PATCH'.
1976 empty and the commit message is '[mq]: PATCH'.
1977
1977
1978 Use the -g/--git option to keep the patch in the git extended diff
1978 Use the -g/--git option to keep the patch in the git extended diff
1979 format. Read the diffs help topic for more information on why this
1979 format. Read the diffs help topic for more information on why this
1980 is important for preserving permission changes and copy/rename
1980 is important for preserving permission changes and copy/rename
1981 information.
1981 information.
1982 """
1982 """
1983 msg = cmdutil.logmessage(opts)
1983 msg = cmdutil.logmessage(opts)
1984 def getmsg():
1984 def getmsg():
1985 return ui.edit(msg, ui.username())
1985 return ui.edit(msg, ui.username())
1986 q = repo.mq
1986 q = repo.mq
1987 opts['msg'] = msg
1987 opts['msg'] = msg
1988 if opts.get('edit'):
1988 if opts.get('edit'):
1989 opts['msg'] = getmsg
1989 opts['msg'] = getmsg
1990 else:
1990 else:
1991 opts['msg'] = msg
1991 opts['msg'] = msg
1992 setupheaderopts(ui, opts)
1992 setupheaderopts(ui, opts)
1993 q.new(repo, patch, *args, **opts)
1993 q.new(repo, patch, *args, **opts)
1994 q.save_dirty()
1994 q.save_dirty()
1995 return 0
1995 return 0
1996
1996
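# Illustration (not part of mq.py): with -e/--edit, new() above passes a
# zero-argument callable instead of a plain string as the message, letting
# q.new() decide when to invoke the editor. A standalone sketch of that
# string-or-callable pattern:
def _resolve_message(msg):
    # accept either a ready-made string or a callable producing one
    return msg() if callable(msg) else msg

assert _resolve_message('[mq]: first.patch') == '[mq]: first.patch'
assert _resolve_message(lambda: 'text from the editor') == 'text from the editor'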
1997 def refresh(ui, repo, *pats, **opts):
1997 def refresh(ui, repo, *pats, **opts):
1998 """update the current patch
1998 """update the current patch
1999
1999
2000 If any file patterns are provided, the refreshed patch will
2000 If any file patterns are provided, the refreshed patch will
2001 contain only the modifications that match those patterns; the
2001 contain only the modifications that match those patterns; the
2002 remaining modifications will remain in the working directory.
2002 remaining modifications will remain in the working directory.
2003
2003
2004 If -s/--short is specified, files currently included in the patch
2004 If -s/--short is specified, files currently included in the patch
2005 will be refreshed just like matched files and remain in the patch.
2005 will be refreshed just like matched files and remain in the patch.
2006
2006
2007 hg add/remove/copy/rename work as usual, though you might want to
2007 hg add/remove/copy/rename work as usual, though you might want to
2008 use git-style patches (-g/--git or [diff] git=1) to track copies
2008 use git-style patches (-g/--git or [diff] git=1) to track copies
2009 and renames. See the diffs help topic for more information on the
2009 and renames. See the diffs help topic for more information on the
2010 git diff format.
2010 git diff format.
2011 """
2011 """
2012 q = repo.mq
2012 q = repo.mq
2013 message = cmdutil.logmessage(opts)
2013 message = cmdutil.logmessage(opts)
2014 if opts['edit']:
2014 if opts['edit']:
2015 if not q.applied:
2015 if not q.applied:
2016 ui.write(_("no patches applied\n"))
2016 ui.write(_("no patches applied\n"))
2017 return 1
2017 return 1
2018 if message:
2018 if message:
2019 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2019 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2020 patch = q.applied[-1].name
2020 patch = q.applied[-1].name
2021 ph = patchheader(q.join(patch), q.plainmode)
2021 ph = patchheader(q.join(patch), q.plainmode)
2022 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
2022 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
2023 setupheaderopts(ui, opts)
2023 setupheaderopts(ui, opts)
2024 ret = q.refresh(repo, pats, msg=message, **opts)
2024 ret = q.refresh(repo, pats, msg=message, **opts)
2025 q.save_dirty()
2025 q.save_dirty()
2026 return ret
2026 return ret
2027
2027
2028 def diff(ui, repo, *pats, **opts):
2028 def diff(ui, repo, *pats, **opts):
2029 """diff of the current patch and subsequent modifications
2029 """diff of the current patch and subsequent modifications
2030
2030
2031 Shows a diff which includes the current patch as well as any
2031 Shows a diff which includes the current patch as well as any
2032 changes which have been made in the working directory since the
2032 changes which have been made in the working directory since the
2033 last refresh (thus showing what the current patch would become
2033 last refresh (thus showing what the current patch would become
2034 after a qrefresh).
2034 after a qrefresh).
2035
2035
2036 Use :hg:`diff` if you only want to see the changes made since the
2036 Use :hg:`diff` if you only want to see the changes made since the
2037 last qrefresh, or :hg:`export qtip` if you want to see changes
2037 last qrefresh, or :hg:`export qtip` if you want to see changes
2038 made by the current patch without including changes made since the
2038 made by the current patch without including changes made since the
2039 qrefresh.
2039 qrefresh.
2040 """
2040 """
2041 repo.mq.diff(repo, pats, opts)
2041 repo.mq.diff(repo, pats, opts)
2042 return 0
2042 return 0
2043
2043
2044 def fold(ui, repo, *files, **opts):
2044 def fold(ui, repo, *files, **opts):
2045 """fold the named patches into the current patch
2045 """fold the named patches into the current patch
2046
2046
2047 Patches must not yet be applied. Each patch will be successively
2047 Patches must not yet be applied. Each patch will be successively
2048 applied to the current patch in the order given. If all the
2048 applied to the current patch in the order given. If all the
2049 patches apply successfully, the current patch will be refreshed
2049 patches apply successfully, the current patch will be refreshed
2050 with the new cumulative patch, and the folded patches will be
2050 with the new cumulative patch, and the folded patches will be
2051 deleted. With -k/--keep, the folded patch files will not be
2051 deleted. With -k/--keep, the folded patch files will not be
2052 removed afterwards.
2052 removed afterwards.
2053
2053
2054 The header for each folded patch will be concatenated with the
2054 The header for each folded patch will be concatenated with the
2055 current patch header, separated by a line of '* * *'."""
2055 current patch header, separated by a line of '* * *'."""
2056
2056
2057 q = repo.mq
2057 q = repo.mq
2058
2058
2059 if not files:
2059 if not files:
2060 raise util.Abort(_('qfold requires at least one patch name'))
2060 raise util.Abort(_('qfold requires at least one patch name'))
2061 if not q.check_toppatch(repo)[0]:
2061 if not q.check_toppatch(repo)[0]:
2062 raise util.Abort(_('No patches applied'))
2062 raise util.Abort(_('No patches applied'))
2063 q.check_localchanges(repo)
2063 q.check_localchanges(repo)
2064
2064
2065 message = cmdutil.logmessage(opts)
2065 message = cmdutil.logmessage(opts)
2066 if opts['edit']:
2066 if opts['edit']:
2067 if message:
2067 if message:
2068 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2068 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
2069
2069
2070 parent = q.lookup('qtip')
2070 parent = q.lookup('qtip')
2071 patches = []
2071 patches = []
2072 messages = []
2072 messages = []
2073 for f in files:
2073 for f in files:
2074 p = q.lookup(f)
2074 p = q.lookup(f)
2075 if p in patches or p == parent:
2075 if p in patches or p == parent:
2076 ui.warn(_('Skipping already folded patch %s\n') % p)
2076 ui.warn(_('Skipping already folded patch %s\n') % p)
2077 if q.isapplied(p):
2077 if q.isapplied(p):
2078 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
2078 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
2079 patches.append(p)
2079 patches.append(p)
2080
2080
2081 for p in patches:
2081 for p in patches:
2082 if not message:
2082 if not message:
2083 ph = patchheader(q.join(p), q.plainmode)
2083 ph = patchheader(q.join(p), q.plainmode)
2084 if ph.message:
2084 if ph.message:
2085 messages.append(ph.message)
2085 messages.append(ph.message)
2086 pf = q.join(p)
2086 pf = q.join(p)
2087 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2087 (patchsuccess, files, fuzz) = q.patch(repo, pf)
2088 if not patchsuccess:
2088 if not patchsuccess:
2089 raise util.Abort(_('Error folding patch %s') % p)
2089 raise util.Abort(_('Error folding patch %s') % p)
2090 patch.updatedir(ui, repo, files)
2090 patch.updatedir(ui, repo, files)
2091
2091
2092 if not message:
2092 if not message:
2093 ph = patchheader(q.join(parent), q.plainmode)
2093 ph = patchheader(q.join(parent), q.plainmode)
2094 message, user = ph.message, ph.user
2094 message, user = ph.message, ph.user
2095 for msg in messages:
2095 for msg in messages:
2096 message.append('* * *')
2096 message.append('* * *')
2097 message.extend(msg)
2097 message.extend(msg)
2098 message = '\n'.join(message)
2098 message = '\n'.join(message)
2099
2099
2100 if opts['edit']:
2100 if opts['edit']:
2101 message = ui.edit(message, user or ui.username())
2101 message = ui.edit(message, user or ui.username())
2102
2102
2103 diffopts = q.patchopts(q.diffopts(), *patches)
2103 diffopts = q.patchopts(q.diffopts(), *patches)
2104 q.refresh(repo, msg=message, git=diffopts.git)
2104 q.refresh(repo, msg=message, git=diffopts.git)
2105 q.delete(repo, patches, opts)
2105 q.delete(repo, patches, opts)
2106 q.save_dirty()
2106 q.save_dirty()
2107
2107
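# Illustration (not part of mq.py): a standalone sketch of how qfold assembles
# the combined commit message -- the current patch's message followed by each
# folded patch's message, separated by a '* * *' line as the docstring above
# describes. The messages are invented for the example.
def _fold_messages(current_lines, folded_messages):
    combined = list(current_lines)
    for lines in folded_messages:
        combined.append('* * *')
        combined.extend(lines)
    return '\n'.join(combined)

print(_fold_messages(['fix parser'], [['add tests'], ['update docs']]))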
2108 def goto(ui, repo, patch, **opts):
2108 def goto(ui, repo, patch, **opts):
2109 '''push or pop patches until named patch is at top of stack'''
2109 '''push or pop patches until named patch is at top of stack'''
2110 q = repo.mq
2110 q = repo.mq
2111 patch = q.lookup(patch)
2111 patch = q.lookup(patch)
2112 if q.isapplied(patch):
2112 if q.isapplied(patch):
2113 ret = q.pop(repo, patch, force=opts['force'])
2113 ret = q.pop(repo, patch, force=opts['force'])
2114 else:
2114 else:
2115 ret = q.push(repo, patch, force=opts['force'])
2115 ret = q.push(repo, patch, force=opts['force'])
2116 q.save_dirty()
2116 q.save_dirty()
2117 return ret
2117 return ret
2118
2118
2119 def guard(ui, repo, *args, **opts):
2119 def guard(ui, repo, *args, **opts):
2120 '''set or print guards for a patch
2120 '''set or print guards for a patch
2121
2121
2122 Guards control whether a patch can be pushed. A patch with no
2122 Guards control whether a patch can be pushed. A patch with no
2123 guards is always pushed. A patch with a positive guard ("+foo") is
2123 guards is always pushed. A patch with a positive guard ("+foo") is
2124 pushed only if the qselect command has activated it. A patch with
2124 pushed only if the qselect command has activated it. A patch with
2125 a negative guard ("-foo") is never pushed if the qselect command
2125 a negative guard ("-foo") is never pushed if the qselect command
2126 has activated it.
2126 has activated it.
2127
2127
2128 With no arguments, print the currently active guards.
2128 With no arguments, print the currently active guards.
2129 With arguments, set guards for the named patch.
2129 With arguments, set guards for the named patch.
2130 NOTE: Specifying negative guards now requires '--'.
2130 NOTE: Specifying negative guards now requires '--'.
2131
2131
2132 To set guards on another patch::
2132 To set guards on another patch::
2133
2133
2134 hg qguard other.patch -- +2.6.17 -stable
2134 hg qguard other.patch -- +2.6.17 -stable
2135 '''
2135 '''
2136 def status(idx):
2136 def status(idx):
2137 guards = q.series_guards[idx] or ['unguarded']
2137 guards = q.series_guards[idx] or ['unguarded']
2138 ui.write('%s: ' % ui.label(q.series[idx], 'qguard.patch'))
2138 ui.write('%s: ' % ui.label(q.series[idx], 'qguard.patch'))
2139 for i, guard in enumerate(guards):
2139 for i, guard in enumerate(guards):
2140 if guard.startswith('+'):
2140 if guard.startswith('+'):
2141 ui.write(guard, label='qguard.positive')
2141 ui.write(guard, label='qguard.positive')
2142 elif guard.startswith('-'):
2142 elif guard.startswith('-'):
2143 ui.write(guard, label='qguard.negative')
2143 ui.write(guard, label='qguard.negative')
2144 else:
2144 else:
2145 ui.write(guard, label='qguard.unguarded')
2145 ui.write(guard, label='qguard.unguarded')
2146 if i != len(guards) - 1:
2146 if i != len(guards) - 1:
2147 ui.write(' ')
2147 ui.write(' ')
2148 ui.write('\n')
2148 ui.write('\n')
2149 q = repo.mq
2149 q = repo.mq
2150 patch = None
2150 patch = None
2151 args = list(args)
2151 args = list(args)
2152 if opts['list']:
2152 if opts['list']:
2153 if args or opts['none']:
2153 if args or opts['none']:
2154 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2154 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2155 for i in xrange(len(q.series)):
2155 for i in xrange(len(q.series)):
2156 status(i)
2156 status(i)
2157 return
2157 return
2158 if not args or args[0][0:1] in '-+':
2158 if not args or args[0][0:1] in '-+':
2159 if not q.applied:
2159 if not q.applied:
2160 raise util.Abort(_('no patches applied'))
2160 raise util.Abort(_('no patches applied'))
2161 patch = q.applied[-1].name
2161 patch = q.applied[-1].name
2162 if patch is None and args[0][0:1] not in '-+':
2162 if patch is None and args[0][0:1] not in '-+':
2163 patch = args.pop(0)
2163 patch = args.pop(0)
2164 if patch is None:
2164 if patch is None:
2165 raise util.Abort(_('no patch to work with'))
2165 raise util.Abort(_('no patch to work with'))
2166 if args or opts['none']:
2166 if args or opts['none']:
2167 idx = q.find_series(patch)
2167 idx = q.find_series(patch)
2168 if idx is None:
2168 if idx is None:
2169 raise util.Abort(_('no patch named %s') % patch)
2169 raise util.Abort(_('no patch named %s') % patch)
2170 q.set_guards(idx, args)
2170 q.set_guards(idx, args)
2171 q.save_dirty()
2171 q.save_dirty()
2172 else:
2172 else:
2173 status(q.series.index(q.lookup(patch)))
2173 status(q.series.index(q.lookup(patch)))
2174
2174
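# Illustration (not part of mq.py): a standalone sketch of how the status()
# helper above classifies a guard for display -- a leading '+' is a positive
# guard, a leading '-' a negative one, and anything else is shown as unguarded.
def _classify_guard(guard):
    if guard.startswith('+'):
        return 'positive'
    if guard.startswith('-'):
        return 'negative'
    return 'unguarded'

assert _classify_guard('+stable') == 'positive'
assert _classify_guard('-experimental') == 'negative'
assert _classify_guard('unguarded') == 'unguarded'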
2175 def header(ui, repo, patch=None):
2175 def header(ui, repo, patch=None):
2176 """print the header of the topmost or specified patch"""
2176 """print the header of the topmost or specified patch"""
2177 q = repo.mq
2177 q = repo.mq
2178
2178
2179 if patch:
2179 if patch:
2180 patch = q.lookup(patch)
2180 patch = q.lookup(patch)
2181 else:
2181 else:
2182 if not q.applied:
2182 if not q.applied:
2183 ui.write(_('no patches applied\n'))
2183 ui.write(_('no patches applied\n'))
2184 return 1
2184 return 1
2185 patch = q.lookup('qtip')
2185 patch = q.lookup('qtip')
2186 ph = patchheader(q.join(patch), q.plainmode)
2186 ph = patchheader(q.join(patch), q.plainmode)
2187
2187
2188 ui.write('\n'.join(ph.message) + '\n')
2188 ui.write('\n'.join(ph.message) + '\n')
2189
2189
2190 def lastsavename(path):
2190 def lastsavename(path):
2191 (directory, base) = os.path.split(path)
2191 (directory, base) = os.path.split(path)
2192 names = os.listdir(directory)
2192 names = os.listdir(directory)
2193 namere = re.compile("%s.([0-9]+)" % base)
2193 namere = re.compile("%s.([0-9]+)" % base)
2194 maxindex = None
2194 maxindex = None
2195 maxname = None
2195 maxname = None
2196 for f in names:
2196 for f in names:
2197 m = namere.match(f)
2197 m = namere.match(f)
2198 if m:
2198 if m:
2199 index = int(m.group(1))
2199 index = int(m.group(1))
2200 if maxindex is None or index > maxindex:
2200 if maxindex is None or index > maxindex:
2201 maxindex = index
2201 maxindex = index
2202 maxname = f
2202 maxname = f
2203 if maxname:
2203 if maxname:
2204 return (os.path.join(directory, maxname), maxindex)
2204 return (os.path.join(directory, maxname), maxindex)
2205 return (None, None)
2205 return (None, None)
2206
2206
2207 def savename(path):
2207 def savename(path):
2208 (last, index) = lastsavename(path)
2208 (last, index) = lastsavename(path)
2209 if last is None:
2209 if last is None:
2210 index = 0
2210 index = 0
2211 newpath = path + ".%d" % (index + 1)
2211 newpath = path + ".%d" % (index + 1)
2212 return newpath
2212 return newpath
2213
2213
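# Illustration (not part of mq.py): a standalone sketch of the numbering scheme
# behind lastsavename()/savename() above -- saved queues sit next to the
# original path with a numeric suffix, and the next save name increments the
# highest existing index (this sketch anchors and escapes the pattern, unlike
# the original regex). The file names are invented for the example.
import re

def _next_save_name(base, existing_names):
    namere = re.compile(r'%s\.([0-9]+)$' % re.escape(base))
    indexes = [int(m.group(1)) for m in map(namere.match, existing_names) if m]
    return '%s.%d' % (base, (max(indexes) if indexes else 0) + 1)

assert _next_save_name('patches', ['patches.1', 'patches.3']) == 'patches.4'
assert _next_save_name('patches', []) == 'patches.1'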
2214 def push(ui, repo, patch=None, **opts):
2214 def push(ui, repo, patch=None, **opts):
2215 """push the next patch onto the stack
2215 """push the next patch onto the stack
2216
2216
2217 When -f/--force is applied, all local changes in patched files
2217 When -f/--force is applied, all local changes in patched files
2218 will be lost.
2218 will be lost.
2219 """
2219 """
2220 q = repo.mq
2220 q = repo.mq
2221 mergeq = None
2221 mergeq = None
2222
2222
2223 if opts['merge']:
2223 if opts['merge']:
2224 if opts['name']:
2224 if opts['name']:
2225 newpath = repo.join(opts['name'])
2225 newpath = repo.join(opts['name'])
2226 else:
2226 else:
2227 newpath, i = lastsavename(q.path)
2227 newpath, i = lastsavename(q.path)
2228 if not newpath:
2228 if not newpath:
2229 ui.warn(_("no saved queues found, please use -n\n"))
2229 ui.warn(_("no saved queues found, please use -n\n"))
2230 return 1
2230 return 1
2231 mergeq = queue(ui, repo.join(""), newpath)
2231 mergeq = queue(ui, repo.join(""), newpath)
2232 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2232 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2233 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2233 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2234 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'))
2234 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'))
2235 return ret
2235 return ret
2236
2236
2237 def pop(ui, repo, patch=None, **opts):
2237 def pop(ui, repo, patch=None, **opts):
2238 """pop the current patch off the stack
2238 """pop the current patch off the stack
2239
2239
2240 By default, pops off the top of the patch stack. If given a patch
2240 By default, pops off the top of the patch stack. If given a patch
2241 name, keeps popping off patches until the named patch is at the
2241 name, keeps popping off patches until the named patch is at the
2242 top of the stack.
2242 top of the stack.
2243 """
2243 """
2244 localupdate = True
2244 localupdate = True
2245 if opts['name']:
2245 if opts['name']:
2246 q = queue(ui, repo.join(""), repo.join(opts['name']))
2246 q = queue(ui, repo.join(""), repo.join(opts['name']))
2247 ui.warn(_('using patch queue: %s\n') % q.path)
2247 ui.warn(_('using patch queue: %s\n') % q.path)
2248 localupdate = False
2248 localupdate = False
2249 else:
2249 else:
2250 q = repo.mq
2250 q = repo.mq
2251 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2251 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2252 all=opts['all'])
2252 all=opts['all'])
2253 q.save_dirty()
2253 q.save_dirty()
2254 return ret
2254 return ret
2255
2255
2256 def rename(ui, repo, patch, name=None, **opts):
2256 def rename(ui, repo, patch, name=None, **opts):
2257 """rename a patch
2257 """rename a patch
2258
2258
2259 With one argument, renames the current patch to PATCH1.
2259 With one argument, renames the current patch to PATCH1.
2260 With two arguments, renames PATCH1 to PATCH2."""
2260 With two arguments, renames PATCH1 to PATCH2."""
2261
2261
2262 q = repo.mq
2262 q = repo.mq
2263
2263
2264 if not name:
2264 if not name:
2265 name = patch
2265 name = patch
2266 patch = None
2266 patch = None
2267
2267
2268 if patch:
2268 if patch:
2269 patch = q.lookup(patch)
2269 patch = q.lookup(patch)
2270 else:
2270 else:
2271 if not q.applied:
2271 if not q.applied:
2272 ui.write(_('no patches applied\n'))
2272 ui.write(_('no patches applied\n'))
2273 return
2273 return
2274 patch = q.lookup('qtip')
2274 patch = q.lookup('qtip')
2275 absdest = q.join(name)
2275 absdest = q.join(name)
2276 if os.path.isdir(absdest):
2276 if os.path.isdir(absdest):
2277 name = normname(os.path.join(name, os.path.basename(patch)))
2277 name = normname(os.path.join(name, os.path.basename(patch)))
2278 absdest = q.join(name)
2278 absdest = q.join(name)
2279 if os.path.exists(absdest):
2279 if os.path.exists(absdest):
2280 raise util.Abort(_('%s already exists') % absdest)
2280 raise util.Abort(_('%s already exists') % absdest)
2281
2281
2282 if name in q.series:
2282 if name in q.series:
2283 raise util.Abort(
2283 raise util.Abort(
2284 _('A patch named %s already exists in the series file') % name)
2284 _('A patch named %s already exists in the series file') % name)
2285
2285
2286 ui.note(_('renaming %s to %s\n') % (patch, name))
2286 ui.note(_('renaming %s to %s\n') % (patch, name))
2287 i = q.find_series(patch)
2287 i = q.find_series(patch)
2288 guards = q.guard_re.findall(q.full_series[i])
2288 guards = q.guard_re.findall(q.full_series[i])
2289 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2289 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2290 q.parse_series()
2290 q.parse_series()
2291 q.series_dirty = 1
2291 q.series_dirty = 1
2292
2292
2293 info = q.isapplied(patch)
2293 info = q.isapplied(patch)
2294 if info:
2294 if info:
2295 q.applied[info[0]] = statusentry(info[1], name)
2295 q.applied[info[0]] = statusentry(info[1], name)
2296 q.applied_dirty = 1
2296 q.applied_dirty = 1
2297
2297
2298 util.rename(q.join(patch), absdest)
2298 util.rename(q.join(patch), absdest)
2299 r = q.qrepo()
2299 r = q.qrepo()
2300 if r:
2300 if r:
2301 wlock = r.wlock()
2301 wlock = r.wlock()
2302 try:
2302 try:
2303 if r.dirstate[patch] == 'a':
2303 if r.dirstate[patch] == 'a':
2304 r.dirstate.forget(patch)
2304 r.dirstate.forget(patch)
2305 r.dirstate.add(name)
2305 r.dirstate.add(name)
2306 else:
2306 else:
2307 if r.dirstate[name] == 'r':
2307 if r.dirstate[name] == 'r':
2308 r.undelete([name])
2308 r.undelete([name])
2309 r.copy(patch, name)
2309 r.copy(patch, name)
2310 r.remove([patch], False)
2310 r.remove([patch], False)
2311 finally:
2311 finally:
2312 wlock.release()
2312 wlock.release()
2313
2313
2314 q.save_dirty()
2314 q.save_dirty()
2315
2315
2316 def restore(ui, repo, rev, **opts):
2316 def restore(ui, repo, rev, **opts):
2317 """restore the queue state saved by a revision (DEPRECATED)
2317 """restore the queue state saved by a revision (DEPRECATED)
2318
2318
2319 This command is deprecated; use :hg:`rebase --mq` instead."""
2319 This command is deprecated; use :hg:`rebase --mq` instead."""
2320 rev = repo.lookup(rev)
2320 rev = repo.lookup(rev)
2321 q = repo.mq
2321 q = repo.mq
2322 q.restore(repo, rev, delete=opts['delete'],
2322 q.restore(repo, rev, delete=opts['delete'],
2323 qupdate=opts['update'])
2323 qupdate=opts['update'])
2324 q.save_dirty()
2324 q.save_dirty()
2325 return 0
2325 return 0
2326
2326
2327 def save(ui, repo, **opts):
2327 def save(ui, repo, **opts):
2328 """save current queue state (DEPRECATED)
2328 """save current queue state (DEPRECATED)
2329
2329
2330 This command is deprecated; use :hg:`rebase --mq` instead."""
2330 This command is deprecated; use :hg:`rebase --mq` instead."""
2331 q = repo.mq
2331 q = repo.mq
2332 message = cmdutil.logmessage(opts)
2332 message = cmdutil.logmessage(opts)
2333 ret = q.save(repo, msg=message)
2333 ret = q.save(repo, msg=message)
2334 if ret:
2334 if ret:
2335 return ret
2335 return ret
2336 q.save_dirty()
2336 q.save_dirty()
2337 if opts['copy']:
2337 if opts['copy']:
2338 path = q.path
2338 path = q.path
2339 if opts['name']:
2339 if opts['name']:
2340 newpath = os.path.join(q.basepath, opts['name'])
2340 newpath = os.path.join(q.basepath, opts['name'])
2341 if os.path.exists(newpath):
2341 if os.path.exists(newpath):
2342 if not os.path.isdir(newpath):
2342 if not os.path.isdir(newpath):
2343 raise util.Abort(_('destination %s exists and is not '
2343 raise util.Abort(_('destination %s exists and is not '
2344 'a directory') % newpath)
2344 'a directory') % newpath)
2345 if not opts['force']:
2345 if not opts['force']:
2346 raise util.Abort(_('destination %s exists, '
2346 raise util.Abort(_('destination %s exists, '
2347 'use -f to force') % newpath)
2347 'use -f to force') % newpath)
2348 else:
2348 else:
2349 newpath = savename(path)
2349 newpath = savename(path)
2350 ui.warn(_("copy %s to %s\n") % (path, newpath))
2350 ui.warn(_("copy %s to %s\n") % (path, newpath))
2351 util.copyfiles(path, newpath)
2351 util.copyfiles(path, newpath)
2352 if opts['empty']:
2352 if opts['empty']:
2353 try:
2353 try:
2354 os.unlink(q.join(q.status_path))
2354 os.unlink(q.join(q.status_path))
2355 except OSError:
2355 except OSError:
2356 pass
2356 pass
2357 return 0
2357 return 0
2358
2358
2359 def strip(ui, repo, rev, **opts):
2359 def strip(ui, repo, rev, **opts):
2360 """strip a changeset and all its descendants from the repository
2360 """strip a changeset and all its descendants from the repository
2361
2361
2362 The strip command removes all changesets whose local revision
2362 The strip command removes all changesets whose local revision
2363 number is greater than or equal to REV, and then restores any
2363 number is greater than or equal to REV, and then restores any
2364 changesets that are not descendants of REV. If the working
2364 changesets that are not descendants of REV. If the working
2365 directory has uncommitted changes, the operation is aborted unless
2365 directory has uncommitted changes, the operation is aborted unless
2366 the --force flag is supplied.
2366 the --force flag is supplied.
2367
2367
2368 If a parent of the working directory is stripped, then the working
2368 If a parent of the working directory is stripped, then the working
2369 directory will automatically be updated to the most recent
2369 directory will automatically be updated to the most recent
2370 available ancestor of the stripped parent after the operation
2370 available ancestor of the stripped parent after the operation
2371 completes.
2371 completes.
2372
2372
2373 Any stripped changesets are stored in ``.hg/strip-backup`` as a
2373 Any stripped changesets are stored in ``.hg/strip-backup`` as a
2374 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
2374 bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
2375 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
2375 be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
2376 where BUNDLE is the bundle file created by the strip. Note that
2376 where BUNDLE is the bundle file created by the strip. Note that
2377 the local revision numbers will in general be different after the
2377 the local revision numbers will in general be different after the
2378 restore.
2378 restore.
2379
2379
2380 Use the --nobackup option to discard the backup bundle once the
2380 Use the --nobackup option to discard the backup bundle once the
2381 operation completes.
2381 operation completes.
2382 """
2382 """
2383 backup = 'all'
2383 backup = 'all'
2384 if opts['backup']:
2384 if opts['backup']:
2385 backup = 'strip'
2385 backup = 'strip'
2386 elif opts['nobackup']:
2386 elif opts['nobackup']:
2387 backup = 'none'
2387 backup = 'none'
2388
2388
2389 rev = repo.lookup(rev)
2389 rev = repo.lookup(rev)
2390 p = repo.dirstate.parents()
2390 p = repo.dirstate.parents()
2391 cl = repo.changelog
2391 cl = repo.changelog
2392 update = True
2392 update = True
2393 if p[0] == nullid:
2393 if p[0] == nullid:
2394 update = False
2394 update = False
2395 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2395 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2396 update = False
2396 update = False
2397 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2397 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2398 update = False
2398 update = False
2399
2399
2400 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2400 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2401 return 0
2401 return 0
2402
2402
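# Illustration (not part of mq.py): a standalone sketch of how strip() above
# maps its mutually exclusive options onto the backup mode passed to mq's
# strip(): 'all' by default, 'strip' for --backup, and 'none' for --nobackup
# (no backup bundle is kept).
def _backup_mode(opts):
    if opts.get('backup'):
        return 'strip'
    if opts.get('nobackup'):
        return 'none'
    return 'all'

assert _backup_mode({}) == 'all'
assert _backup_mode({'backup': True}) == 'strip'
assert _backup_mode({'nobackup': True}) == 'none'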
2403 def select(ui, repo, *args, **opts):
2403 def select(ui, repo, *args, **opts):
2404 '''set or print guarded patches to push
2404 '''set or print guarded patches to push
2405
2405
2406 Use the qguard command to set or print guards on a patch, then use
2406 Use the qguard command to set or print guards on a patch, then use
2407 qselect to tell mq which guards to use. A patch will be pushed if
2407 qselect to tell mq which guards to use. A patch will be pushed if
2408 it has no guards or any positive guards match the currently
2408 it has no guards or any positive guards match the currently
2409 selected guard, but will not be pushed if any negative guards
2409 selected guard, but will not be pushed if any negative guards
2410 match the current guard. For example::
2410 match the current guard. For example::
2411
2411
2412 qguard foo.patch -stable (negative guard)
2412 qguard foo.patch -stable (negative guard)
2413 qguard bar.patch +stable (positive guard)
2413 qguard bar.patch +stable (positive guard)
2414 qselect stable
2414 qselect stable
2415
2415
2416 This activates the "stable" guard. mq will skip foo.patch (because
2416 This activates the "stable" guard. mq will skip foo.patch (because
2417 it has a negative match) but push bar.patch (because it has a
2417 it has a negative match) but push bar.patch (because it has a
2418 positive match).
2418 positive match).
2419
2419
2420 With no arguments, prints the currently active guards.
2420 With no arguments, prints the currently active guards.
2421 With one argument, sets the active guard.
2421 With one argument, sets the active guard.
2422
2422
2423 Use -n/--none to deactivate guards (no other arguments needed).
2423 Use -n/--none to deactivate guards (no other arguments needed).
2424 When no guards are active, patches with positive guards are
2424 When no guards are active, patches with positive guards are
2425 skipped and patches with negative guards are pushed.
2425 skipped and patches with negative guards are pushed.
2426
2426
2427 qselect can change the guards on applied patches. It does not pop
2427 qselect can change the guards on applied patches. It does not pop
2428 guarded patches by default. Use --pop to pop back to the last
2428 guarded patches by default. Use --pop to pop back to the last
2429 applied patch that is not guarded. Use --reapply (which implies
2429 applied patch that is not guarded. Use --reapply (which implies
2430 --pop) to push back to the current patch afterwards, but skip
2430 --pop) to push back to the current patch afterwards, but skip
2431 guarded patches.
2431 guarded patches.
2432
2432
2433 Use -s/--series to print a list of all guards in the series file
2433 Use -s/--series to print a list of all guards in the series file
2434 (no other arguments needed). Use -v for more information.'''
2434 (no other arguments needed). Use -v for more information.'''
2435
2435
2436 q = repo.mq
2436 q = repo.mq
2437 guards = q.active()
2437 guards = q.active()
2438 if args or opts['none']:
2438 if args or opts['none']:
2439 old_unapplied = q.unapplied(repo)
2439 old_unapplied = q.unapplied(repo)
2440 old_guarded = [i for i in xrange(len(q.applied)) if
2440 old_guarded = [i for i in xrange(len(q.applied)) if
2441 not q.pushable(i)[0]]
2441 not q.pushable(i)[0]]
2442 q.set_active(args)
2442 q.set_active(args)
2443 q.save_dirty()
2443 q.save_dirty()
2444 if not args:
2444 if not args:
2445 ui.status(_('guards deactivated\n'))
2445 ui.status(_('guards deactivated\n'))
2446 if not opts['pop'] and not opts['reapply']:
2446 if not opts['pop'] and not opts['reapply']:
2447 unapplied = q.unapplied(repo)
2447 unapplied = q.unapplied(repo)
2448 guarded = [i for i in xrange(len(q.applied))
2448 guarded = [i for i in xrange(len(q.applied))
2449 if not q.pushable(i)[0]]
2449 if not q.pushable(i)[0]]
2450 if len(unapplied) != len(old_unapplied):
2450 if len(unapplied) != len(old_unapplied):
2451 ui.status(_('number of unguarded, unapplied patches has '
2451 ui.status(_('number of unguarded, unapplied patches has '
2452 'changed from %d to %d\n') %
2452 'changed from %d to %d\n') %
2453 (len(old_unapplied), len(unapplied)))
2453 (len(old_unapplied), len(unapplied)))
2454 if len(guarded) != len(old_guarded):
2454 if len(guarded) != len(old_guarded):
2455 ui.status(_('number of guarded, applied patches has changed '
2455 ui.status(_('number of guarded, applied patches has changed '
2456 'from %d to %d\n') %
2456 'from %d to %d\n') %
2457 (len(old_guarded), len(guarded)))
2457 (len(old_guarded), len(guarded)))
2458 elif opts['series']:
2458 elif opts['series']:
2459 guards = {}
2459 guards = {}
2460 noguards = 0
2460 noguards = 0
2461 for gs in q.series_guards:
2461 for gs in q.series_guards:
2462 if not gs:
2462 if not gs:
2463 noguards += 1
2463 noguards += 1
2464 for g in gs:
2464 for g in gs:
2465 guards.setdefault(g, 0)
2465 guards.setdefault(g, 0)
2466 guards[g] += 1
2466 guards[g] += 1
2467 if ui.verbose:
2467 if ui.verbose:
2468 guards['NONE'] = noguards
2468 guards['NONE'] = noguards
2469 guards = guards.items()
2469 guards = guards.items()
2470 guards.sort(key=lambda x: x[0][1:])
2470 guards.sort(key=lambda x: x[0][1:])
2471 if guards:
2471 if guards:
2472 ui.note(_('guards in series file:\n'))
2472 ui.note(_('guards in series file:\n'))
2473 for guard, count in guards:
2473 for guard, count in guards:
2474 ui.note('%2d ' % count)
2474 ui.note('%2d ' % count)
2475 ui.write(guard, '\n')
2475 ui.write(guard, '\n')
2476 else:
2476 else:
2477 ui.note(_('no guards in series file\n'))
2477 ui.note(_('no guards in series file\n'))
2478 else:
2478 else:
2479 if guards:
2479 if guards:
2480 ui.note(_('active guards:\n'))
2480 ui.note(_('active guards:\n'))
2481 for g in guards:
2481 for g in guards:
2482 ui.write(g, '\n')
2482 ui.write(g, '\n')
2483 else:
2483 else:
2484 ui.write(_('no active guards\n'))
2484 ui.write(_('no active guards\n'))
2485 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2485 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2486 popped = False
2486 popped = False
2487 if opts['pop'] or opts['reapply']:
2487 if opts['pop'] or opts['reapply']:
2488 for i in xrange(len(q.applied)):
2488 for i in xrange(len(q.applied)):
2489 pushable, reason = q.pushable(i)
2489 pushable, reason = q.pushable(i)
2490 if not pushable:
2490 if not pushable:
2491 ui.status(_('popping guarded patches\n'))
2491 ui.status(_('popping guarded patches\n'))
2492 popped = True
2492 popped = True
2493 if i == 0:
2493 if i == 0:
2494 q.pop(repo, all=True)
2494 q.pop(repo, all=True)
2495 else:
2495 else:
2496 q.pop(repo, i - 1)
2496 q.pop(repo, i - 1)
2497 break
2497 break
2498 if popped:
2498 if popped:
2499 try:
2499 try:
2500 if reapply:
2500 if reapply:
2501 ui.status(_('reapplying unguarded patches\n'))
2501 ui.status(_('reapplying unguarded patches\n'))
2502 q.push(repo, reapply)
2502 q.push(repo, reapply)
2503 finally:
2503 finally:
2504 q.save_dirty()
2504 q.save_dirty()
2505
2505
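# Illustration (not part of mq.py): a standalone sketch of the pushability rule
# the qselect docstring above describes -- a patch with no guards is always
# pushed, a matching negative guard always blocks the push, and if positive
# guards exist at least one of them must be active.
def _pushable(patch_guards, active_guards):
    if not patch_guards:
        return True
    if any(g[1:] in active_guards for g in patch_guards if g.startswith('-')):
        return False
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    if positives:
        return any(g in active_guards for g in positives)
    return True

assert _pushable([], ['stable'])
assert not _pushable(['-stable'], ['stable'])
assert _pushable(['+stable'], ['stable'])
assert not _pushable(['+stable'], [])          # positive guards, none active
assert _pushable(['-stable'], [])              # negative guard, not active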
2506 def finish(ui, repo, *revrange, **opts):
2506 def finish(ui, repo, *revrange, **opts):
2507 """move applied patches into repository history
2507 """move applied patches into repository history
2508
2508
2509 Finishes the specified revisions (corresponding to applied
2509 Finishes the specified revisions (corresponding to applied
2510 patches) by moving them out of mq control into regular repository
2510 patches) by moving them out of mq control into regular repository
2511 history.
2511 history.
2512
2512
2513 Accepts a revision range or the -a/--applied option. If --applied
2513 Accepts a revision range or the -a/--applied option. If --applied
2514 is specified, all applied mq revisions are removed from mq
2514 is specified, all applied mq revisions are removed from mq
2515 control. Otherwise, the given revisions must be at the base of the
2515 control. Otherwise, the given revisions must be at the base of the
2516 stack of applied patches.
2516 stack of applied patches.
2517
2517
2518 This can be especially useful if your changes have been applied to
2518 This can be especially useful if your changes have been applied to
2519 an upstream repository, or if you are about to push your changes
2519 an upstream repository, or if you are about to push your changes
2520 to upstream.
2520 to upstream.
2521 """
2521 """
2522 if not opts['applied'] and not revrange:
2522 if not opts['applied'] and not revrange:
2523 raise util.Abort(_('no revisions specified'))
2523 raise util.Abort(_('no revisions specified'))
2524 elif opts['applied']:
2524 elif opts['applied']:
2525 revrange = ('qbase:qtip',) + revrange
2525 revrange = ('qbase:qtip',) + revrange
2526
2526
2527 q = repo.mq
2527 q = repo.mq
2528 if not q.applied:
2528 if not q.applied:
2529 ui.status(_('no patches applied\n'))
2529 ui.status(_('no patches applied\n'))
2530 return 0
2530 return 0
2531
2531
2532 revs = cmdutil.revrange(repo, revrange)
2532 revs = cmdutil.revrange(repo, revrange)
2533 q.finish(repo, revs)
2533 q.finish(repo, revs)
2534 q.save_dirty()
2534 q.save_dirty()
2535 return 0
2535 return 0
2536
2536
2537 def reposetup(ui, repo):
2537 def reposetup(ui, repo):
2538 class mqrepo(repo.__class__):
2538 class mqrepo(repo.__class__):
2539 @util.propertycache
2539 @util.propertycache
2540 def mq(self):
2540 def mq(self):
2541 return queue(self.ui, self.join(""))
2541 return queue(self.ui, self.join(""))
2542
2542
2543 def abort_if_wdir_patched(self, errmsg, force=False):
2543 def abort_if_wdir_patched(self, errmsg, force=False):
2544 if self.mq.applied and not force:
2544 if self.mq.applied and not force:
2545 parent = self.dirstate.parents()[0]
2545 parent = self.dirstate.parents()[0]
2546 if parent in [s.node for s in self.mq.applied]:
2546 if parent in [s.node for s in self.mq.applied]:
2547 raise util.Abort(errmsg)
2547 raise util.Abort(errmsg)
2548
2548
2549 def commit(self, text="", user=None, date=None, match=None,
2549 def commit(self, text="", user=None, date=None, match=None,
2550 force=False, editor=False, extra={}):
2550 force=False, editor=False, extra={}):
2551 self.abort_if_wdir_patched(
2551 self.abort_if_wdir_patched(
2552 _('cannot commit over an applied mq patch'),
2552 _('cannot commit over an applied mq patch'),
2553 force)
2553 force)
2554
2554
2555 return super(mqrepo, self).commit(text, user, date, match, force,
2555 return super(mqrepo, self).commit(text, user, date, match, force,
2556 editor, extra)
2556 editor, extra)
2557
2557
2558 def push(self, remote, force=False, revs=None):
2558 def push(self, remote, force=False, revs=None, newbranch=False):
2559 if self.mq.applied and not force and not revs:
2559 if self.mq.applied and not force and not revs:
2560 raise util.Abort(_('source has mq patches applied'))
2560 raise util.Abort(_('source has mq patches applied'))
2561 return super(mqrepo, self).push(remote, force, revs)
2561 return super(mqrepo, self).push(remote, force, revs, newbranch)
2562
2562
2563 def _findtags(self):
2563 def _findtags(self):
2564 '''augment tags from base class with patch tags'''
2564 '''augment tags from base class with patch tags'''
2565 result = super(mqrepo, self)._findtags()
2565 result = super(mqrepo, self)._findtags()
2566
2566
2567 q = self.mq
2567 q = self.mq
2568 if not q.applied:
2568 if not q.applied:
2569 return result
2569 return result
2570
2570
2571 mqtags = [(patch.node, patch.name) for patch in q.applied]
2571 mqtags = [(patch.node, patch.name) for patch in q.applied]
2572
2572
2573 if mqtags[-1][0] not in self.changelog.nodemap:
2573 if mqtags[-1][0] not in self.changelog.nodemap:
2574 self.ui.warn(_('mq status file refers to unknown node %s\n')
2574 self.ui.warn(_('mq status file refers to unknown node %s\n')
2575 % short(mqtags[-1][0]))
2575 % short(mqtags[-1][0]))
2576 return result
2576 return result
2577
2577
2578 mqtags.append((mqtags[-1][0], 'qtip'))
2578 mqtags.append((mqtags[-1][0], 'qtip'))
2579 mqtags.append((mqtags[0][0], 'qbase'))
2579 mqtags.append((mqtags[0][0], 'qbase'))
2580 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2580 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2581 tags = result[0]
2581 tags = result[0]
2582 for patch in mqtags:
2582 for patch in mqtags:
2583 if patch[1] in tags:
2583 if patch[1] in tags:
2584 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2584 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2585 % patch[1])
2585 % patch[1])
2586 else:
2586 else:
2587 tags[patch[1]] = patch[0]
2587 tags[patch[1]] = patch[0]
2588
2588
2589 return result
2589 return result
2590
2590
2591 def _branchtags(self, partial, lrev):
2591 def _branchtags(self, partial, lrev):
2592 q = self.mq
2592 q = self.mq
2593 if not q.applied:
2593 if not q.applied:
2594 return super(mqrepo, self)._branchtags(partial, lrev)
2594 return super(mqrepo, self)._branchtags(partial, lrev)
2595
2595
2596 cl = self.changelog
2596 cl = self.changelog
2597 qbasenode = q.applied[0].node
2597 qbasenode = q.applied[0].node
2598 if qbasenode not in cl.nodemap:
2598 if qbasenode not in cl.nodemap:
2599 self.ui.warn(_('mq status file refers to unknown node %s\n')
2599 self.ui.warn(_('mq status file refers to unknown node %s\n')
2600 % short(qbasenode))
2600 % short(qbasenode))
2601 return super(mqrepo, self)._branchtags(partial, lrev)
2601 return super(mqrepo, self)._branchtags(partial, lrev)
2602
2602
2603 qbase = cl.rev(qbasenode)
2603 qbase = cl.rev(qbasenode)
2604 start = lrev + 1
2604 start = lrev + 1
2605 if start < qbase:
2605 if start < qbase:
2606 # update the cache (excluding the patches) and save it
2606 # update the cache (excluding the patches) and save it
2607 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
2607 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
2608 self._updatebranchcache(partial, ctxgen)
2608 self._updatebranchcache(partial, ctxgen)
2609 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
2609 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
2610 start = qbase
2610 start = qbase
2611 # if start = qbase, the cache is as updated as it should be.
2611 # if start = qbase, the cache is as updated as it should be.
2612 # if start > qbase, the cache includes (part of) the patches.
2612 # if start > qbase, the cache includes (part of) the patches.
2613 # we might as well use it, but we won't save it.
2613 # we might as well use it, but we won't save it.
2614
2614
2615 # update the cache up to the tip
2615 # update the cache up to the tip
2616 ctxgen = (self[r] for r in xrange(start, len(cl)))
2616 ctxgen = (self[r] for r in xrange(start, len(cl)))
2617 self._updatebranchcache(partial, ctxgen)
2617 self._updatebranchcache(partial, ctxgen)
2618
2618
2619 return partial
2619 return partial
2620
2620
2621 if repo.local():
2621 if repo.local():
2622 repo.__class__ = mqrepo
2622 repo.__class__ = mqrepo
2623
2623
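# Illustration (not part of mq.py): a minimal standalone sketch of the subclass
# override used by reposetup() above -- mq's push() refuses to push while
# patches are applied unless forced or given explicit revisions, and otherwise
# forwards every argument, including the new 'newbranch' flag, to the base
# class. Both classes here are dummies invented for the example.
class _BaseRepo(object):
    def push(self, remote, force=False, revs=None, newbranch=False):
        return ('pushed to %s' % remote, force, revs, newbranch)

class _MqRepo(_BaseRepo):
    applied = ['a.patch']                      # pretend one patch is applied
    def push(self, remote, force=False, revs=None, newbranch=False):
        if self.applied and not force and not revs:
            raise RuntimeError('source has mq patches applied')
        return super(_MqRepo, self).push(remote, force, revs, newbranch)

print(_MqRepo().push('default', force=True, newbranch=True))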
2624 def mqimport(orig, ui, repo, *args, **kwargs):
2624 def mqimport(orig, ui, repo, *args, **kwargs):
2625 if (hasattr(repo, 'abort_if_wdir_patched')
2625 if (hasattr(repo, 'abort_if_wdir_patched')
2626 and not kwargs.get('no_commit', False)):
2626 and not kwargs.get('no_commit', False)):
2627 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2627 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2628 kwargs.get('force'))
2628 kwargs.get('force'))
2629 return orig(ui, repo, *args, **kwargs)
2629 return orig(ui, repo, *args, **kwargs)
2630
2630
2631 def mqinit(orig, ui, *args, **kwargs):
2631 def mqinit(orig, ui, *args, **kwargs):
2632 mq = kwargs.pop('mq', None)
2632 mq = kwargs.pop('mq', None)
2633
2633
2634 if not mq:
2634 if not mq:
2635 return orig(ui, *args, **kwargs)
2635 return orig(ui, *args, **kwargs)
2636
2636
2637 if args:
2637 if args:
2638 repopath = args[0]
2638 repopath = args[0]
2639 if not hg.islocal(repopath):
2639 if not hg.islocal(repopath):
2640 raise util.Abort(_('only a local queue repository '
2640 raise util.Abort(_('only a local queue repository '
2641 'may be initialized'))
2641 'may be initialized'))
2642 else:
2642 else:
2643 repopath = cmdutil.findrepo(os.getcwd())
2643 repopath = cmdutil.findrepo(os.getcwd())
2644 if not repopath:
2644 if not repopath:
2645 raise util.Abort(_('There is no Mercurial repository here '
2645 raise util.Abort(_('There is no Mercurial repository here '
2646 '(.hg not found)'))
2646 '(.hg not found)'))
2647 repo = hg.repository(ui, repopath)
2647 repo = hg.repository(ui, repopath)
2648 return qinit(ui, repo, True)
2648 return qinit(ui, repo, True)
2649
2649
2650 def mqcommand(orig, ui, repo, *args, **kwargs):
2650 def mqcommand(orig, ui, repo, *args, **kwargs):
2651 """Add --mq option to operate on patch repository instead of main"""
2651 """Add --mq option to operate on patch repository instead of main"""
2652
2652
2653 # some commands do not like getting unknown options
2653 # some commands do not like getting unknown options
2654 mq = kwargs.pop('mq', None)
2654 mq = kwargs.pop('mq', None)
2655
2655
2656 if not mq:
2656 if not mq:
2657 return orig(ui, repo, *args, **kwargs)
2657 return orig(ui, repo, *args, **kwargs)
2658
2658
2659 q = repo.mq
2659 q = repo.mq
2660 r = q.qrepo()
2660 r = q.qrepo()
2661 if not r:
2661 if not r:
2662 raise util.Abort(_('no queue repository'))
2662 raise util.Abort(_('no queue repository'))
2663 return orig(r.ui, r, *args, **kwargs)
2663 return orig(r.ui, r, *args, **kwargs)
2664
2664
2665 def summary(orig, ui, repo, *args, **kwargs):
2665 def summary(orig, ui, repo, *args, **kwargs):
2666 r = orig(ui, repo, *args, **kwargs)
2666 r = orig(ui, repo, *args, **kwargs)
2667 q = repo.mq
2667 q = repo.mq
2668 m = []
2668 m = []
2669 a, u = len(q.applied), len(q.unapplied(repo))
2669 a, u = len(q.applied), len(q.unapplied(repo))
2670 if a:
2670 if a:
2671 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
2671 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
2672 if u:
2672 if u:
2673 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
2673 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
2674 if m:
2674 if m:
2675 ui.write("mq: %s\n" % ', '.join(m))
2675 ui.write("mq: %s\n" % ', '.join(m))
2676 else:
2676 else:
2677 ui.note(_("mq: (empty queue)\n"))
2677 ui.note(_("mq: (empty queue)\n"))
2678 return r
2678 return r
2679
2679
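# Illustration (not part of mq.py): a standalone sketch of the single "mq:" line
# the summary() wrapper above appends to hg summary output, assembled from the
# applied/unapplied patch counts.
def _mq_summary_line(applied, unapplied):
    parts = []
    if applied:
        parts.append('%d applied' % applied)
    if unapplied:
        parts.append('%d unapplied' % unapplied)
    return ('mq: %s' % ', '.join(parts)) if parts else 'mq: (empty queue)'

assert _mq_summary_line(2, 3) == 'mq: 2 applied, 3 unapplied'
assert _mq_summary_line(0, 0) == 'mq: (empty queue)'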
2680 def uisetup(ui):
2680 def uisetup(ui):
2681 mqopt = [('', 'mq', None, _("operate on patch repository"))]
2681 mqopt = [('', 'mq', None, _("operate on patch repository"))]
2682
2682
2683 extensions.wrapcommand(commands.table, 'import', mqimport)
2683 extensions.wrapcommand(commands.table, 'import', mqimport)
2684 extensions.wrapcommand(commands.table, 'summary', summary)
2684 extensions.wrapcommand(commands.table, 'summary', summary)
2685
2685
2686 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
2686 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
2687 entry[1].extend(mqopt)
2687 entry[1].extend(mqopt)
2688
2688
2689 norepo = commands.norepo.split(" ")
2689 norepo = commands.norepo.split(" ")
2690 for cmd in commands.table.keys():
2690 for cmd in commands.table.keys():
2691 cmd = cmdutil.parsealiases(cmd)[0]
2691 cmd = cmdutil.parsealiases(cmd)[0]
2692 if cmd in norepo:
2692 if cmd in norepo:
2693 continue
2693 continue
2694 entry = extensions.wrapcommand(commands.table, cmd, mqcommand)
2694 entry = extensions.wrapcommand(commands.table, cmd, mqcommand)
2695 entry[1].extend(mqopt)
2695 entry[1].extend(mqopt)
2696
2696
2697 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2697 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2698
2698
2699 cmdtable = {
2699 cmdtable = {
2700 "qapplied":
2700 "qapplied":
2701 (applied,
2701 (applied,
2702 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
2702 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
2703 _('hg qapplied [-1] [-s] [PATCH]')),
2703 _('hg qapplied [-1] [-s] [PATCH]')),
2704 "qclone":
2704 "qclone":
2705 (clone,
2705 (clone,
2706 [('', 'pull', None, _('use pull protocol to copy metadata')),
2706 [('', 'pull', None, _('use pull protocol to copy metadata')),
2707 ('U', 'noupdate', None, _('do not update the new working directories')),
2707 ('U', 'noupdate', None, _('do not update the new working directories')),
2708 ('', 'uncompressed', None,
2708 ('', 'uncompressed', None,
2709 _('use uncompressed transfer (fast over LAN)')),
2709 _('use uncompressed transfer (fast over LAN)')),
2710 ('p', 'patches', '', _('location of source patch repository')),
2710 ('p', 'patches', '', _('location of source patch repository')),
2711 ] + commands.remoteopts,
2711 ] + commands.remoteopts,
2712 _('hg qclone [OPTION]... SOURCE [DEST]')),
2712 _('hg qclone [OPTION]... SOURCE [DEST]')),
2713 "qcommit|qci":
2713 "qcommit|qci":
2714 (commit,
2714 (commit,
2715 commands.table["^commit|ci"][1],
2715 commands.table["^commit|ci"][1],
2716 _('hg qcommit [OPTION]... [FILE]...')),
2716 _('hg qcommit [OPTION]... [FILE]...')),
2717 "^qdiff":
2717 "^qdiff":
2718 (diff,
2718 (diff,
2719 commands.diffopts + commands.diffopts2 + commands.walkopts,
2719 commands.diffopts + commands.diffopts2 + commands.walkopts,
2720 _('hg qdiff [OPTION]... [FILE]...')),
2720 _('hg qdiff [OPTION]... [FILE]...')),
2721 "qdelete|qremove|qrm":
2721 "qdelete|qremove|qrm":
2722 (delete,
2722 (delete,
2723 [('k', 'keep', None, _('keep patch file')),
2723 [('k', 'keep', None, _('keep patch file')),
2724 ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'))],
2724 ('r', 'rev', [], _('stop managing a revision (DEPRECATED)'))],
2725 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2725 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2726 'qfold':
2726 'qfold':
2727 (fold,
2727 (fold,
2728 [('e', 'edit', None, _('edit patch header')),
2728 [('e', 'edit', None, _('edit patch header')),
2729 ('k', 'keep', None, _('keep folded patch files')),
2729 ('k', 'keep', None, _('keep folded patch files')),
2730 ] + commands.commitopts,
2730 ] + commands.commitopts,
2731 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2731 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2732 'qgoto':
2732 'qgoto':
2733 (goto,
2733 (goto,
2734 [('f', 'force', None, _('overwrite any local changes'))],
2734 [('f', 'force', None, _('overwrite any local changes'))],
2735 _('hg qgoto [OPTION]... PATCH')),
2735 _('hg qgoto [OPTION]... PATCH')),
2736 'qguard':
2736 'qguard':
2737 (guard,
2737 (guard,
2738 [('l', 'list', None, _('list all patches and guards')),
2738 [('l', 'list', None, _('list all patches and guards')),
2739 ('n', 'none', None, _('drop all guards'))],
2739 ('n', 'none', None, _('drop all guards'))],
2740 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
2740 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
2741 'qheader': (header, [], _('hg qheader [PATCH]')),
2741 'qheader': (header, [], _('hg qheader [PATCH]')),
2742 "qimport":
2742 "qimport":
2743 (qimport,
2743 (qimport,
2744 [('e', 'existing', None, _('import file in patch directory')),
2744 [('e', 'existing', None, _('import file in patch directory')),
2745 ('n', 'name', '', _('name of patch file')),
2745 ('n', 'name', '', _('name of patch file')),
2746 ('f', 'force', None, _('overwrite existing files')),
2746 ('f', 'force', None, _('overwrite existing files')),
2747 ('r', 'rev', [], _('place existing revisions under mq control')),
2747 ('r', 'rev', [], _('place existing revisions under mq control')),
2748 ('g', 'git', None, _('use git extended diff format')),
2748 ('g', 'git', None, _('use git extended diff format')),
2749 ('P', 'push', None, _('qpush after importing'))],
2749 ('P', 'push', None, _('qpush after importing'))],
2750 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2750 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2751 "^qinit":
2751 "^qinit":
2752 (init,
2752 (init,
2753 [('c', 'create-repo', None, _('create queue repository'))],
2753 [('c', 'create-repo', None, _('create queue repository'))],
2754 _('hg qinit [-c]')),
2754 _('hg qinit [-c]')),
2755 "^qnew":
2755 "^qnew":
2756 (new,
2756 (new,
2757 [('e', 'edit', None, _('edit commit message')),
2757 [('e', 'edit', None, _('edit commit message')),
2758 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2758 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2759 ('g', 'git', None, _('use git extended diff format')),
2759 ('g', 'git', None, _('use git extended diff format')),
2760 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2760 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2761 ('u', 'user', '', _('add "From: <given user>" to patch')),
2761 ('u', 'user', '', _('add "From: <given user>" to patch')),
2762 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2762 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2763 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2763 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2764 ] + commands.walkopts + commands.commitopts,
2764 ] + commands.walkopts + commands.commitopts,
2765 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
2765 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
2766 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2766 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2767 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2767 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2768 "^qpop":
2768 "^qpop":
2769 (pop,
2769 (pop,
2770 [('a', 'all', None, _('pop all patches')),
2770 [('a', 'all', None, _('pop all patches')),
2771 ('n', 'name', '', _('queue name to pop (DEPRECATED)')),
2771 ('n', 'name', '', _('queue name to pop (DEPRECATED)')),
2772 ('f', 'force', None, _('forget any local changes to patched files'))],
2772 ('f', 'force', None, _('forget any local changes to patched files'))],
2773 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2773 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2774 "^qpush":
2774 "^qpush":
2775 (push,
2775 (push,
2776 [('f', 'force', None, _('apply if the patch has rejects')),
2776 [('f', 'force', None, _('apply if the patch has rejects')),
2777 ('l', 'list', None, _('list patch name in commit text')),
2777 ('l', 'list', None, _('list patch name in commit text')),
2778 ('a', 'all', None, _('apply all patches')),
2778 ('a', 'all', None, _('apply all patches')),
2779 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2779 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2780 ('n', 'name', '', _('merge queue name (DEPRECATED)')),
2780 ('n', 'name', '', _('merge queue name (DEPRECATED)')),
2781 ('', 'move', None, _('reorder patch series and apply only the patch'))],
2781 ('', 'move', None, _('reorder patch series and apply only the patch'))],
2782 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [--move] [PATCH | INDEX]')),
2782 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [--move] [PATCH | INDEX]')),
2783 "^qrefresh":
2783 "^qrefresh":
2784 (refresh,
2784 (refresh,
2785 [('e', 'edit', None, _('edit commit message')),
2785 [('e', 'edit', None, _('edit commit message')),
2786 ('g', 'git', None, _('use git extended diff format')),
2786 ('g', 'git', None, _('use git extended diff format')),
2787 ('s', 'short', None,
2787 ('s', 'short', None,
2788 _('refresh only files already in the patch and specified files')),
2788 _('refresh only files already in the patch and specified files')),
2789 ('U', 'currentuser', None,
2789 ('U', 'currentuser', None,
2790 _('add/update author field in patch with current user')),
2790 _('add/update author field in patch with current user')),
2791 ('u', 'user', '',
2791 ('u', 'user', '',
2792 _('add/update author field in patch with given user')),
2792 _('add/update author field in patch with given user')),
2793 ('D', 'currentdate', None,
2793 ('D', 'currentdate', None,
2794 _('add/update date field in patch with current date')),
2794 _('add/update date field in patch with current date')),
2795 ('d', 'date', '',
2795 ('d', 'date', '',
2796 _('add/update date field in patch with given date'))
2796 _('add/update date field in patch with given date'))
2797 ] + commands.walkopts + commands.commitopts,
2797 ] + commands.walkopts + commands.commitopts,
2798 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2798 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2799 'qrename|qmv':
2799 'qrename|qmv':
2800 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2800 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2801 "qrestore":
2801 "qrestore":
2802 (restore,
2802 (restore,
2803 [('d', 'delete', None, _('delete save entry')),
2803 [('d', 'delete', None, _('delete save entry')),
2804 ('u', 'update', None, _('update queue working directory'))],
2804 ('u', 'update', None, _('update queue working directory'))],
2805 _('hg qrestore [-d] [-u] REV')),
2805 _('hg qrestore [-d] [-u] REV')),
2806 "qsave":
2806 "qsave":
2807 (save,
2807 (save,
2808 [('c', 'copy', None, _('copy patch directory')),
2808 [('c', 'copy', None, _('copy patch directory')),
2809 ('n', 'name', '', _('copy directory name')),
2809 ('n', 'name', '', _('copy directory name')),
2810 ('e', 'empty', None, _('clear queue status file')),
2810 ('e', 'empty', None, _('clear queue status file')),
2811 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2811 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2812 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2812 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2813 "qselect":
2813 "qselect":
2814 (select,
2814 (select,
2815 [('n', 'none', None, _('disable all guards')),
2815 [('n', 'none', None, _('disable all guards')),
2816 ('s', 'series', None, _('list all guards in series file')),
2816 ('s', 'series', None, _('list all guards in series file')),
2817 ('', 'pop', None, _('pop to before first guarded applied patch')),
2817 ('', 'pop', None, _('pop to before first guarded applied patch')),
2818 ('', 'reapply', None, _('pop, then reapply patches'))],
2818 ('', 'reapply', None, _('pop, then reapply patches'))],
2819 _('hg qselect [OPTION]... [GUARD]...')),
2819 _('hg qselect [OPTION]... [GUARD]...')),
2820 "qseries":
2820 "qseries":
2821 (series,
2821 (series,
2822 [('m', 'missing', None, _('print patches not in series')),
2822 [('m', 'missing', None, _('print patches not in series')),
2823 ] + seriesopts,
2823 ] + seriesopts,
2824 _('hg qseries [-ms]')),
2824 _('hg qseries [-ms]')),
2825 "strip":
2825 "strip":
2826 (strip,
2826 (strip,
2827 [('f', 'force', None, _('force removal of changesets even if the '
2827 [('f', 'force', None, _('force removal of changesets even if the '
2828 'working directory has uncommitted changes')),
2828 'working directory has uncommitted changes')),
2829 ('b', 'backup', None, _('bundle only changesets with local revision'
2829 ('b', 'backup', None, _('bundle only changesets with local revision'
2830 ' number greater than REV which are not'
2830 ' number greater than REV which are not'
2831 ' descendants of REV (DEPRECATED)')),
2831 ' descendants of REV (DEPRECATED)')),
2832 ('n', 'nobackup', None, _('no backups'))],
2832 ('n', 'nobackup', None, _('no backups'))],
2833 _('hg strip [-f] [-n] REV')),
2833 _('hg strip [-f] [-n] REV')),
2834 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2834 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2835 "qunapplied":
2835 "qunapplied":
2836 (unapplied,
2836 (unapplied,
2837 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2837 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
2838 _('hg qunapplied [-1] [-s] [PATCH]')),
2838 _('hg qunapplied [-1] [-s] [PATCH]')),
2839 "qfinish":
2839 "qfinish":
2840 (finish,
2840 (finish,
2841 [('a', 'applied', None, _('finish all applied changesets'))],
2841 [('a', 'applied', None, _('finish all applied changesets'))],
2842 _('hg qfinish [-a] [REV]...')),
2842 _('hg qfinish [-a] [REV]...')),
2843 }
2843 }
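# A compact sketch of the shape of the entries registered above: each value in
# the table is (function, options, synopsis), and every option is a
# (shortname, longname, default, help) tuple.  The 'qtoy' command and the
# hand-rolled lookup below are placeholders; real dispatch goes through
# Mercurial's dispatch/cmdutil machinery.

def qtoy(ui, repo, patch=None, **opts):
    if opts.get('summary'):
        print('would print the first line of %s' % (patch or 'the top patch'))
    return 0

toy_cmdtable = {
    "qtoy":
        (qtoy,
         [('s', 'summary', None, 'print first line of patch header')],
         'hg qtoy [-s] [PATCH]')}

func, options, synopsis = toy_cmdtable['qtoy']
print(synopsis)
func(None, None, 'my-patch', summary=True)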
2844
2844
2845 colortable = {'qguard.negative': 'red',
2845 colortable = {'qguard.negative': 'red',
2846 'qguard.positive': 'yellow',
2846 'qguard.positive': 'yellow',
2847 'qguard.unguarded': 'green',
2847 'qguard.unguarded': 'green',
2848 'qseries.applied': 'blue bold underline',
2848 'qseries.applied': 'blue bold underline',
2849 'qseries.guarded': 'black bold',
2849 'qseries.guarded': 'black bold',
2850 'qseries.missing': 'red bold',
2850 'qseries.missing': 'red bold',
2851 'qseries.unapplied': 'black bold'}
2851 'qseries.unapplied': 'black bold'}
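# colortable only supplies default effects for the labels mq emits; with the
# color extension enabled, any of them can be overridden from the [color]
# section of hgrc, for example:
#
#   [color]
#   qseries.applied = green bold
#
# A toy illustration of that precedence (defaults overlaid by user settings;
# the values are invented):
defaults = {'qseries.applied': 'blue bold underline',
            'qseries.missing': 'red bold'}
userconf = {'qseries.applied': 'green bold'}
effective = dict(defaults, **userconf)
assert effective['qseries.applied'] == 'green bold'
assert effective['qseries.missing'] == 'red bold'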
@@ -1,4073 +1,4075 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, difflib, time, tempfile
11 import os, re, sys, difflib, time, tempfile
12 import hg, util, revlog, bundlerepo, extensions, copies, error
12 import hg, util, revlog, bundlerepo, extensions, copies, error
13 import patch, help, mdiff, url, encoding, templatekw
13 import patch, help, mdiff, url, encoding, templatekw
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
14 import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server
15 import merge as mergemod
15 import merge as mergemod
16 import minirst
16 import minirst
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
24 repository.
24 repository.
25
25
26 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
27 undo an add before that, see :hg:`forget`.
27 undo an add before that, see :hg:`forget`.
28
28
29 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
30
30
31 .. container:: verbose
31 .. container:: verbose
32
32
33 An example showing how new (unknown) files are added
33 An example showing how new (unknown) files are added
34 automatically by :hg:`add`::
34 automatically by :hg:`add`::
35
35
36 $ ls
36 $ ls
37 foo.c
37 foo.c
38 $ hg status
38 $ hg status
39 ? foo.c
39 ? foo.c
40 $ hg add
40 $ hg add
41 adding foo.c
41 adding foo.c
42 $ hg status
42 $ hg status
43 A foo.c
43 A foo.c
44 """
44 """
45
45
46 bad = []
46 bad = []
47 names = []
47 names = []
48 m = cmdutil.match(repo, pats, opts)
48 m = cmdutil.match(repo, pats, opts)
49 oldbad = m.bad
49 oldbad = m.bad
50 m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
50 m.bad = lambda x, y: bad.append(x) or oldbad(x, y)
51
51
52 for f in repo.walk(m):
52 for f in repo.walk(m):
53 exact = m.exact(f)
53 exact = m.exact(f)
54 if exact or f not in repo.dirstate:
54 if exact or f not in repo.dirstate:
55 names.append(f)
55 names.append(f)
56 if ui.verbose or not exact:
56 if ui.verbose or not exact:
57 ui.status(_('adding %s\n') % m.rel(f))
57 ui.status(_('adding %s\n') % m.rel(f))
58 if not opts.get('dry_run'):
58 if not opts.get('dry_run'):
59 bad += [f for f in repo.add(names) if f in m.files()]
59 bad += [f for f in repo.add(names) if f in m.files()]
60 return bad and 1 or 0
60 return bad and 1 or 0
61
61
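# add() chains its own callback onto the matcher's .bad hook so every pattern
# that fails to match is recorded, which is what drives the
# "return bad and 1 or 0" exit status.  A stripped-down illustration with a
# stand-in matcher class (ToyMatcher is not Mercurial's real matcher):

class ToyMatcher(object):
    def bad(self, f, msg):
        print('%s: %s' % (f, msg))

m = ToyMatcher()
bad = []
oldbad = m.bad
m.bad = lambda x, y: bad.append(x) or oldbad(x, y)   # same chaining trick

m.bad('nosuchfile.c', 'No such file or directory')
print(1 if bad else 0)   # the exit status add() would return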
62 def addremove(ui, repo, *pats, **opts):
62 def addremove(ui, repo, *pats, **opts):
63 """add all new files, delete all missing files
63 """add all new files, delete all missing files
64
64
65 Add all new files and remove all missing files from the
65 Add all new files and remove all missing files from the
66 repository.
66 repository.
67
67
68 New files are ignored if they match any of the patterns in
68 New files are ignored if they match any of the patterns in
69 .hgignore. As with add, these changes take effect at the next
69 .hgignore. As with add, these changes take effect at the next
70 commit.
70 commit.
71
71
72 Use the -s/--similarity option to detect renamed files. With a
72 Use the -s/--similarity option to detect renamed files. With a
73 parameter greater than 0, this compares every removed file with
73 parameter greater than 0, this compares every removed file with
74 every added file and records those similar enough as renames. This
74 every added file and records those similar enough as renames. This
75 option takes a percentage between 0 (disabled) and 100 (files must
75 option takes a percentage between 0 (disabled) and 100 (files must
76 be identical) as its parameter. Detecting renamed files this way
76 be identical) as its parameter. Detecting renamed files this way
77 can be expensive.
77 can be expensive.
78
78
79 Returns 0 if all files are successfully added.
79 Returns 0 if all files are successfully added.
80 """
80 """
81 try:
81 try:
82 sim = float(opts.get('similarity') or 0)
82 sim = float(opts.get('similarity') or 0)
83 except ValueError:
83 except ValueError:
84 raise util.Abort(_('similarity must be a number'))
84 raise util.Abort(_('similarity must be a number'))
85 if sim < 0 or sim > 100:
85 if sim < 0 or sim > 100:
86 raise util.Abort(_('similarity must be between 0 and 100'))
86 raise util.Abort(_('similarity must be between 0 and 100'))
87 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
87 return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0)
88
88
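# addremove() validates -s/--similarity as a 0-100 percentage and passes it on
# to cmdutil.addremove() as a 0.0-1.0 ratio.  The same checks in isolation
# (parse_similarity is an invented helper name):
def parse_similarity(value):
    try:
        sim = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

assert parse_similarity('75') == 0.75
assert parse_similarity(None) == 0.0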
89 def annotate(ui, repo, *pats, **opts):
89 def annotate(ui, repo, *pats, **opts):
90 """show changeset information by line for each file
90 """show changeset information by line for each file
91
91
92 List changes in files, showing the revision id responsible for
92 List changes in files, showing the revision id responsible for
93 each line
93 each line
94
94
95 This command is useful for discovering when a change was made and
95 This command is useful for discovering when a change was made and
96 by whom.
96 by whom.
97
97
98 Without the -a/--text option, annotate will avoid processing files
98 Without the -a/--text option, annotate will avoid processing files
99 it detects as binary. With -a, annotate will annotate the file
99 it detects as binary. With -a, annotate will annotate the file
100 anyway, although the results will probably be neither useful
100 anyway, although the results will probably be neither useful
101 nor desirable.
101 nor desirable.
102
102
103 Returns 0 on success.
103 Returns 0 on success.
104 """
104 """
105 if opts.get('follow'):
105 if opts.get('follow'):
106 # --follow is deprecated and now just an alias for -f/--file
106 # --follow is deprecated and now just an alias for -f/--file
107 # to mimic the behavior of Mercurial before version 1.5
107 # to mimic the behavior of Mercurial before version 1.5
108 opts['file'] = 1
108 opts['file'] = 1
109
109
110 datefunc = ui.quiet and util.shortdate or util.datestr
110 datefunc = ui.quiet and util.shortdate or util.datestr
111 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
111 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
112
112
113 if not pats:
113 if not pats:
114 raise util.Abort(_('at least one filename or pattern is required'))
114 raise util.Abort(_('at least one filename or pattern is required'))
115
115
116 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
116 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
117 ('number', lambda x: str(x[0].rev())),
117 ('number', lambda x: str(x[0].rev())),
118 ('changeset', lambda x: short(x[0].node())),
118 ('changeset', lambda x: short(x[0].node())),
119 ('date', getdate),
119 ('date', getdate),
120 ('file', lambda x: x[0].path()),
120 ('file', lambda x: x[0].path()),
121 ]
121 ]
122
122
123 if (not opts.get('user') and not opts.get('changeset')
123 if (not opts.get('user') and not opts.get('changeset')
124 and not opts.get('date') and not opts.get('file')):
124 and not opts.get('date') and not opts.get('file')):
125 opts['number'] = 1
125 opts['number'] = 1
126
126
127 linenumber = opts.get('line_number') is not None
127 linenumber = opts.get('line_number') is not None
128 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
128 if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
129 raise util.Abort(_('at least one of -n/-c is required for -l'))
129 raise util.Abort(_('at least one of -n/-c is required for -l'))
130
130
131 funcmap = [func for op, func in opmap if opts.get(op)]
131 funcmap = [func for op, func in opmap if opts.get(op)]
132 if linenumber:
132 if linenumber:
133 lastfunc = funcmap[-1]
133 lastfunc = funcmap[-1]
134 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
134 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
135
135
136 ctx = repo[opts.get('rev')]
136 ctx = repo[opts.get('rev')]
137 m = cmdutil.match(repo, pats, opts)
137 m = cmdutil.match(repo, pats, opts)
138 follow = not opts.get('no_follow')
138 follow = not opts.get('no_follow')
139 for abs in ctx.walk(m):
139 for abs in ctx.walk(m):
140 fctx = ctx[abs]
140 fctx = ctx[abs]
141 if not opts.get('text') and util.binary(fctx.data()):
141 if not opts.get('text') and util.binary(fctx.data()):
142 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
142 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
143 continue
143 continue
144
144
145 lines = fctx.annotate(follow=follow, linenumber=linenumber)
145 lines = fctx.annotate(follow=follow, linenumber=linenumber)
146 pieces = []
146 pieces = []
147
147
148 for f in funcmap:
148 for f in funcmap:
149 l = [f(n) for n, dummy in lines]
149 l = [f(n) for n, dummy in lines]
150 if l:
150 if l:
151 ml = max(map(len, l))
151 ml = max(map(len, l))
152 pieces.append(["%*s" % (ml, x) for x in l])
152 pieces.append(["%*s" % (ml, x) for x in l])
153
153
154 if pieces:
154 if pieces:
155 for p, l in zip(zip(*pieces), lines):
155 for p, l in zip(zip(*pieces), lines):
156 ui.write("%s: %s" % (" ".join(p), l[1]))
156 ui.write("%s: %s" % (" ".join(p), l[1]))
157
157
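# annotate() right-aligns each requested field to the widest value in its
# column via the "%*s" width specifier before joining the pieces.  The same
# alignment step over invented data:
import sys

annotated = [('12', 'first line\n'), ('7', 'second line\n'), ('104', 'third line\n')]
width = max(len(rev) for rev, _ in annotated)
for rev, text in annotated:
    sys.stdout.write("%*s: %s" % (width, rev, text))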
158 def archive(ui, repo, dest, **opts):
158 def archive(ui, repo, dest, **opts):
159 '''create an unversioned archive of a repository revision
159 '''create an unversioned archive of a repository revision
160
160
161 By default, the revision used is the parent of the working
161 By default, the revision used is the parent of the working
162 directory; use -r/--rev to specify a different revision.
162 directory; use -r/--rev to specify a different revision.
163
163
164 The archive type is automatically detected based on file
164 The archive type is automatically detected based on file
165 extension (or override using -t/--type).
165 extension (or override using -t/--type).
166
166
167 Valid types are:
167 Valid types are:
168
168
169 :``files``: a directory full of files (default)
169 :``files``: a directory full of files (default)
170 :``tar``: tar archive, uncompressed
170 :``tar``: tar archive, uncompressed
171 :``tbz2``: tar archive, compressed using bzip2
171 :``tbz2``: tar archive, compressed using bzip2
172 :``tgz``: tar archive, compressed using gzip
172 :``tgz``: tar archive, compressed using gzip
173 :``uzip``: zip archive, uncompressed
173 :``uzip``: zip archive, uncompressed
174 :``zip``: zip archive, compressed using deflate
174 :``zip``: zip archive, compressed using deflate
175
175
176 The exact name of the destination archive or directory is given
176 The exact name of the destination archive or directory is given
177 using a format string; see :hg:`help export` for details.
177 using a format string; see :hg:`help export` for details.
178
178
179 Each member added to an archive file has a directory prefix
179 Each member added to an archive file has a directory prefix
180 prepended. Use -p/--prefix to specify a format string for the
180 prepended. Use -p/--prefix to specify a format string for the
181 prefix. The default is the basename of the archive, with suffixes
181 prefix. The default is the basename of the archive, with suffixes
182 removed.
182 removed.
183
183
184 Returns 0 on success.
184 Returns 0 on success.
185 '''
185 '''
186
186
187 ctx = repo[opts.get('rev')]
187 ctx = repo[opts.get('rev')]
188 if not ctx:
188 if not ctx:
189 raise util.Abort(_('no working directory: please specify a revision'))
189 raise util.Abort(_('no working directory: please specify a revision'))
190 node = ctx.node()
190 node = ctx.node()
191 dest = cmdutil.make_filename(repo, dest, node)
191 dest = cmdutil.make_filename(repo, dest, node)
192 if os.path.realpath(dest) == repo.root:
192 if os.path.realpath(dest) == repo.root:
193 raise util.Abort(_('repository root cannot be destination'))
193 raise util.Abort(_('repository root cannot be destination'))
194
194
195 def guess_type():
195 def guess_type():
196 exttypes = {
196 exttypes = {
197 'tar': ['.tar'],
197 'tar': ['.tar'],
198 'tbz2': ['.tbz2', '.tar.bz2'],
198 'tbz2': ['.tbz2', '.tar.bz2'],
199 'tgz': ['.tgz', '.tar.gz'],
199 'tgz': ['.tgz', '.tar.gz'],
200 'zip': ['.zip'],
200 'zip': ['.zip'],
201 }
201 }
202
202
203 for type, extensions in exttypes.items():
203 for type, extensions in exttypes.items():
204 if util.any(dest.endswith(ext) for ext in extensions):
204 if util.any(dest.endswith(ext) for ext in extensions):
205 return type
205 return type
206 return None
206 return None
207
207
208 kind = opts.get('type') or guess_type() or 'files'
208 kind = opts.get('type') or guess_type() or 'files'
209 prefix = opts.get('prefix')
209 prefix = opts.get('prefix')
210
210
211 if dest == '-':
211 if dest == '-':
212 if kind == 'files':
212 if kind == 'files':
213 raise util.Abort(_('cannot archive plain files to stdout'))
213 raise util.Abort(_('cannot archive plain files to stdout'))
214 dest = sys.stdout
214 dest = sys.stdout
215 if not prefix:
215 if not prefix:
216 prefix = os.path.basename(repo.root) + '-%h'
216 prefix = os.path.basename(repo.root) + '-%h'
217
217
218 prefix = cmdutil.make_filename(repo, prefix, node)
218 prefix = cmdutil.make_filename(repo, prefix, node)
219 matchfn = cmdutil.match(repo, [], opts)
219 matchfn = cmdutil.match(repo, [], opts)
220 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
220 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
221 matchfn, prefix)
221 matchfn, prefix)
222
222
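# When -t/--type is not given, archive() guesses the archive kind from the
# destination's file extension.  The lookup in isolation (guess_kind is an
# illustrative name for the nested guess_type() above):
def guess_kind(dest):
    exttypes = {
        'tar': ['.tar'],
        'tbz2': ['.tbz2', '.tar.bz2'],
        'tgz': ['.tgz', '.tar.gz'],
        'zip': ['.zip'],
    }
    for kind, extensions in exttypes.items():
        if any(dest.endswith(ext) for ext in extensions):
            return kind
    return None

assert guess_kind('snapshot.tar.gz') == 'tgz'
assert guess_kind('snapshot') is None   # archive() then falls back to 'files'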
223 def backout(ui, repo, node=None, rev=None, **opts):
223 def backout(ui, repo, node=None, rev=None, **opts):
224 '''reverse effect of earlier changeset
224 '''reverse effect of earlier changeset
225
225
226 Commit the backed out changes as a new changeset. The new
226 Commit the backed out changes as a new changeset. The new
227 changeset is a child of the backed out changeset.
227 changeset is a child of the backed out changeset.
228
228
229 If you back out a changeset other than the tip, a new head is
229 If you back out a changeset other than the tip, a new head is
230 created. This head will be the new tip and you should merge this
230 created. This head will be the new tip and you should merge this
231 backout changeset with another head.
231 backout changeset with another head.
232
232
233 The --merge option remembers the parent of the working directory
233 The --merge option remembers the parent of the working directory
234 before starting the backout, then merges the new head with that
234 before starting the backout, then merges the new head with that
235 changeset afterwards. This saves you from doing the merge by hand.
235 changeset afterwards. This saves you from doing the merge by hand.
236 The result of this merge is not committed, as with a normal merge.
236 The result of this merge is not committed, as with a normal merge.
237
237
238 See :hg:`help dates` for a list of formats valid for -d/--date.
238 See :hg:`help dates` for a list of formats valid for -d/--date.
239
239
240 Returns 0 on success.
240 Returns 0 on success.
241 '''
241 '''
242 if rev and node:
242 if rev and node:
243 raise util.Abort(_("please specify just one revision"))
243 raise util.Abort(_("please specify just one revision"))
244
244
245 if not rev:
245 if not rev:
246 rev = node
246 rev = node
247
247
248 if not rev:
248 if not rev:
249 raise util.Abort(_("please specify a revision to backout"))
249 raise util.Abort(_("please specify a revision to backout"))
250
250
251 date = opts.get('date')
251 date = opts.get('date')
252 if date:
252 if date:
253 opts['date'] = util.parsedate(date)
253 opts['date'] = util.parsedate(date)
254
254
255 cmdutil.bail_if_changed(repo)
255 cmdutil.bail_if_changed(repo)
256 node = repo.lookup(rev)
256 node = repo.lookup(rev)
257
257
258 op1, op2 = repo.dirstate.parents()
258 op1, op2 = repo.dirstate.parents()
259 a = repo.changelog.ancestor(op1, node)
259 a = repo.changelog.ancestor(op1, node)
260 if a != node:
260 if a != node:
261 raise util.Abort(_('cannot backout change on a different branch'))
261 raise util.Abort(_('cannot backout change on a different branch'))
262
262
263 p1, p2 = repo.changelog.parents(node)
263 p1, p2 = repo.changelog.parents(node)
264 if p1 == nullid:
264 if p1 == nullid:
265 raise util.Abort(_('cannot backout a change with no parents'))
265 raise util.Abort(_('cannot backout a change with no parents'))
266 if p2 != nullid:
266 if p2 != nullid:
267 if not opts.get('parent'):
267 if not opts.get('parent'):
268 raise util.Abort(_('cannot backout a merge changeset without '
268 raise util.Abort(_('cannot backout a merge changeset without '
269 '--parent'))
269 '--parent'))
270 p = repo.lookup(opts['parent'])
270 p = repo.lookup(opts['parent'])
271 if p not in (p1, p2):
271 if p not in (p1, p2):
272 raise util.Abort(_('%s is not a parent of %s') %
272 raise util.Abort(_('%s is not a parent of %s') %
273 (short(p), short(node)))
273 (short(p), short(node)))
274 parent = p
274 parent = p
275 else:
275 else:
276 if opts.get('parent'):
276 if opts.get('parent'):
277 raise util.Abort(_('cannot use --parent on non-merge changeset'))
277 raise util.Abort(_('cannot use --parent on non-merge changeset'))
278 parent = p1
278 parent = p1
279
279
280 # the backout should appear on the same branch
280 # the backout should appear on the same branch
281 branch = repo.dirstate.branch()
281 branch = repo.dirstate.branch()
282 hg.clean(repo, node, show_stats=False)
282 hg.clean(repo, node, show_stats=False)
283 repo.dirstate.setbranch(branch)
283 repo.dirstate.setbranch(branch)
284 revert_opts = opts.copy()
284 revert_opts = opts.copy()
285 revert_opts['date'] = None
285 revert_opts['date'] = None
286 revert_opts['all'] = True
286 revert_opts['all'] = True
287 revert_opts['rev'] = hex(parent)
287 revert_opts['rev'] = hex(parent)
288 revert_opts['no_backup'] = None
288 revert_opts['no_backup'] = None
289 revert(ui, repo, **revert_opts)
289 revert(ui, repo, **revert_opts)
290 commit_opts = opts.copy()
290 commit_opts = opts.copy()
291 commit_opts['addremove'] = False
291 commit_opts['addremove'] = False
292 if not commit_opts['message'] and not commit_opts['logfile']:
292 if not commit_opts['message'] and not commit_opts['logfile']:
293 # we don't translate commit messages
293 # we don't translate commit messages
294 commit_opts['message'] = "Backed out changeset %s" % short(node)
294 commit_opts['message'] = "Backed out changeset %s" % short(node)
295 commit_opts['force_editor'] = True
295 commit_opts['force_editor'] = True
296 commit(ui, repo, **commit_opts)
296 commit(ui, repo, **commit_opts)
297 def nice(node):
297 def nice(node):
298 return '%d:%s' % (repo.changelog.rev(node), short(node))
298 return '%d:%s' % (repo.changelog.rev(node), short(node))
299 ui.status(_('changeset %s backs out changeset %s\n') %
299 ui.status(_('changeset %s backs out changeset %s\n') %
300 (nice(repo.changelog.tip()), nice(node)))
300 (nice(repo.changelog.tip()), nice(node)))
301 if op1 != node:
301 if op1 != node:
302 hg.clean(repo, op1, show_stats=False)
302 hg.clean(repo, op1, show_stats=False)
303 if opts.get('merge'):
303 if opts.get('merge'):
304 ui.status(_('merging with changeset %s\n')
304 ui.status(_('merging with changeset %s\n')
305 % nice(repo.changelog.tip()))
305 % nice(repo.changelog.tip()))
306 hg.merge(repo, hex(repo.changelog.tip()))
306 hg.merge(repo, hex(repo.changelog.tip()))
307 else:
307 else:
308 ui.status(_('the backout changeset is a new head - '
308 ui.status(_('the backout changeset is a new head - '
309 'do not forget to merge\n'))
309 'do not forget to merge\n'))
310 ui.status(_('(use "backout --merge" '
310 ui.status(_('(use "backout --merge" '
311 'if you want to auto-merge)\n'))
311 'if you want to auto-merge)\n'))
312
312
313 def bisect(ui, repo, rev=None, extra=None, command=None,
313 def bisect(ui, repo, rev=None, extra=None, command=None,
314 reset=None, good=None, bad=None, skip=None, noupdate=None):
314 reset=None, good=None, bad=None, skip=None, noupdate=None):
315 """subdivision search of changesets
315 """subdivision search of changesets
316
316
317 This command helps to find changesets which introduce problems. To
317 This command helps to find changesets which introduce problems. To
318 use, mark the earliest changeset you know exhibits the problem as
318 use, mark the earliest changeset you know exhibits the problem as
319 bad, then mark the latest changeset which is free from the problem
319 bad, then mark the latest changeset which is free from the problem
320 as good. Bisect will update your working directory to a revision
320 as good. Bisect will update your working directory to a revision
321 for testing (unless the -U/--noupdate option is specified). Once
321 for testing (unless the -U/--noupdate option is specified). Once
322 you have performed tests, mark the working directory as good or
322 you have performed tests, mark the working directory as good or
323 bad, and bisect will either update to another candidate changeset
323 bad, and bisect will either update to another candidate changeset
324 or announce that it has found the bad revision.
324 or announce that it has found the bad revision.
325
325
326 As a shortcut, you can also use the revision argument to mark a
326 As a shortcut, you can also use the revision argument to mark a
327 revision as good or bad without checking it out first.
327 revision as good or bad without checking it out first.
328
328
329 If you supply a command, it will be used for automatic bisection.
329 If you supply a command, it will be used for automatic bisection.
330 Its exit status will be used to mark revisions as good or bad:
330 Its exit status will be used to mark revisions as good or bad:
331 status 0 means good, 125 means to skip the revision, 127
331 status 0 means good, 125 means to skip the revision, 127
332 (command not found) will abort the bisection, and any other
332 (command not found) will abort the bisection, and any other
333 non-zero exit status means the revision is bad.
333 non-zero exit status means the revision is bad.
334
334
335 Returns 0 on success.
335 Returns 0 on success.
336 """
336 """
337 def print_result(nodes, good):
337 def print_result(nodes, good):
338 displayer = cmdutil.show_changeset(ui, repo, {})
338 displayer = cmdutil.show_changeset(ui, repo, {})
339 if len(nodes) == 1:
339 if len(nodes) == 1:
340 # narrowed it down to a single revision
340 # narrowed it down to a single revision
341 if good:
341 if good:
342 ui.write(_("The first good revision is:\n"))
342 ui.write(_("The first good revision is:\n"))
343 else:
343 else:
344 ui.write(_("The first bad revision is:\n"))
344 ui.write(_("The first bad revision is:\n"))
345 displayer.show(repo[nodes[0]])
345 displayer.show(repo[nodes[0]])
346 else:
346 else:
347 # multiple possible revisions
347 # multiple possible revisions
348 if good:
348 if good:
349 ui.write(_("Due to skipped revisions, the first "
349 ui.write(_("Due to skipped revisions, the first "
350 "good revision could be any of:\n"))
350 "good revision could be any of:\n"))
351 else:
351 else:
352 ui.write(_("Due to skipped revisions, the first "
352 ui.write(_("Due to skipped revisions, the first "
353 "bad revision could be any of:\n"))
353 "bad revision could be any of:\n"))
354 for n in nodes:
354 for n in nodes:
355 displayer.show(repo[n])
355 displayer.show(repo[n])
356 displayer.close()
356 displayer.close()
357
357
358 def check_state(state, interactive=True):
358 def check_state(state, interactive=True):
359 if not state['good'] or not state['bad']:
359 if not state['good'] or not state['bad']:
360 if (good or bad or skip or reset) and interactive:
360 if (good or bad or skip or reset) and interactive:
361 return
361 return
362 if not state['good']:
362 if not state['good']:
363 raise util.Abort(_('cannot bisect (no known good revisions)'))
363 raise util.Abort(_('cannot bisect (no known good revisions)'))
364 else:
364 else:
365 raise util.Abort(_('cannot bisect (no known bad revisions)'))
365 raise util.Abort(_('cannot bisect (no known bad revisions)'))
366 return True
366 return True
367
367
368 # backward compatibility
368 # backward compatibility
369 if rev in "good bad reset init".split():
369 if rev in "good bad reset init".split():
370 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
370 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
371 cmd, rev, extra = rev, extra, None
371 cmd, rev, extra = rev, extra, None
372 if cmd == "good":
372 if cmd == "good":
373 good = True
373 good = True
374 elif cmd == "bad":
374 elif cmd == "bad":
375 bad = True
375 bad = True
376 else:
376 else:
377 reset = True
377 reset = True
378 elif extra or good + bad + skip + reset + bool(command) > 1:
378 elif extra or good + bad + skip + reset + bool(command) > 1:
379 raise util.Abort(_('incompatible arguments'))
379 raise util.Abort(_('incompatible arguments'))
380
380
381 if reset:
381 if reset:
382 p = repo.join("bisect.state")
382 p = repo.join("bisect.state")
383 if os.path.exists(p):
383 if os.path.exists(p):
384 os.unlink(p)
384 os.unlink(p)
385 return
385 return
386
386
387 state = hbisect.load_state(repo)
387 state = hbisect.load_state(repo)
388
388
389 if command:
389 if command:
390 changesets = 1
390 changesets = 1
391 try:
391 try:
392 while changesets:
392 while changesets:
393 # update state
393 # update state
394 status = util.system(command)
394 status = util.system(command)
395 if status == 125:
395 if status == 125:
396 transition = "skip"
396 transition = "skip"
397 elif status == 0:
397 elif status == 0:
398 transition = "good"
398 transition = "good"
399 # status < 0 means process was killed
399 # status < 0 means process was killed
400 elif status == 127:
400 elif status == 127:
401 raise util.Abort(_("failed to execute %s") % command)
401 raise util.Abort(_("failed to execute %s") % command)
402 elif status < 0:
402 elif status < 0:
403 raise util.Abort(_("%s killed") % command)
403 raise util.Abort(_("%s killed") % command)
404 else:
404 else:
405 transition = "bad"
405 transition = "bad"
406 ctx = repo[rev or '.']
406 ctx = repo[rev or '.']
407 state[transition].append(ctx.node())
407 state[transition].append(ctx.node())
408 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
408 ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
409 check_state(state, interactive=False)
409 check_state(state, interactive=False)
410 # bisect
410 # bisect
411 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
411 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
412 # update to next check
412 # update to next check
413 cmdutil.bail_if_changed(repo)
413 cmdutil.bail_if_changed(repo)
414 hg.clean(repo, nodes[0], show_stats=False)
414 hg.clean(repo, nodes[0], show_stats=False)
415 finally:
415 finally:
416 hbisect.save_state(repo, state)
416 hbisect.save_state(repo, state)
417 print_result(nodes, good)
417 print_result(nodes, good)
418 return
418 return
419
419
420 # update state
420 # update state
421 node = repo.lookup(rev or '.')
421 node = repo.lookup(rev or '.')
422 if good or bad or skip:
422 if good or bad or skip:
423 if good:
423 if good:
424 state['good'].append(node)
424 state['good'].append(node)
425 elif bad:
425 elif bad:
426 state['bad'].append(node)
426 state['bad'].append(node)
427 elif skip:
427 elif skip:
428 state['skip'].append(node)
428 state['skip'].append(node)
429 hbisect.save_state(repo, state)
429 hbisect.save_state(repo, state)
430
430
431 if not check_state(state):
431 if not check_state(state):
432 return
432 return
433
433
434 # actually bisect
434 # actually bisect
435 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
435 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
436 if changesets == 0:
436 if changesets == 0:
437 print_result(nodes, good)
437 print_result(nodes, good)
438 else:
438 else:
439 assert len(nodes) == 1 # only a single node can be tested next
439 assert len(nodes) == 1 # only a single node can be tested next
440 node = nodes[0]
440 node = nodes[0]
441 # compute the approximate number of remaining tests
441 # compute the approximate number of remaining tests
442 tests, size = 0, 2
442 tests, size = 0, 2
443 while size <= changesets:
443 while size <= changesets:
444 tests, size = tests + 1, size * 2
444 tests, size = tests + 1, size * 2
445 rev = repo.changelog.rev(node)
445 rev = repo.changelog.rev(node)
446 ui.write(_("Testing changeset %d:%s "
446 ui.write(_("Testing changeset %d:%s "
447 "(%d changesets remaining, ~%d tests)\n")
447 "(%d changesets remaining, ~%d tests)\n")
448 % (rev, short(node), changesets, tests))
448 % (rev, short(node), changesets, tests))
449 if not noupdate:
449 if not noupdate:
450 cmdutil.bail_if_changed(repo)
450 cmdutil.bail_if_changed(repo)
451 return hg.clean(repo, node)
451 return hg.clean(repo, node)
452
452
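# When bisect is driven by -c/--command, only the exit status of that command
# matters: 0 marks the revision good, 125 skips it, 127 aborts the bisection,
# and any other non-zero status marks it bad.  A hypothetical test script
# following that contract ('make' and './run-tests.sh' are assumed project
# commands, not anything defined in this file):
import subprocess
import sys

def main():
    if subprocess.call(['make', '-s']) != 0:
        sys.exit(125)                              # does not build: skip revision
    sys.exit(subprocess.call(['./run-tests.sh']))  # 0 = good, non-zero = bad

if __name__ == '__main__':
    main()

# It could then be run with something like: hg bisect --command 'python bisect-check.py'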
453 def branch(ui, repo, label=None, **opts):
453 def branch(ui, repo, label=None, **opts):
454 """set or show the current branch name
454 """set or show the current branch name
455
455
456 With no argument, show the current branch name. With one argument,
456 With no argument, show the current branch name. With one argument,
457 set the working directory branch name (the branch will not exist
457 set the working directory branch name (the branch will not exist
458 in the repository until the next commit). Standard practice
458 in the repository until the next commit). Standard practice
459 recommends that primary development take place on the 'default'
459 recommends that primary development take place on the 'default'
460 branch.
460 branch.
461
461
462 Unless -f/--force is specified, branch will not let you set a
462 Unless -f/--force is specified, branch will not let you set a
463 branch name that already exists, even if it's inactive.
463 branch name that already exists, even if it's inactive.
464
464
465 Use -C/--clean to reset the working directory branch to that of
465 Use -C/--clean to reset the working directory branch to that of
466 the parent of the working directory, negating a previous branch
466 the parent of the working directory, negating a previous branch
467 change.
467 change.
468
468
469 Use the command :hg:`update` to switch to an existing branch. Use
469 Use the command :hg:`update` to switch to an existing branch. Use
470 :hg:`commit --close-branch` to mark this branch as closed.
470 :hg:`commit --close-branch` to mark this branch as closed.
471
471
472 Returns 0 on success.
472 Returns 0 on success.
473 """
473 """
474
474
475 if opts.get('clean'):
475 if opts.get('clean'):
476 label = repo[None].parents()[0].branch()
476 label = repo[None].parents()[0].branch()
477 repo.dirstate.setbranch(label)
477 repo.dirstate.setbranch(label)
478 ui.status(_('reset working directory to branch %s\n') % label)
478 ui.status(_('reset working directory to branch %s\n') % label)
479 elif label:
479 elif label:
480 utflabel = encoding.fromlocal(label)
480 utflabel = encoding.fromlocal(label)
481 if not opts.get('force') and utflabel in repo.branchtags():
481 if not opts.get('force') and utflabel in repo.branchtags():
482 if label not in [p.branch() for p in repo.parents()]:
482 if label not in [p.branch() for p in repo.parents()]:
483 raise util.Abort(_('a branch of the same name already exists'
483 raise util.Abort(_('a branch of the same name already exists'
484 " (use 'hg update' to switch to it)"))
484 " (use 'hg update' to switch to it)"))
485 repo.dirstate.setbranch(utflabel)
485 repo.dirstate.setbranch(utflabel)
486 ui.status(_('marked working directory as branch %s\n') % label)
486 ui.status(_('marked working directory as branch %s\n') % label)
487 else:
487 else:
488 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
488 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
489
489
490 def branches(ui, repo, active=False, closed=False):
490 def branches(ui, repo, active=False, closed=False):
491 """list repository named branches
491 """list repository named branches
492
492
493 List the repository's named branches, indicating which ones are
493 List the repository's named branches, indicating which ones are
494 inactive. If -c/--closed is specified, also list branches which have
494 inactive. If -c/--closed is specified, also list branches which have
495 been marked closed (see :hg:`commit --close-branch`).
495 been marked closed (see :hg:`commit --close-branch`).
496
496
497 If -a/--active is specified, only show active branches. A branch
497 If -a/--active is specified, only show active branches. A branch
498 is considered active if it contains repository heads.
498 is considered active if it contains repository heads.
499
499
500 Use the command :hg:`update` to switch to an existing branch.
500 Use the command :hg:`update` to switch to an existing branch.
501
501
502 Returns 0.
502 Returns 0.
503 """
503 """
504
504
505 hexfunc = ui.debugflag and hex or short
505 hexfunc = ui.debugflag and hex or short
506 activebranches = [repo[n].branch() for n in repo.heads()]
506 activebranches = [repo[n].branch() for n in repo.heads()]
507 def testactive(tag, node):
507 def testactive(tag, node):
508 realhead = tag in activebranches
508 realhead = tag in activebranches
509 open = node in repo.branchheads(tag, closed=False)
509 open = node in repo.branchheads(tag, closed=False)
510 return realhead and open
510 return realhead and open
511 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
511 branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
512 for tag, node in repo.branchtags().items()],
512 for tag, node in repo.branchtags().items()],
513 reverse=True)
513 reverse=True)
514
514
515 for isactive, node, tag in branches:
515 for isactive, node, tag in branches:
516 if (not active) or isactive:
516 if (not active) or isactive:
517 encodedtag = encoding.tolocal(tag)
517 encodedtag = encoding.tolocal(tag)
518 if ui.quiet:
518 if ui.quiet:
519 ui.write("%s\n" % encodedtag)
519 ui.write("%s\n" % encodedtag)
520 else:
520 else:
521 hn = repo.lookup(node)
521 hn = repo.lookup(node)
522 if isactive:
522 if isactive:
523 notice = ''
523 notice = ''
524 elif hn not in repo.branchheads(tag, closed=False):
524 elif hn not in repo.branchheads(tag, closed=False):
525 if not closed:
525 if not closed:
526 continue
526 continue
527 notice = _(' (closed)')
527 notice = _(' (closed)')
528 else:
528 else:
529 notice = _(' (inactive)')
529 notice = _(' (inactive)')
530 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
530 rev = str(node).rjust(31 - encoding.colwidth(encodedtag))
531 data = encodedtag, rev, hexfunc(hn), notice
531 data = encodedtag, rev, hexfunc(hn), notice
532 ui.write("%s %s:%s%s\n" % data)
532 ui.write("%s %s:%s%s\n" % data)
533
533
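# branches() treats a branch as active when at least one repository head is on
# it; otherwise it is shown as inactive (or closed).  The same test over toy
# data, with plain strings standing in for nodes:
nodebranch = {'n2': 'old-feature', 'n3': 'stable', 'n5': 'default'}
heads = ['n3', 'n5']                                  # hypothetical repo heads
activebranches = [nodebranch[n] for n in heads]       # mirrors repo[n].branch()
branchtags = {'default': 'n5', 'stable': 'n3', 'old-feature': 'n2'}
for tag in sorted(branchtags):
    state = 'active' if tag in activebranches else 'inactive'
    print('%s (%s)' % (tag, state))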
534 def bundle(ui, repo, fname, dest=None, **opts):
534 def bundle(ui, repo, fname, dest=None, **opts):
535 """create a changegroup file
535 """create a changegroup file
536
536
537 Generate a compressed changegroup file collecting changesets not
537 Generate a compressed changegroup file collecting changesets not
538 known to be in another repository.
538 known to be in another repository.
539
539
540 If you omit the destination repository, then hg assumes the
540 If you omit the destination repository, then hg assumes the
541 destination will have all the nodes you specify with --base
541 destination will have all the nodes you specify with --base
542 parameters. To create a bundle containing all changesets, use
542 parameters. To create a bundle containing all changesets, use
543 -a/--all (or --base null).
543 -a/--all (or --base null).
544
544
545 You can change compression method with the -t/--type option.
545 You can change compression method with the -t/--type option.
546 The available compression methods are: none, bzip2, and
546 The available compression methods are: none, bzip2, and
547 gzip (by default, bundles are compressed using bzip2).
547 gzip (by default, bundles are compressed using bzip2).
548
548
549 The bundle file can then be transferred using conventional means
549 The bundle file can then be transferred using conventional means
550 and applied to another repository with the unbundle or pull
550 and applied to another repository with the unbundle or pull
551 command. This is useful when direct push and pull are not
551 command. This is useful when direct push and pull are not
552 available or when exporting an entire repository is undesirable.
552 available or when exporting an entire repository is undesirable.
553
553
554 Applying bundles preserves all changeset contents including
554 Applying bundles preserves all changeset contents including
555 permissions, copy/rename information, and revision history.
555 permissions, copy/rename information, and revision history.
556
556
557 Returns 0 on success, 1 if no changes found.
557 Returns 0 on success, 1 if no changes found.
558 """
558 """
559 revs = opts.get('rev') or None
559 revs = opts.get('rev') or None
560 if revs:
560 if revs:
561 revs = [repo.lookup(rev) for rev in revs]
561 revs = [repo.lookup(rev) for rev in revs]
562 if opts.get('all'):
562 if opts.get('all'):
563 base = ['null']
563 base = ['null']
564 else:
564 else:
565 base = opts.get('base')
565 base = opts.get('base')
566 if base:
566 if base:
567 if dest:
567 if dest:
568 raise util.Abort(_("--base is incompatible with specifying "
568 raise util.Abort(_("--base is incompatible with specifying "
569 "a destination"))
569 "a destination"))
570 base = [repo.lookup(rev) for rev in base]
570 base = [repo.lookup(rev) for rev in base]
571 # create the right base
571 # create the right base
572 # XXX: nodesbetween / changegroup* should be "fixed" instead
572 # XXX: nodesbetween / changegroup* should be "fixed" instead
573 o = []
573 o = []
574 has = set((nullid,))
574 has = set((nullid,))
575 for n in base:
575 for n in base:
576 has.update(repo.changelog.reachable(n))
576 has.update(repo.changelog.reachable(n))
577 if revs:
577 if revs:
578 visit = list(revs)
578 visit = list(revs)
579 has.difference_update(revs)
579 has.difference_update(revs)
580 else:
580 else:
581 visit = repo.changelog.heads()
581 visit = repo.changelog.heads()
582 seen = {}
582 seen = {}
583 while visit:
583 while visit:
584 n = visit.pop(0)
584 n = visit.pop(0)
585 parents = [p for p in repo.changelog.parents(n) if p not in has]
585 parents = [p for p in repo.changelog.parents(n) if p not in has]
586 if len(parents) == 0:
586 if len(parents) == 0:
587 if n not in has:
587 if n not in has:
588 o.append(n)
588 o.append(n)
589 else:
589 else:
590 for p in parents:
590 for p in parents:
591 if p not in seen:
591 if p not in seen:
592 seen[p] = 1
592 seen[p] = 1
593 visit.append(p)
593 visit.append(p)
594 else:
594 else:
595 dest = ui.expandpath(dest or 'default-push', dest or 'default')
595 dest = ui.expandpath(dest or 'default-push', dest or 'default')
596 dest, branches = hg.parseurl(dest, opts.get('branch'))
596 dest, branches = hg.parseurl(dest, opts.get('branch'))
597 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
597 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
598 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
598 revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
599 o = repo.findoutgoing(other, force=opts.get('force'))
599 o = repo.findoutgoing(other, force=opts.get('force'))
600
600
601 if not o:
601 if not o:
602 ui.status(_("no changes found\n"))
602 ui.status(_("no changes found\n"))
603 return 1
603 return 1
604
604
605 if revs:
605 if revs:
606 cg = repo.changegroupsubset(o, revs, 'bundle')
606 cg = repo.changegroupsubset(o, revs, 'bundle')
607 else:
607 else:
608 cg = repo.changegroup(o, 'bundle')
608 cg = repo.changegroup(o, 'bundle')
609
609
610 bundletype = opts.get('type', 'bzip2').lower()
610 bundletype = opts.get('type', 'bzip2').lower()
611 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
611 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
612 bundletype = btypes.get(bundletype)
612 bundletype = btypes.get(bundletype)
613 if bundletype not in changegroup.bundletypes:
613 if bundletype not in changegroup.bundletypes:
614 raise util.Abort(_('unknown bundle type specified with --type'))
614 raise util.Abort(_('unknown bundle type specified with --type'))
615
615
616 changegroup.writebundle(cg, fname, bundletype)
616 changegroup.writebundle(cg, fname, bundletype)
617
617
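# With --base, bundle() walks parents from the requested heads and stops at
# anything already reachable from the base revisions (the 'has' set); the
# nodes it collects are the roots of the missing subgraph, which
# changegroupsubset() then expands into a full bundle.  The same walk over a
# toy single-parent graph (node names and the graph itself are invented,
# with None standing in for nullid):
parents = {'d': ['c'], 'c': ['b'], 'b': ['a'], 'a': [None]}
has = set([None, 'a', 'b'])          # what the destination already has
visit, seen, roots = ['d'], set(), []
while visit:
    n = visit.pop(0)
    missing = [p for p in parents[n] if p not in has]
    if not missing:
        if n not in has:
            roots.append(n)
    else:
        for p in missing:
            if p not in seen:
                seen.add(p)
                visit.append(p)
assert roots == ['c']                # first node the destination is missing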
618 def cat(ui, repo, file1, *pats, **opts):
618 def cat(ui, repo, file1, *pats, **opts):
619 """output the current or given revision of files
619 """output the current or given revision of files
620
620
621 Print the specified files as they were at the given revision. If
621 Print the specified files as they were at the given revision. If
622 no revision is given, the parent of the working directory is used,
622 no revision is given, the parent of the working directory is used,
623 or tip if no revision is checked out.
623 or tip if no revision is checked out.
624
624
625 Output may be to a file, in which case the name of the file is
625 Output may be to a file, in which case the name of the file is
626 given using a format string. The formatting rules are the same as
626 given using a format string. The formatting rules are the same as
627 for the export command, with the following additions:
627 for the export command, with the following additions:
628
628
629 :``%s``: basename of file being printed
629 :``%s``: basename of file being printed
630 :``%d``: dirname of file being printed, or '.' if in repository root
630 :``%d``: dirname of file being printed, or '.' if in repository root
631 :``%p``: root-relative path name of file being printed
631 :``%p``: root-relative path name of file being printed
632
632
633 Returns 0 on success.
633 Returns 0 on success.
634 """
634 """
635 ctx = repo[opts.get('rev')]
635 ctx = repo[opts.get('rev')]
636 err = 1
636 err = 1
637 m = cmdutil.match(repo, (file1,) + pats, opts)
637 m = cmdutil.match(repo, (file1,) + pats, opts)
638 for abs in ctx.walk(m):
638 for abs in ctx.walk(m):
639 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
639 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
640 data = ctx[abs].data()
640 data = ctx[abs].data()
641 if opts.get('decode'):
641 if opts.get('decode'):
642 data = repo.wwritedata(abs, data)
642 data = repo.wwritedata(abs, data)
643 fp.write(data)
643 fp.write(data)
644 err = 0
644 err = 0
645 return err
645 return err
646
646
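# The -o/--output pattern accepted by cat() supports the extra keys documented
# above (%s, %d, %p) on top of the export format rules.  A toy expansion of
# just those three keys (expand_name is an invented helper, not Mercurial's
# implementation):
import os

def expand_name(pattern, path):
    repl = {'s': os.path.basename(path),
            'd': os.path.dirname(path) or '.',
            'p': path}
    out, i = [], 0
    while i < len(pattern):
        if pattern[i] == '%' and i + 1 < len(pattern) and pattern[i + 1] in repl:
            out.append(repl[pattern[i + 1]])
            i += 2
        else:
            out.append(pattern[i])
            i += 1
    return ''.join(out)

assert expand_name('%d/%s.orig', 'lib/util.py') == 'lib/util.py.orig'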
647 def clone(ui, source, dest=None, **opts):
647 def clone(ui, source, dest=None, **opts):
648 """make a copy of an existing repository
648 """make a copy of an existing repository
649
649
650 Create a copy of an existing repository in a new directory.
650 Create a copy of an existing repository in a new directory.
651
651
652 If no destination directory name is specified, it defaults to the
652 If no destination directory name is specified, it defaults to the
653 basename of the source.
653 basename of the source.
654
654
655 The location of the source is added to the new repository's
655 The location of the source is added to the new repository's
656 .hg/hgrc file, as the default to be used for future pulls.
656 .hg/hgrc file, as the default to be used for future pulls.
657
657
658 See :hg:`help urls` for valid source format details.
658 See :hg:`help urls` for valid source format details.
659
659
660 It is possible to specify an ``ssh://`` URL as the destination, but no
660 It is possible to specify an ``ssh://`` URL as the destination, but no
661 .hg/hgrc and working directory will be created on the remote side.
661 .hg/hgrc and working directory will be created on the remote side.
662 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
662 Please see :hg:`help urls` for important details about ``ssh://`` URLs.
663
663
664 A set of changesets (tags, or branch names) to pull may be specified
664 A set of changesets (tags, or branch names) to pull may be specified
665 by listing each changeset (tag, or branch name) with -r/--rev.
665 by listing each changeset (tag, or branch name) with -r/--rev.
666 If -r/--rev is used, the cloned repository will contain only a subset
666 If -r/--rev is used, the cloned repository will contain only a subset
667 of the changesets of the source repository. Only the set of changesets
667 of the changesets of the source repository. Only the set of changesets
668 defined by all -r/--rev options (including all their ancestors)
668 defined by all -r/--rev options (including all their ancestors)
669 will be pulled into the destination repository.
669 will be pulled into the destination repository.
670 No subsequent changesets (including subsequent tags) will be present
670 No subsequent changesets (including subsequent tags) will be present
671 in the destination.
671 in the destination.
672
672
673 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
673 Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
674 local source repositories.
674 local source repositories.
675
675
676 For efficiency, hardlinks are used for cloning whenever the source
676 For efficiency, hardlinks are used for cloning whenever the source
677 and destination are on the same filesystem (note this applies only
677 and destination are on the same filesystem (note this applies only
678 to the repository data, not to the working directory). Some
678 to the repository data, not to the working directory). Some
679 filesystems, such as AFS, implement hardlinking incorrectly, but
679 filesystems, such as AFS, implement hardlinking incorrectly, but
680 do not report errors. In these cases, use the --pull option to
680 do not report errors. In these cases, use the --pull option to
681 avoid hardlinking.
681 avoid hardlinking.
682
682
683 In some cases, you can clone repositories and the working directory
683 In some cases, you can clone repositories and the working directory
684 using full hardlinks with ::
684 using full hardlinks with ::
685
685
686 $ cp -al REPO REPOCLONE
686 $ cp -al REPO REPOCLONE
687
687
688 This is the fastest way to clone, but it is not always safe. The
688 This is the fastest way to clone, but it is not always safe. The
689 operation is not atomic (making sure REPO is not modified during
689 operation is not atomic (making sure REPO is not modified during
690 the operation is up to you) and you have to make sure your editor
690 the operation is up to you) and you have to make sure your editor
691 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
691 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
692 this is not compatible with certain extensions that place their
692 this is not compatible with certain extensions that place their
693 metadata under the .hg directory, such as mq.
693 metadata under the .hg directory, such as mq.
694
694
695 Mercurial will update the working directory to the first applicable
695 Mercurial will update the working directory to the first applicable
696 revision from this list:
696 revision from this list:
697
697
698 a) null if -U or the source repository has no changesets
698 a) null if -U or the source repository has no changesets
699 b) if -u . and the source repository is local, the first parent of
699 b) if -u . and the source repository is local, the first parent of
700 the source repository's working directory
700 the source repository's working directory
701 c) the changeset specified with -u (if a branch name, this means the
701 c) the changeset specified with -u (if a branch name, this means the
702 latest head of that branch)
702 latest head of that branch)
703 d) the changeset specified with -r
703 d) the changeset specified with -r
704 e) the tipmost head specified with -b
704 e) the tipmost head specified with -b
705 f) the tipmost head specified with the url#branch source syntax
705 f) the tipmost head specified with the url#branch source syntax
706 g) the tipmost head of the default branch
706 g) the tipmost head of the default branch
707 h) tip
707 h) tip
708
708
709 Returns 0 on success.
709 Returns 0 on success.
710 """
710 """
711 if opts.get('noupdate') and opts.get('updaterev'):
711 if opts.get('noupdate') and opts.get('updaterev'):
712 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
712 raise util.Abort(_("cannot specify both --noupdate and --updaterev"))
713
713
714 r = hg.clone(cmdutil.remoteui(ui, opts), source, dest,
714 r = hg.clone(cmdutil.remoteui(ui, opts), source, dest,
715 pull=opts.get('pull'),
715 pull=opts.get('pull'),
716 stream=opts.get('uncompressed'),
716 stream=opts.get('uncompressed'),
717 rev=opts.get('rev'),
717 rev=opts.get('rev'),
718 update=opts.get('updaterev') or not opts.get('noupdate'),
718 update=opts.get('updaterev') or not opts.get('noupdate'),
719 branch=opts.get('branch'))
719 branch=opts.get('branch'))
720
720
721 return r is None
721 return r is None
722
722
723 def commit(ui, repo, *pats, **opts):
723 def commit(ui, repo, *pats, **opts):
724 """commit the specified files or all outstanding changes
724 """commit the specified files or all outstanding changes
725
725
726 Commit changes to the given files into the repository. Unlike a
726 Commit changes to the given files into the repository. Unlike a
727 centralized RCS, this operation is a local operation. See
727 centralized RCS, this operation is a local operation. See
728 :hg:`push` for a way to actively distribute your changes.
728 :hg:`push` for a way to actively distribute your changes.
729
729
730 If a list of files is omitted, all changes reported by :hg:`status`
730 If a list of files is omitted, all changes reported by :hg:`status`
731 will be committed.
731 will be committed.
732
732
733 If you are committing the result of a merge, do not provide any
733 If you are committing the result of a merge, do not provide any
734 filenames or -I/-X filters.
734 filenames or -I/-X filters.
735
735
736 If no commit message is specified, the configured editor is
736 If no commit message is specified, the configured editor is
737 started to prompt you for a message.
737 started to prompt you for a message.
738
738
739 See :hg:`help dates` for a list of formats valid for -d/--date.
739 See :hg:`help dates` for a list of formats valid for -d/--date.
740
740
741 Returns 0 on success, 1 if nothing changed.
741 Returns 0 on success, 1 if nothing changed.
742 """
742 """
743 extra = {}
743 extra = {}
744 if opts.get('close_branch'):
744 if opts.get('close_branch'):
745 if repo['.'].node() not in repo.branchheads():
745 if repo['.'].node() not in repo.branchheads():
746 # The topo heads set is included in the branch heads set of the
746 # The topo heads set is included in the branch heads set of the
747 # current branch, so it's sufficient to test branchheads
747 # current branch, so it's sufficient to test branchheads
748 raise util.Abort(_('can only close branch heads'))
748 raise util.Abort(_('can only close branch heads'))
749 extra['close'] = 1
749 extra['close'] = 1
750 e = cmdutil.commiteditor
750 e = cmdutil.commiteditor
751 if opts.get('force_editor'):
751 if opts.get('force_editor'):
752 e = cmdutil.commitforceeditor
752 e = cmdutil.commitforceeditor
753
753
754 def commitfunc(ui, repo, message, match, opts):
754 def commitfunc(ui, repo, message, match, opts):
755 return repo.commit(message, opts.get('user'), opts.get('date'), match,
755 return repo.commit(message, opts.get('user'), opts.get('date'), match,
756 editor=e, extra=extra)
756 editor=e, extra=extra)
757
757
758 branch = repo[None].branch()
758 branch = repo[None].branch()
759 bheads = repo.branchheads(branch)
759 bheads = repo.branchheads(branch)
760
760
761 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
761 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
762 if not node:
762 if not node:
763 ui.status(_("nothing changed\n"))
763 ui.status(_("nothing changed\n"))
764 return 1
764 return 1
765
765
766 ctx = repo[node]
766 ctx = repo[node]
767 parents = ctx.parents()
767 parents = ctx.parents()
768
768
769 if bheads and [x for x in parents
769 if bheads and [x for x in parents
770 if x.node() not in bheads and x.branch() == branch]:
770 if x.node() not in bheads and x.branch() == branch]:
771 ui.status(_('created new head\n'))
771 ui.status(_('created new head\n'))
772
772
773 if not opts.get('close_branch'):
773 if not opts.get('close_branch'):
774 for r in parents:
774 for r in parents:
775 if r.extra().get('close'):
775 if r.extra().get('close'):
776 ui.status(_('reopening closed branch head %d\n') % r)
776 ui.status(_('reopening closed branch head %d\n') % r)
777
777
778 if ui.debugflag:
778 if ui.debugflag:
779 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
779 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
780 elif ui.verbose:
780 elif ui.verbose:
781 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
781 ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))
782
782
783 def copy(ui, repo, *pats, **opts):
783 def copy(ui, repo, *pats, **opts):
784 """mark files as copied for the next commit
784 """mark files as copied for the next commit
785
785
786 Mark dest as having copies of source files. If dest is a
786 Mark dest as having copies of source files. If dest is a
787 directory, copies are put in that directory. If dest is a file,
787 directory, copies are put in that directory. If dest is a file,
788 the source must be a single file.
788 the source must be a single file.
789
789
790 By default, this command copies the contents of files as they
790 By default, this command copies the contents of files as they
791 exist in the working directory. If invoked with -A/--after, the
791 exist in the working directory. If invoked with -A/--after, the
792 operation is recorded, but no copying is performed.
792 operation is recorded, but no copying is performed.
793
793
794 This command takes effect with the next commit. To undo a copy
794 This command takes effect with the next commit. To undo a copy
795 before that, see :hg:`revert`.
795 before that, see :hg:`revert`.
796
796
797 Returns 0 on success, 1 if errors are encountered.
797 Returns 0 on success, 1 if errors are encountered.
798 """
798 """
799 wlock = repo.wlock(False)
799 wlock = repo.wlock(False)
800 try:
800 try:
801 return cmdutil.copy(ui, repo, pats, opts)
801 return cmdutil.copy(ui, repo, pats, opts)
802 finally:
802 finally:
803 wlock.release()
803 wlock.release()
804
804
805 def debugancestor(ui, repo, *args):
805 def debugancestor(ui, repo, *args):
806 """find the ancestor revision of two revisions in a given index"""
806 """find the ancestor revision of two revisions in a given index"""
807 if len(args) == 3:
807 if len(args) == 3:
808 index, rev1, rev2 = args
808 index, rev1, rev2 = args
809 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
809 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
810 lookup = r.lookup
810 lookup = r.lookup
811 elif len(args) == 2:
811 elif len(args) == 2:
812 if not repo:
812 if not repo:
813 raise util.Abort(_("There is no Mercurial repository here "
813 raise util.Abort(_("There is no Mercurial repository here "
814 "(.hg not found)"))
814 "(.hg not found)"))
815 rev1, rev2 = args
815 rev1, rev2 = args
816 r = repo.changelog
816 r = repo.changelog
817 lookup = repo.lookup
817 lookup = repo.lookup
818 else:
818 else:
819 raise util.Abort(_('either two or three arguments required'))
819 raise util.Abort(_('either two or three arguments required'))
820 a = r.ancestor(lookup(rev1), lookup(rev2))
820 a = r.ancestor(lookup(rev1), lookup(rev2))
821 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
821 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
822
822
823 def debugcommands(ui, cmd='', *args):
823 def debugcommands(ui, cmd='', *args):
824 """list all available commands and options"""
824 """list all available commands and options"""
825 for cmd, vals in sorted(table.iteritems()):
825 for cmd, vals in sorted(table.iteritems()):
826 cmd = cmd.split('|')[0].strip('^')
826 cmd = cmd.split('|')[0].strip('^')
827 opts = ', '.join([i[1] for i in vals[1]])
827 opts = ', '.join([i[1] for i in vals[1]])
828 ui.write('%s: %s\n' % (cmd, opts))
828 ui.write('%s: %s\n' % (cmd, opts))
829
829
830 def debugcomplete(ui, cmd='', **opts):
830 def debugcomplete(ui, cmd='', **opts):
831 """returns the completion list associated with the given command"""
831 """returns the completion list associated with the given command"""
832
832
833 if opts.get('options'):
833 if opts.get('options'):
834 options = []
834 options = []
835 otables = [globalopts]
835 otables = [globalopts]
836 if cmd:
836 if cmd:
837 aliases, entry = cmdutil.findcmd(cmd, table, False)
837 aliases, entry = cmdutil.findcmd(cmd, table, False)
838 otables.append(entry[1])
838 otables.append(entry[1])
839 for t in otables:
839 for t in otables:
840 for o in t:
840 for o in t:
841 if "(DEPRECATED)" in o[3]:
841 if "(DEPRECATED)" in o[3]:
842 continue
842 continue
843 if o[0]:
843 if o[0]:
844 options.append('-%s' % o[0])
844 options.append('-%s' % o[0])
845 options.append('--%s' % o[1])
845 options.append('--%s' % o[1])
846 ui.write("%s\n" % "\n".join(options))
846 ui.write("%s\n" % "\n".join(options))
847 return
847 return
848
848
849 cmdlist = cmdutil.findpossible(cmd, table)
849 cmdlist = cmdutil.findpossible(cmd, table)
850 if ui.verbose:
850 if ui.verbose:
851 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
851 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
852 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
852 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
853
853
854 def debugfsinfo(ui, path = "."):
854 def debugfsinfo(ui, path = "."):
855 """show information detected about current filesystem"""
855 """show information detected about current filesystem"""
856 open('.debugfsinfo', 'w').write('')
856 open('.debugfsinfo', 'w').write('')
857 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
857 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
858 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
858 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
859 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
859 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
860 and 'yes' or 'no'))
860 and 'yes' or 'no'))
861 os.unlink('.debugfsinfo')
861 os.unlink('.debugfsinfo')
862
862
863 def debugrebuildstate(ui, repo, rev="tip"):
863 def debugrebuildstate(ui, repo, rev="tip"):
864 """rebuild the dirstate as it would look like for the given revision"""
864 """rebuild the dirstate as it would look like for the given revision"""
865 ctx = repo[rev]
865 ctx = repo[rev]
866 wlock = repo.wlock()
866 wlock = repo.wlock()
867 try:
867 try:
868 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
868 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
869 finally:
869 finally:
870 wlock.release()
870 wlock.release()
871
871
872 def debugcheckstate(ui, repo):
872 def debugcheckstate(ui, repo):
873 """validate the correctness of the current dirstate"""
873 """validate the correctness of the current dirstate"""
874 parent1, parent2 = repo.dirstate.parents()
874 parent1, parent2 = repo.dirstate.parents()
875 m1 = repo[parent1].manifest()
875 m1 = repo[parent1].manifest()
876 m2 = repo[parent2].manifest()
876 m2 = repo[parent2].manifest()
877 errors = 0
877 errors = 0
878 for f in repo.dirstate:
878 for f in repo.dirstate:
879 state = repo.dirstate[f]
879 state = repo.dirstate[f]
880 if state in "nr" and f not in m1:
880 if state in "nr" and f not in m1:
881 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
881 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
882 errors += 1
882 errors += 1
883 if state in "a" and f in m1:
883 if state in "a" and f in m1:
884 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
884 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
885 errors += 1
885 errors += 1
886 if state in "m" and f not in m1 and f not in m2:
886 if state in "m" and f not in m1 and f not in m2:
887 ui.warn(_("%s in state %s, but not in either manifest\n") %
887 ui.warn(_("%s in state %s, but not in either manifest\n") %
888 (f, state))
888 (f, state))
889 errors += 1
889 errors += 1
890 for f in m1:
890 for f in m1:
891 state = repo.dirstate[f]
891 state = repo.dirstate[f]
892 if state not in "nrm":
892 if state not in "nrm":
893 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
893 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
894 errors += 1
894 errors += 1
895 if errors:
895 if errors:
896 error = _(".hg/dirstate inconsistent with current parent's manifest")
896 error = _(".hg/dirstate inconsistent with current parent's manifest")
897 raise util.Abort(error)
897 raise util.Abort(error)
898
898
899 def showconfig(ui, repo, *values, **opts):
899 def showconfig(ui, repo, *values, **opts):
900 """show combined config settings from all hgrc files
900 """show combined config settings from all hgrc files
901
901
902 With no arguments, print names and values of all config items.
902 With no arguments, print names and values of all config items.
903
903
904 With one argument of the form section.name, print just the value
904 With one argument of the form section.name, print just the value
905 of that config item.
905 of that config item.
906
906
907 With multiple arguments, print names and values of all config
907 With multiple arguments, print names and values of all config
908 items with matching section names.
908 items with matching section names.
909
909
910 With --debug, the source (filename and line number) is printed
910 With --debug, the source (filename and line number) is printed
911 for each config item.
911 for each config item.
912
912
913 Returns 0 on success.
913 Returns 0 on success.
914 """
914 """
915
915
916 for f in util.rcpath():
916 for f in util.rcpath():
917 ui.debug(_('read config from: %s\n') % f)
917 ui.debug(_('read config from: %s\n') % f)
918 untrusted = bool(opts.get('untrusted'))
918 untrusted = bool(opts.get('untrusted'))
919 if values:
919 if values:
920 if len([v for v in values if '.' in v]) > 1:
920 if len([v for v in values if '.' in v]) > 1:
921 raise util.Abort(_('only one config item permitted'))
921 raise util.Abort(_('only one config item permitted'))
922 for section, name, value in ui.walkconfig(untrusted=untrusted):
922 for section, name, value in ui.walkconfig(untrusted=untrusted):
923 sectname = section + '.' + name
923 sectname = section + '.' + name
924 if values:
924 if values:
925 for v in values:
925 for v in values:
926 if v == section:
926 if v == section:
927 ui.debug('%s: ' %
927 ui.debug('%s: ' %
928 ui.configsource(section, name, untrusted))
928 ui.configsource(section, name, untrusted))
929 ui.write('%s=%s\n' % (sectname, value))
929 ui.write('%s=%s\n' % (sectname, value))
930 elif v == sectname:
930 elif v == sectname:
931 ui.debug('%s: ' %
931 ui.debug('%s: ' %
932 ui.configsource(section, name, untrusted))
932 ui.configsource(section, name, untrusted))
933 ui.write(value, '\n')
933 ui.write(value, '\n')
934 else:
934 else:
935 ui.debug('%s: ' %
935 ui.debug('%s: ' %
936 ui.configsource(section, name, untrusted))
936 ui.configsource(section, name, untrusted))
937 ui.write('%s=%s\n' % (sectname, value))
937 ui.write('%s=%s\n' % (sectname, value))
938
938
939 def debugsetparents(ui, repo, rev1, rev2=None):
939 def debugsetparents(ui, repo, rev1, rev2=None):
940 """manually set the parents of the current working directory
940 """manually set the parents of the current working directory
941
941
942 This is useful for writing repository conversion tools, but should
942 This is useful for writing repository conversion tools, but should
943 be used with care.
943 be used with care.
944
944
945 Returns 0 on success.
945 Returns 0 on success.
946 """
946 """
947
947
948 if not rev2:
948 if not rev2:
949 rev2 = hex(nullid)
949 rev2 = hex(nullid)
950
950
951 wlock = repo.wlock()
951 wlock = repo.wlock()
952 try:
952 try:
953 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
953 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
954 finally:
954 finally:
955 wlock.release()
955 wlock.release()
956
956
957 def debugstate(ui, repo, nodates=None):
957 def debugstate(ui, repo, nodates=None):
958 """show the contents of the current dirstate"""
958 """show the contents of the current dirstate"""
959 timestr = ""
959 timestr = ""
960 showdate = not nodates
960 showdate = not nodates
961 for file_, ent in sorted(repo.dirstate._map.iteritems()):
961 for file_, ent in sorted(repo.dirstate._map.iteritems()):
962 if showdate:
962 if showdate:
963 if ent[3] == -1:
963 if ent[3] == -1:
964 # Pad or slice to locale representation
964 # Pad or slice to locale representation
965 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
965 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ",
966 time.localtime(0)))
966 time.localtime(0)))
967 timestr = 'unset'
967 timestr = 'unset'
968 timestr = (timestr[:locale_len] +
968 timestr = (timestr[:locale_len] +
969 ' ' * (locale_len - len(timestr)))
969 ' ' * (locale_len - len(timestr)))
970 else:
970 else:
971 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
971 timestr = time.strftime("%Y-%m-%d %H:%M:%S ",
972 time.localtime(ent[3]))
972 time.localtime(ent[3]))
973 if ent[1] & 020000:
973 if ent[1] & 020000:
974 mode = 'lnk'
974 mode = 'lnk'
975 else:
975 else:
976 mode = '%3o' % (ent[1] & 0777)
976 mode = '%3o' % (ent[1] & 0777)
977 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
977 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
978 for f in repo.dirstate.copies():
978 for f in repo.dirstate.copies():
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
979 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
980
980
981 def debugsub(ui, repo, rev=None):
981 def debugsub(ui, repo, rev=None):
982 if rev == '':
982 if rev == '':
983 rev = None
983 rev = None
984 for k, v in sorted(repo[rev].substate.items()):
984 for k, v in sorted(repo[rev].substate.items()):
985 ui.write('path %s\n' % k)
985 ui.write('path %s\n' % k)
986 ui.write(' source %s\n' % v[0])
986 ui.write(' source %s\n' % v[0])
987 ui.write(' revision %s\n' % v[1])
987 ui.write(' revision %s\n' % v[1])
988
988
989 def debugdata(ui, file_, rev):
989 def debugdata(ui, file_, rev):
990 """dump the contents of a data file revision"""
990 """dump the contents of a data file revision"""
991 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
991 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
992 try:
992 try:
993 ui.write(r.revision(r.lookup(rev)))
993 ui.write(r.revision(r.lookup(rev)))
994 except KeyError:
994 except KeyError:
995 raise util.Abort(_('invalid revision identifier %s') % rev)
995 raise util.Abort(_('invalid revision identifier %s') % rev)
996
996
997 def debugdate(ui, date, range=None, **opts):
997 def debugdate(ui, date, range=None, **opts):
998 """parse and display a date"""
998 """parse and display a date"""
999 if opts["extended"]:
999 if opts["extended"]:
1000 d = util.parsedate(date, util.extendeddateformats)
1000 d = util.parsedate(date, util.extendeddateformats)
1001 else:
1001 else:
1002 d = util.parsedate(date)
1002 d = util.parsedate(date)
1003 ui.write("internal: %s %s\n" % d)
1003 ui.write("internal: %s %s\n" % d)
1004 ui.write("standard: %s\n" % util.datestr(d))
1004 ui.write("standard: %s\n" % util.datestr(d))
1005 if range:
1005 if range:
1006 m = util.matchdate(range)
1006 m = util.matchdate(range)
1007 ui.write("match: %s\n" % m(d[0]))
1007 ui.write("match: %s\n" % m(d[0]))
1008
1008
1009 def debugindex(ui, file_):
1009 def debugindex(ui, file_):
1010 """dump the contents of an index file"""
1010 """dump the contents of an index file"""
1011 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1011 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1012 ui.write(" rev offset length base linkrev"
1012 ui.write(" rev offset length base linkrev"
1013 " nodeid p1 p2\n")
1013 " nodeid p1 p2\n")
1014 for i in r:
1014 for i in r:
1015 node = r.node(i)
1015 node = r.node(i)
1016 try:
1016 try:
1017 pp = r.parents(node)
1017 pp = r.parents(node)
1018 except:
1018 except:
1019 pp = [nullid, nullid]
1019 pp = [nullid, nullid]
1020 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1020 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1021 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1021 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
1022 short(node), short(pp[0]), short(pp[1])))
1022 short(node), short(pp[0]), short(pp[1])))
1023
1023
1024 def debugindexdot(ui, file_):
1024 def debugindexdot(ui, file_):
1025 """dump an index DAG as a graphviz dot file"""
1025 """dump an index DAG as a graphviz dot file"""
1026 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1026 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
1027 ui.write("digraph G {\n")
1027 ui.write("digraph G {\n")
1028 for i in r:
1028 for i in r:
1029 node = r.node(i)
1029 node = r.node(i)
1030 pp = r.parents(node)
1030 pp = r.parents(node)
1031 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1031 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1032 if pp[1] != nullid:
1032 if pp[1] != nullid:
1033 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1033 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1034 ui.write("}\n")
1034 ui.write("}\n")
1035
1035
1036 def debuginstall(ui):
1036 def debuginstall(ui):
1037 '''test Mercurial installation
1037 '''test Mercurial installation
1038
1038
1039 Returns 0 on success.
1039 Returns 0 on success.
1040 '''
1040 '''
1041
1041
1042 def writetemp(contents):
1042 def writetemp(contents):
1043 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1043 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
1044 f = os.fdopen(fd, "wb")
1044 f = os.fdopen(fd, "wb")
1045 f.write(contents)
1045 f.write(contents)
1046 f.close()
1046 f.close()
1047 return name
1047 return name
1048
1048
1049 problems = 0
1049 problems = 0
1050
1050
1051 # encoding
1051 # encoding
1052 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1052 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
1053 try:
1053 try:
1054 encoding.fromlocal("test")
1054 encoding.fromlocal("test")
1055 except util.Abort, inst:
1055 except util.Abort, inst:
1056 ui.write(" %s\n" % inst)
1056 ui.write(" %s\n" % inst)
1057 ui.write(_(" (check that your locale is properly set)\n"))
1057 ui.write(_(" (check that your locale is properly set)\n"))
1058 problems += 1
1058 problems += 1
1059
1059
1060 # compiled modules
1060 # compiled modules
1061 ui.status(_("Checking extensions...\n"))
1061 ui.status(_("Checking extensions...\n"))
1062 try:
1062 try:
1063 import bdiff, mpatch, base85
1063 import bdiff, mpatch, base85
1064 except Exception, inst:
1064 except Exception, inst:
1065 ui.write(" %s\n" % inst)
1065 ui.write(" %s\n" % inst)
1066 ui.write(_(" One or more extensions could not be found"))
1066 ui.write(_(" One or more extensions could not be found"))
1067 ui.write(_(" (check that you compiled the extensions)\n"))
1067 ui.write(_(" (check that you compiled the extensions)\n"))
1068 problems += 1
1068 problems += 1
1069
1069
1070 # templates
1070 # templates
1071 ui.status(_("Checking templates...\n"))
1071 ui.status(_("Checking templates...\n"))
1072 try:
1072 try:
1073 import templater
1073 import templater
1074 templater.templater(templater.templatepath("map-cmdline.default"))
1074 templater.templater(templater.templatepath("map-cmdline.default"))
1075 except Exception, inst:
1075 except Exception, inst:
1076 ui.write(" %s\n" % inst)
1076 ui.write(" %s\n" % inst)
1077 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1077 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
1078 problems += 1
1078 problems += 1
1079
1079
1080 # patch
1080 # patch
1081 ui.status(_("Checking patch...\n"))
1081 ui.status(_("Checking patch...\n"))
1082 patchproblems = 0
1082 patchproblems = 0
1083 a = "1\n2\n3\n4\n"
1083 a = "1\n2\n3\n4\n"
1084 b = "1\n2\n3\ninsert\n4\n"
1084 b = "1\n2\n3\ninsert\n4\n"
1085 fa = writetemp(a)
1085 fa = writetemp(a)
1086 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1086 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
1087 os.path.basename(fa))
1087 os.path.basename(fa))
1088 fd = writetemp(d)
1088 fd = writetemp(d)
1089
1089
1090 files = {}
1090 files = {}
1091 try:
1091 try:
1092 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1092 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
1093 except util.Abort, e:
1093 except util.Abort, e:
1094 ui.write(_(" patch call failed:\n"))
1094 ui.write(_(" patch call failed:\n"))
1095 ui.write(" " + str(e) + "\n")
1095 ui.write(" " + str(e) + "\n")
1096 patchproblems += 1
1096 patchproblems += 1
1097 else:
1097 else:
1098 if list(files) != [os.path.basename(fa)]:
1098 if list(files) != [os.path.basename(fa)]:
1099 ui.write(_(" unexpected patch output!\n"))
1099 ui.write(_(" unexpected patch output!\n"))
1100 patchproblems += 1
1100 patchproblems += 1
1101 a = open(fa).read()
1101 a = open(fa).read()
1102 if a != b:
1102 if a != b:
1103 ui.write(_(" patch test failed!\n"))
1103 ui.write(_(" patch test failed!\n"))
1104 patchproblems += 1
1104 patchproblems += 1
1105
1105
1106 if patchproblems:
1106 if patchproblems:
1107 if ui.config('ui', 'patch'):
1107 if ui.config('ui', 'patch'):
1108 ui.write(_(" (Current patch tool may be incompatible with patch,"
1108 ui.write(_(" (Current patch tool may be incompatible with patch,"
1109 " or misconfigured. Please check your .hgrc file)\n"))
1109 " or misconfigured. Please check your .hgrc file)\n"))
1110 else:
1110 else:
1111 ui.write(_(" Internal patcher failure, please report this error"
1111 ui.write(_(" Internal patcher failure, please report this error"
1112 " to http://mercurial.selenic.com/bts/\n"))
1112 " to http://mercurial.selenic.com/bts/\n"))
1113 problems += patchproblems
1113 problems += patchproblems
1114
1114
1115 os.unlink(fa)
1115 os.unlink(fa)
1116 os.unlink(fd)
1116 os.unlink(fd)
1117
1117
1118 # editor
1118 # editor
1119 ui.status(_("Checking commit editor...\n"))
1119 ui.status(_("Checking commit editor...\n"))
1120 editor = ui.geteditor()
1120 editor = ui.geteditor()
1121 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1121 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
1122 if not cmdpath:
1122 if not cmdpath:
1123 if editor == 'vi':
1123 if editor == 'vi':
1124 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1124 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
1125 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1125 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1126 else:
1126 else:
1127 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1127 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
1128 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1128 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
1129 problems += 1
1129 problems += 1
1130
1130
1131 # check username
1131 # check username
1132 ui.status(_("Checking username...\n"))
1132 ui.status(_("Checking username...\n"))
1133 try:
1133 try:
1134 user = ui.username()
1134 user = ui.username()
1135 except util.Abort, e:
1135 except util.Abort, e:
1136 ui.write(" %s\n" % e)
1136 ui.write(" %s\n" % e)
1137 ui.write(_(" (specify a username in your .hgrc file)\n"))
1137 ui.write(_(" (specify a username in your .hgrc file)\n"))
1138 problems += 1
1138 problems += 1
1139
1139
1140 if not problems:
1140 if not problems:
1141 ui.status(_("No problems detected\n"))
1141 ui.status(_("No problems detected\n"))
1142 else:
1142 else:
1143 ui.write(_("%s problems detected,"
1143 ui.write(_("%s problems detected,"
1144 " please check your install!\n") % problems)
1144 " please check your install!\n") % problems)
1145
1145
1146 return problems
1146 return problems
1147
1147
1148 def debugrename(ui, repo, file1, *pats, **opts):
1148 def debugrename(ui, repo, file1, *pats, **opts):
1149 """dump rename information"""
1149 """dump rename information"""
1150
1150
1151 ctx = repo[opts.get('rev')]
1151 ctx = repo[opts.get('rev')]
1152 m = cmdutil.match(repo, (file1,) + pats, opts)
1152 m = cmdutil.match(repo, (file1,) + pats, opts)
1153 for abs in ctx.walk(m):
1153 for abs in ctx.walk(m):
1154 fctx = ctx[abs]
1154 fctx = ctx[abs]
1155 o = fctx.filelog().renamed(fctx.filenode())
1155 o = fctx.filelog().renamed(fctx.filenode())
1156 rel = m.rel(abs)
1156 rel = m.rel(abs)
1157 if o:
1157 if o:
1158 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1158 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1159 else:
1159 else:
1160 ui.write(_("%s not renamed\n") % rel)
1160 ui.write(_("%s not renamed\n") % rel)
1161
1161
1162 def debugwalk(ui, repo, *pats, **opts):
1162 def debugwalk(ui, repo, *pats, **opts):
1163 """show how files match on given patterns"""
1163 """show how files match on given patterns"""
1164 m = cmdutil.match(repo, pats, opts)
1164 m = cmdutil.match(repo, pats, opts)
1165 items = list(repo.walk(m))
1165 items = list(repo.walk(m))
1166 if not items:
1166 if not items:
1167 return
1167 return
1168 fmt = 'f %%-%ds %%-%ds %%s' % (
1168 fmt = 'f %%-%ds %%-%ds %%s' % (
1169 max([len(abs) for abs in items]),
1169 max([len(abs) for abs in items]),
1170 max([len(m.rel(abs)) for abs in items]))
1170 max([len(m.rel(abs)) for abs in items]))
1171 for abs in items:
1171 for abs in items:
1172 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1172 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1173 ui.write("%s\n" % line.rstrip())
1173 ui.write("%s\n" % line.rstrip())
1174
1174
1175 def diff(ui, repo, *pats, **opts):
1175 def diff(ui, repo, *pats, **opts):
1176 """diff repository (or selected files)
1176 """diff repository (or selected files)
1177
1177
1178 Show differences between revisions for the specified files.
1178 Show differences between revisions for the specified files.
1179
1179
1180 Differences between files are shown using the unified diff format.
1180 Differences between files are shown using the unified diff format.
1181
1181
1182 NOTE: diff may generate unexpected results for merges, as it will
1182 NOTE: diff may generate unexpected results for merges, as it will
1183 default to comparing against the working directory's first parent
1183 default to comparing against the working directory's first parent
1184 changeset if no revisions are specified.
1184 changeset if no revisions are specified.
1185
1185
1186 When two revision arguments are given, then changes are shown
1186 When two revision arguments are given, then changes are shown
1187 between those revisions. If only one revision is specified then
1187 between those revisions. If only one revision is specified then
1188 that revision is compared to the working directory, and, when no
1188 that revision is compared to the working directory, and, when no
1189 revisions are specified, the working directory files are compared
1189 revisions are specified, the working directory files are compared
1190 to its parent.
1190 to its parent.
1191
1191
1192 Alternatively you can specify -c/--change with a revision to see
1192 Alternatively you can specify -c/--change with a revision to see
1193 the changes in that changeset relative to its first parent.
1193 the changes in that changeset relative to its first parent.
1194
1194
1195 Without the -a/--text option, diff will avoid generating diffs of
1195 Without the -a/--text option, diff will avoid generating diffs of
1196 files it detects as binary. With -a, diff will generate a diff
1196 files it detects as binary. With -a, diff will generate a diff
1197 anyway, probably with undesirable results.
1197 anyway, probably with undesirable results.
1198
1198
1199 Use the -g/--git option to generate diffs in the git extended diff
1199 Use the -g/--git option to generate diffs in the git extended diff
1200 format. For more information, read :hg:`help diffs`.
1200 format. For more information, read :hg:`help diffs`.
1201
1201
1202 Returns 0 on success.
1202 Returns 0 on success.
1203 """
1203 """
1204
1204
1205 revs = opts.get('rev')
1205 revs = opts.get('rev')
1206 change = opts.get('change')
1206 change = opts.get('change')
1207 stat = opts.get('stat')
1207 stat = opts.get('stat')
1208 reverse = opts.get('reverse')
1208 reverse = opts.get('reverse')
1209
1209
1210 if revs and change:
1210 if revs and change:
1211 msg = _('cannot specify --rev and --change at the same time')
1211 msg = _('cannot specify --rev and --change at the same time')
1212 raise util.Abort(msg)
1212 raise util.Abort(msg)
1213 elif change:
1213 elif change:
1214 node2 = repo.lookup(change)
1214 node2 = repo.lookup(change)
1215 node1 = repo[node2].parents()[0].node()
1215 node1 = repo[node2].parents()[0].node()
1216 else:
1216 else:
1217 node1, node2 = cmdutil.revpair(repo, revs)
1217 node1, node2 = cmdutil.revpair(repo, revs)
1218
1218
1219 if reverse:
1219 if reverse:
1220 node1, node2 = node2, node1
1220 node1, node2 = node2, node1
1221
1221
1222 diffopts = patch.diffopts(ui, opts)
1222 diffopts = patch.diffopts(ui, opts)
1223 m = cmdutil.match(repo, pats, opts)
1223 m = cmdutil.match(repo, pats, opts)
1224 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat)
1224 cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat)
1225
1225
1226 def export(ui, repo, *changesets, **opts):
1226 def export(ui, repo, *changesets, **opts):
1227 """dump the header and diffs for one or more changesets
1227 """dump the header and diffs for one or more changesets
1228
1228
1229 Print the changeset header and diffs for one or more revisions.
1229 Print the changeset header and diffs for one or more revisions.
1230
1230
1231 The information shown in the changeset header is: author, date,
1231 The information shown in the changeset header is: author, date,
1232 branch name (if non-default), changeset hash, parent(s) and commit
1232 branch name (if non-default), changeset hash, parent(s) and commit
1233 comment.
1233 comment.
1234
1234
1235 NOTE: export may generate unexpected diff output for merge
1235 NOTE: export may generate unexpected diff output for merge
1236 changesets, as it will compare the merge changeset against its
1236 changesets, as it will compare the merge changeset against its
1237 first parent only.
1237 first parent only.
1238
1238
1239 Output may be to a file, in which case the name of the file is
1239 Output may be to a file, in which case the name of the file is
1240 given using a format string. The formatting rules are as follows:
1240 given using a format string. The formatting rules are as follows:
1241
1241
1242 :``%%``: literal "%" character
1242 :``%%``: literal "%" character
1243 :``%H``: changeset hash (40 bytes of hexadecimal)
1243 :``%H``: changeset hash (40 bytes of hexadecimal)
1244 :``%N``: number of patches being generated
1244 :``%N``: number of patches being generated
1245 :``%R``: changeset revision number
1245 :``%R``: changeset revision number
1246 :``%b``: basename of the exporting repository
1246 :``%b``: basename of the exporting repository
1247 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1247 :``%h``: short-form changeset hash (12 bytes of hexadecimal)
1248 :``%n``: zero-padded sequence number, starting at 1
1248 :``%n``: zero-padded sequence number, starting at 1
1249 :``%r``: zero-padded changeset revision number
1249 :``%r``: zero-padded changeset revision number
1250
1250
1251 Without the -a/--text option, export will avoid generating diffs
1251 Without the -a/--text option, export will avoid generating diffs
1252 of files it detects as binary. With -a, export will generate a
1252 of files it detects as binary. With -a, export will generate a
1253 diff anyway, probably with undesirable results.
1253 diff anyway, probably with undesirable results.
1254
1254
1255 Use the -g/--git option to generate diffs in the git extended diff
1255 Use the -g/--git option to generate diffs in the git extended diff
1256 format. See :hg:`help diffs` for more information.
1256 format. See :hg:`help diffs` for more information.
1257
1257
1258 With the --switch-parent option, the diff will be against the
1258 With the --switch-parent option, the diff will be against the
1259 second parent. It can be useful to review a merge.
1259 second parent. It can be useful to review a merge.
1260
1260
1261 Returns 0 on success.
1261 Returns 0 on success.
1262 """
1262 """
1263 changesets += tuple(opts.get('rev', []))
1263 changesets += tuple(opts.get('rev', []))
1264 if not changesets:
1264 if not changesets:
1265 raise util.Abort(_("export requires at least one changeset"))
1265 raise util.Abort(_("export requires at least one changeset"))
1266 revs = cmdutil.revrange(repo, changesets)
1266 revs = cmdutil.revrange(repo, changesets)
1267 if len(revs) > 1:
1267 if len(revs) > 1:
1268 ui.note(_('exporting patches:\n'))
1268 ui.note(_('exporting patches:\n'))
1269 else:
1269 else:
1270 ui.note(_('exporting patch:\n'))
1270 ui.note(_('exporting patch:\n'))
1271 cmdutil.export(repo, revs, template=opts.get('output'),
1271 cmdutil.export(repo, revs, template=opts.get('output'),
1272 switch_parent=opts.get('switch_parent'),
1272 switch_parent=opts.get('switch_parent'),
1273 opts=patch.diffopts(ui, opts))
1273 opts=patch.diffopts(ui, opts))
1274
1274
1275 def forget(ui, repo, *pats, **opts):
1275 def forget(ui, repo, *pats, **opts):
1276 """forget the specified files on the next commit
1276 """forget the specified files on the next commit
1277
1277
1278 Mark the specified files so they will no longer be tracked
1278 Mark the specified files so they will no longer be tracked
1279 after the next commit.
1279 after the next commit.
1280
1280
1281 This only removes files from the current branch, not from the
1281 This only removes files from the current branch, not from the
1282 entire project history, and it does not delete them from the
1282 entire project history, and it does not delete them from the
1283 working directory.
1283 working directory.
1284
1284
1285 To undo a forget before the next commit, see :hg:`add`.
1285 To undo a forget before the next commit, see :hg:`add`.
1286
1286
1287 Returns 0 on success.
1287 Returns 0 on success.
1288 """
1288 """
1289
1289
1290 if not pats:
1290 if not pats:
1291 raise util.Abort(_('no files specified'))
1291 raise util.Abort(_('no files specified'))
1292
1292
1293 m = cmdutil.match(repo, pats, opts)
1293 m = cmdutil.match(repo, pats, opts)
1294 s = repo.status(match=m, clean=True)
1294 s = repo.status(match=m, clean=True)
1295 forget = sorted(s[0] + s[1] + s[3] + s[6])
1295 forget = sorted(s[0] + s[1] + s[3] + s[6])
1296 errs = 0
1296 errs = 0
1297
1297
1298 for f in m.files():
1298 for f in m.files():
1299 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1299 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
1300 ui.warn(_('not removing %s: file is already untracked\n')
1300 ui.warn(_('not removing %s: file is already untracked\n')
1301 % m.rel(f))
1301 % m.rel(f))
1302 errs = 1
1302 errs = 1
1303
1303
1304 for f in forget:
1304 for f in forget:
1305 if ui.verbose or not m.exact(f):
1305 if ui.verbose or not m.exact(f):
1306 ui.status(_('removing %s\n') % m.rel(f))
1306 ui.status(_('removing %s\n') % m.rel(f))
1307
1307
1308 repo.remove(forget, unlink=False)
1308 repo.remove(forget, unlink=False)
1309 return errs
1309 return errs
1310
1310
1311 def grep(ui, repo, pattern, *pats, **opts):
1311 def grep(ui, repo, pattern, *pats, **opts):
1312 """search for a pattern in specified files and revisions
1312 """search for a pattern in specified files and revisions
1313
1313
1314 Search revisions of files for a regular expression.
1314 Search revisions of files for a regular expression.
1315
1315
1316 This command behaves differently than Unix grep. It only accepts
1316 This command behaves differently than Unix grep. It only accepts
1317 Python/Perl regexps. It searches repository history, not the
1317 Python/Perl regexps. It searches repository history, not the
1318 working directory. It always prints the revision number in which a
1318 working directory. It always prints the revision number in which a
1319 match appears.
1319 match appears.
1320
1320
1321 By default, grep only prints output for the first revision of a
1321 By default, grep only prints output for the first revision of a
1322 file in which it finds a match. To get it to print every revision
1322 file in which it finds a match. To get it to print every revision
1323 that contains a change in match status ("-" for a match that
1323 that contains a change in match status ("-" for a match that
1324 becomes a non-match, or "+" for a non-match that becomes a match),
1324 becomes a non-match, or "+" for a non-match that becomes a match),
1325 use the --all flag.
1325 use the --all flag.
1326
1326
1327 Returns 0 if a match is found, 1 otherwise.
1327 Returns 0 if a match is found, 1 otherwise.
1328 """
1328 """
1329 reflags = 0
1329 reflags = 0
1330 if opts.get('ignore_case'):
1330 if opts.get('ignore_case'):
1331 reflags |= re.I
1331 reflags |= re.I
1332 try:
1332 try:
1333 regexp = re.compile(pattern, reflags)
1333 regexp = re.compile(pattern, reflags)
1334 except Exception, inst:
1334 except Exception, inst:
1335 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1335 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1336 return 1
1336 return 1
1337 sep, eol = ':', '\n'
1337 sep, eol = ':', '\n'
1338 if opts.get('print0'):
1338 if opts.get('print0'):
1339 sep = eol = '\0'
1339 sep = eol = '\0'
1340
1340
1341 getfile = util.lrucachefunc(repo.file)
1341 getfile = util.lrucachefunc(repo.file)
1342
1342
1343 def matchlines(body):
1343 def matchlines(body):
1344 begin = 0
1344 begin = 0
1345 linenum = 0
1345 linenum = 0
1346 while True:
1346 while True:
1347 match = regexp.search(body, begin)
1347 match = regexp.search(body, begin)
1348 if not match:
1348 if not match:
1349 break
1349 break
1350 mstart, mend = match.span()
1350 mstart, mend = match.span()
1351 linenum += body.count('\n', begin, mstart) + 1
1351 linenum += body.count('\n', begin, mstart) + 1
1352 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1352 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1353 begin = body.find('\n', mend) + 1 or len(body)
1353 begin = body.find('\n', mend) + 1 or len(body)
1354 lend = begin - 1
1354 lend = begin - 1
1355 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1355 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1356
1356
1357 class linestate(object):
1357 class linestate(object):
1358 def __init__(self, line, linenum, colstart, colend):
1358 def __init__(self, line, linenum, colstart, colend):
1359 self.line = line
1359 self.line = line
1360 self.linenum = linenum
1360 self.linenum = linenum
1361 self.colstart = colstart
1361 self.colstart = colstart
1362 self.colend = colend
1362 self.colend = colend
1363
1363
1364 def __hash__(self):
1364 def __hash__(self):
1365 return hash((self.linenum, self.line))
1365 return hash((self.linenum, self.line))
1366
1366
1367 def __eq__(self, other):
1367 def __eq__(self, other):
1368 return self.line == other.line
1368 return self.line == other.line
1369
1369
1370 matches = {}
1370 matches = {}
1371 copies = {}
1371 copies = {}
1372 def grepbody(fn, rev, body):
1372 def grepbody(fn, rev, body):
1373 matches[rev].setdefault(fn, [])
1373 matches[rev].setdefault(fn, [])
1374 m = matches[rev][fn]
1374 m = matches[rev][fn]
1375 for lnum, cstart, cend, line in matchlines(body):
1375 for lnum, cstart, cend, line in matchlines(body):
1376 s = linestate(line, lnum, cstart, cend)
1376 s = linestate(line, lnum, cstart, cend)
1377 m.append(s)
1377 m.append(s)
1378
1378
1379 def difflinestates(a, b):
1379 def difflinestates(a, b):
1380 sm = difflib.SequenceMatcher(None, a, b)
1380 sm = difflib.SequenceMatcher(None, a, b)
1381 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1381 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1382 if tag == 'insert':
1382 if tag == 'insert':
1383 for i in xrange(blo, bhi):
1383 for i in xrange(blo, bhi):
1384 yield ('+', b[i])
1384 yield ('+', b[i])
1385 elif tag == 'delete':
1385 elif tag == 'delete':
1386 for i in xrange(alo, ahi):
1386 for i in xrange(alo, ahi):
1387 yield ('-', a[i])
1387 yield ('-', a[i])
1388 elif tag == 'replace':
1388 elif tag == 'replace':
1389 for i in xrange(alo, ahi):
1389 for i in xrange(alo, ahi):
1390 yield ('-', a[i])
1390 yield ('-', a[i])
1391 for i in xrange(blo, bhi):
1391 for i in xrange(blo, bhi):
1392 yield ('+', b[i])
1392 yield ('+', b[i])
1393
1393
1394 def display(fn, ctx, pstates, states):
1394 def display(fn, ctx, pstates, states):
1395 rev = ctx.rev()
1395 rev = ctx.rev()
1396 datefunc = ui.quiet and util.shortdate or util.datestr
1396 datefunc = ui.quiet and util.shortdate or util.datestr
1397 found = False
1397 found = False
1398 filerevmatches = {}
1398 filerevmatches = {}
1399 if opts.get('all'):
1399 if opts.get('all'):
1400 iter = difflinestates(pstates, states)
1400 iter = difflinestates(pstates, states)
1401 else:
1401 else:
1402 iter = [('', l) for l in states]
1402 iter = [('', l) for l in states]
1403 for change, l in iter:
1403 for change, l in iter:
1404 cols = [fn, str(rev)]
1404 cols = [fn, str(rev)]
1405 before, match, after = None, None, None
1405 before, match, after = None, None, None
1406 if opts.get('line_number'):
1406 if opts.get('line_number'):
1407 cols.append(str(l.linenum))
1407 cols.append(str(l.linenum))
1408 if opts.get('all'):
1408 if opts.get('all'):
1409 cols.append(change)
1409 cols.append(change)
1410 if opts.get('user'):
1410 if opts.get('user'):
1411 cols.append(ui.shortuser(ctx.user()))
1411 cols.append(ui.shortuser(ctx.user()))
1412 if opts.get('date'):
1412 if opts.get('date'):
1413 cols.append(datefunc(ctx.date()))
1413 cols.append(datefunc(ctx.date()))
1414 if opts.get('files_with_matches'):
1414 if opts.get('files_with_matches'):
1415 c = (fn, rev)
1415 c = (fn, rev)
1416 if c in filerevmatches:
1416 if c in filerevmatches:
1417 continue
1417 continue
1418 filerevmatches[c] = 1
1418 filerevmatches[c] = 1
1419 else:
1419 else:
1420 before = l.line[:l.colstart]
1420 before = l.line[:l.colstart]
1421 match = l.line[l.colstart:l.colend]
1421 match = l.line[l.colstart:l.colend]
1422 after = l.line[l.colend:]
1422 after = l.line[l.colend:]
1423 ui.write(sep.join(cols))
1423 ui.write(sep.join(cols))
1424 if before is not None:
1424 if before is not None:
1425 ui.write(sep + before)
1425 ui.write(sep + before)
1426 ui.write(match, label='grep.match')
1426 ui.write(match, label='grep.match')
1427 ui.write(after)
1427 ui.write(after)
1428 ui.write(eol)
1428 ui.write(eol)
1429 found = True
1429 found = True
1430 return found
1430 return found
1431
1431
1432 skip = {}
1432 skip = {}
1433 revfiles = {}
1433 revfiles = {}
1434 matchfn = cmdutil.match(repo, pats, opts)
1434 matchfn = cmdutil.match(repo, pats, opts)
1435 found = False
1435 found = False
1436 follow = opts.get('follow')
1436 follow = opts.get('follow')
1437
1437
1438 def prep(ctx, fns):
1438 def prep(ctx, fns):
1439 rev = ctx.rev()
1439 rev = ctx.rev()
1440 pctx = ctx.parents()[0]
1440 pctx = ctx.parents()[0]
1441 parent = pctx.rev()
1441 parent = pctx.rev()
1442 matches.setdefault(rev, {})
1442 matches.setdefault(rev, {})
1443 matches.setdefault(parent, {})
1443 matches.setdefault(parent, {})
1444 files = revfiles.setdefault(rev, [])
1444 files = revfiles.setdefault(rev, [])
1445 for fn in fns:
1445 for fn in fns:
1446 flog = getfile(fn)
1446 flog = getfile(fn)
1447 try:
1447 try:
1448 fnode = ctx.filenode(fn)
1448 fnode = ctx.filenode(fn)
1449 except error.LookupError:
1449 except error.LookupError:
1450 continue
1450 continue
1451
1451
1452 copied = flog.renamed(fnode)
1452 copied = flog.renamed(fnode)
1453 copy = follow and copied and copied[0]
1453 copy = follow and copied and copied[0]
1454 if copy:
1454 if copy:
1455 copies.setdefault(rev, {})[fn] = copy
1455 copies.setdefault(rev, {})[fn] = copy
1456 if fn in skip:
1456 if fn in skip:
1457 if copy:
1457 if copy:
1458 skip[copy] = True
1458 skip[copy] = True
1459 continue
1459 continue
1460 files.append(fn)
1460 files.append(fn)
1461
1461
1462 if fn not in matches[rev]:
1462 if fn not in matches[rev]:
1463 grepbody(fn, rev, flog.read(fnode))
1463 grepbody(fn, rev, flog.read(fnode))
1464
1464
1465 pfn = copy or fn
1465 pfn = copy or fn
1466 if pfn not in matches[parent]:
1466 if pfn not in matches[parent]:
1467 try:
1467 try:
1468 fnode = pctx.filenode(pfn)
1468 fnode = pctx.filenode(pfn)
1469 grepbody(pfn, parent, flog.read(fnode))
1469 grepbody(pfn, parent, flog.read(fnode))
1470 except error.LookupError:
1470 except error.LookupError:
1471 pass
1471 pass
1472
1472
1473 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1473 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
1474 rev = ctx.rev()
1474 rev = ctx.rev()
1475 parent = ctx.parents()[0].rev()
1475 parent = ctx.parents()[0].rev()
1476 for fn in sorted(revfiles.get(rev, [])):
1476 for fn in sorted(revfiles.get(rev, [])):
1477 states = matches[rev][fn]
1477 states = matches[rev][fn]
1478 copy = copies.get(rev, {}).get(fn)
1478 copy = copies.get(rev, {}).get(fn)
1479 if fn in skip:
1479 if fn in skip:
1480 if copy:
1480 if copy:
1481 skip[copy] = True
1481 skip[copy] = True
1482 continue
1482 continue
1483 pstates = matches.get(parent, {}).get(copy or fn, [])
1483 pstates = matches.get(parent, {}).get(copy or fn, [])
1484 if pstates or states:
1484 if pstates or states:
1485 r = display(fn, ctx, pstates, states)
1485 r = display(fn, ctx, pstates, states)
1486 found = found or r
1486 found = found or r
1487 if r and not opts.get('all'):
1487 if r and not opts.get('all'):
1488 skip[fn] = True
1488 skip[fn] = True
1489 if copy:
1489 if copy:
1490 skip[copy] = True
1490 skip[copy] = True
1491 del matches[rev]
1491 del matches[rev]
1492 del revfiles[rev]
1492 del revfiles[rev]
1493
1493
1494 return not found
1494 return not found
1495
1495
1496 def heads(ui, repo, *branchrevs, **opts):
1496 def heads(ui, repo, *branchrevs, **opts):
1497 """show current repository heads or show branch heads
1497 """show current repository heads or show branch heads
1498
1498
1499 With no arguments, show all repository branch heads.
1499 With no arguments, show all repository branch heads.
1500
1500
1501 Repository "heads" are changesets with no child changesets. They are
1501 Repository "heads" are changesets with no child changesets. They are
1502 where development generally takes place and are the usual targets
1502 where development generally takes place and are the usual targets
1503 for update and merge operations. Branch heads are changesets that have
1503 for update and merge operations. Branch heads are changesets that have
1504 no child changeset on the same branch.
1504 no child changeset on the same branch.
1505
1505
1506 If one or more REVs are given, only branch heads on the branches
1506 If one or more REVs are given, only branch heads on the branches
1507 associated with the specified changesets are shown.
1507 associated with the specified changesets are shown.
1508
1508
1509 If -c/--closed is specified, also show branch heads marked closed
1509 If -c/--closed is specified, also show branch heads marked closed
1510 (see :hg:`commit --close-branch`).
1510 (see :hg:`commit --close-branch`).
1511
1511
1512 If STARTREV is specified, only those heads that are descendants of
1512 If STARTREV is specified, only those heads that are descendants of
1513 STARTREV will be displayed.
1513 STARTREV will be displayed.
1514
1514
1515 If -t/--topo is specified, named branch mechanics will be ignored and only
1515 If -t/--topo is specified, named branch mechanics will be ignored and only
1516 changesets without children will be shown.
1516 changesets without children will be shown.
1517
1517
1518 Returns 0 if matching heads are found, 1 if not.
1518 Returns 0 if matching heads are found, 1 if not.
1519 """
1519 """
1520
1520
1521 if opts.get('rev'):
1521 if opts.get('rev'):
1522 start = repo.lookup(opts['rev'])
1522 start = repo.lookup(opts['rev'])
1523 else:
1523 else:
1524 start = None
1524 start = None
1525
1525
1526 if opts.get('topo'):
1526 if opts.get('topo'):
1527 heads = [repo[h] for h in repo.heads(start)]
1527 heads = [repo[h] for h in repo.heads(start)]
1528 else:
1528 else:
1529 heads = []
1529 heads = []
1530 for b, ls in repo.branchmap().iteritems():
1530 for b, ls in repo.branchmap().iteritems():
1531 if start is None:
1531 if start is None:
1532 heads += [repo[h] for h in ls]
1532 heads += [repo[h] for h in ls]
1533 continue
1533 continue
1534 startrev = repo.changelog.rev(start)
1534 startrev = repo.changelog.rev(start)
1535 descendants = set(repo.changelog.descendants(startrev))
1535 descendants = set(repo.changelog.descendants(startrev))
1536 descendants.add(startrev)
1536 descendants.add(startrev)
1537 rev = repo.changelog.rev
1537 rev = repo.changelog.rev
1538 heads += [repo[h] for h in ls if rev(h) in descendants]
1538 heads += [repo[h] for h in ls if rev(h) in descendants]
1539
1539
1540 if branchrevs:
1540 if branchrevs:
1541 decode, encode = encoding.fromlocal, encoding.tolocal
1541 decode, encode = encoding.fromlocal, encoding.tolocal
1542 branches = set(repo[decode(br)].branch() for br in branchrevs)
1542 branches = set(repo[decode(br)].branch() for br in branchrevs)
1543 heads = [h for h in heads if h.branch() in branches]
1543 heads = [h for h in heads if h.branch() in branches]
1544
1544
1545 if not opts.get('closed'):
1545 if not opts.get('closed'):
1546 heads = [h for h in heads if not h.extra().get('close')]
1546 heads = [h for h in heads if not h.extra().get('close')]
1547
1547
1548 if opts.get('active') and branchrevs:
1548 if opts.get('active') and branchrevs:
1549 dagheads = repo.heads(start)
1549 dagheads = repo.heads(start)
1550 heads = [h for h in heads if h.node() in dagheads]
1550 heads = [h for h in heads if h.node() in dagheads]
1551
1551
1552 if branchrevs:
1552 if branchrevs:
1553 haveheads = set(h.branch() for h in heads)
1553 haveheads = set(h.branch() for h in heads)
1554 if branches - haveheads:
1554 if branches - haveheads:
1555 headless = ', '.join(encode(b) for b in branches - haveheads)
1555 headless = ', '.join(encode(b) for b in branches - haveheads)
1556 msg = _('no open branch heads found on branches %s')
1556 msg = _('no open branch heads found on branches %s')
1557 if opts.get('rev'):
1557 if opts.get('rev'):
1558 msg += _(' (started at %s)') % opts['rev']
1558 msg += _(' (started at %s)') % opts['rev']
1559 ui.warn((msg + '\n') % headless)
1559 ui.warn((msg + '\n') % headless)
1560
1560
1561 if not heads:
1561 if not heads:
1562 return 1
1562 return 1
1563
1563
1564 heads = sorted(heads, key=lambda x: -x.rev())
1564 heads = sorted(heads, key=lambda x: -x.rev())
1565 displayer = cmdutil.show_changeset(ui, repo, opts)
1565 displayer = cmdutil.show_changeset(ui, repo, opts)
1566 for ctx in heads:
1566 for ctx in heads:
1567 displayer.show(ctx)
1567 displayer.show(ctx)
1568 displayer.close()
1568 displayer.close()
1569
1569
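# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  The STARTREV handling above
# keeps only branch heads that are descendants of the start revision: it
# materialises the descendant set and tests each candidate head for
# membership.  The toy code below reproduces that idea on a plain dict-based
# DAG; every name here (toy_descendants, toy_filter_heads, ...) is
# hypothetical.
# ---------------------------------------------------------------------------
def toy_descendants(children, start):
    """Return the set of nodes reachable from start (start included)."""
    seen = set([start])
    stack = [start]
    while stack:
        node = stack.pop()
        for child in children.get(node, ()):
            if child not in seen:
                seen.add(child)
                stack.append(child)
    return seen

def toy_filter_heads(children, candidate_heads, start):
    """Keep only the candidate heads that descend from start."""
    reachable = toy_descendants(children, start)
    return [h for h in candidate_heads if h in reachable]

# tiny history with a fork: 0 -> 1, then 1 -> 2 and 1 -> 3
_toy_children = {0: [1], 1: [2, 3]}
assert toy_filter_heads(_toy_children, [2, 3], start=1) == [2, 3]
assert toy_filter_heads(_toy_children, [2, 3], start=2) == [2]
# ---------------------------------------------------------------------------
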
1570 def help_(ui, name=None, with_version=False, unknowncmd=False):
1570 def help_(ui, name=None, with_version=False, unknowncmd=False):
1571 """show help for a given topic or a help overview
1571 """show help for a given topic or a help overview
1572
1572
1573 With no arguments, print a list of commands with short help messages.
1573 With no arguments, print a list of commands with short help messages.
1574
1574
1575 Given a topic, extension, or command name, print help for that
1575 Given a topic, extension, or command name, print help for that
1576 topic.
1576 topic.
1577
1577
1578 Returns 0 if successful.
1578 Returns 0 if successful.
1579 """
1579 """
1580 option_lists = []
1580 option_lists = []
1581 textwidth = util.termwidth() - 2
1581 textwidth = util.termwidth() - 2
1582
1582
1583 def addglobalopts(aliases):
1583 def addglobalopts(aliases):
1584 if ui.verbose:
1584 if ui.verbose:
1585 option_lists.append((_("global options:"), globalopts))
1585 option_lists.append((_("global options:"), globalopts))
1586 if name == 'shortlist':
1586 if name == 'shortlist':
1587 option_lists.append((_('use "hg help" for the full list '
1587 option_lists.append((_('use "hg help" for the full list '
1588 'of commands'), ()))
1588 'of commands'), ()))
1589 else:
1589 else:
1590 if name == 'shortlist':
1590 if name == 'shortlist':
1591 msg = _('use "hg help" for the full list of commands '
1591 msg = _('use "hg help" for the full list of commands '
1592 'or "hg -v" for details')
1592 'or "hg -v" for details')
1593 elif aliases:
1593 elif aliases:
1594 msg = _('use "hg -v help%s" to show aliases and '
1594 msg = _('use "hg -v help%s" to show aliases and '
1595 'global options') % (name and " " + name or "")
1595 'global options') % (name and " " + name or "")
1596 else:
1596 else:
1597 msg = _('use "hg -v help %s" to show global options') % name
1597 msg = _('use "hg -v help %s" to show global options') % name
1598 option_lists.append((msg, ()))
1598 option_lists.append((msg, ()))
1599
1599
1600 def helpcmd(name):
1600 def helpcmd(name):
1601 if with_version:
1601 if with_version:
1602 version_(ui)
1602 version_(ui)
1603 ui.write('\n')
1603 ui.write('\n')
1604
1604
1605 try:
1605 try:
1606 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1606 aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
1607 except error.AmbiguousCommand, inst:
1607 except error.AmbiguousCommand, inst:
1608 # py3k fix: except vars can't be used outside the scope of the
1608 # py3k fix: except vars can't be used outside the scope of the
1609 # except block, nor can be used inside a lambda. python issue4617
1609 # except block, nor can be used inside a lambda. python issue4617
1610 prefix = inst.args[0]
1610 prefix = inst.args[0]
1611 select = lambda c: c.lstrip('^').startswith(prefix)
1611 select = lambda c: c.lstrip('^').startswith(prefix)
1612 helplist(_('list of commands:\n\n'), select)
1612 helplist(_('list of commands:\n\n'), select)
1613 return
1613 return
1614
1614
1615 # check if it's an invalid alias and display its error if it is
1615 # check if it's an invalid alias and display its error if it is
1616 if getattr(entry[0], 'badalias', False):
1616 if getattr(entry[0], 'badalias', False):
1617 if not unknowncmd:
1617 if not unknowncmd:
1618 entry[0](ui)
1618 entry[0](ui)
1619 return
1619 return
1620
1620
1621 # synopsis
1621 # synopsis
1622 if len(entry) > 2:
1622 if len(entry) > 2:
1623 if entry[2].startswith('hg'):
1623 if entry[2].startswith('hg'):
1624 ui.write("%s\n" % entry[2])
1624 ui.write("%s\n" % entry[2])
1625 else:
1625 else:
1626 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1626 ui.write('hg %s %s\n' % (aliases[0], entry[2]))
1627 else:
1627 else:
1628 ui.write('hg %s\n' % aliases[0])
1628 ui.write('hg %s\n' % aliases[0])
1629
1629
1630 # aliases
1630 # aliases
1631 if not ui.quiet and len(aliases) > 1:
1631 if not ui.quiet and len(aliases) > 1:
1632 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1632 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1633
1633
1634 # description
1634 # description
1635 doc = gettext(entry[0].__doc__)
1635 doc = gettext(entry[0].__doc__)
1636 if not doc:
1636 if not doc:
1637 doc = _("(no help text available)")
1637 doc = _("(no help text available)")
1638 if hasattr(entry[0], 'definition'): # aliased command
1638 if hasattr(entry[0], 'definition'): # aliased command
1639 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
1639 doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
1640 if ui.quiet:
1640 if ui.quiet:
1641 doc = doc.splitlines()[0]
1641 doc = doc.splitlines()[0]
1642 keep = ui.verbose and ['verbose'] or []
1642 keep = ui.verbose and ['verbose'] or []
1643 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
1643 formatted, pruned = minirst.format(doc, textwidth, keep=keep)
1644 ui.write("\n%s\n" % formatted)
1644 ui.write("\n%s\n" % formatted)
1645 if pruned:
1645 if pruned:
1646 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
1646 ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
1647
1647
1648 if not ui.quiet:
1648 if not ui.quiet:
1649 # options
1649 # options
1650 if entry[1]:
1650 if entry[1]:
1651 option_lists.append((_("options:\n"), entry[1]))
1651 option_lists.append((_("options:\n"), entry[1]))
1652
1652
1653 addglobalopts(False)
1653 addglobalopts(False)
1654
1654
1655 def helplist(header, select=None):
1655 def helplist(header, select=None):
1656 h = {}
1656 h = {}
1657 cmds = {}
1657 cmds = {}
1658 for c, e in table.iteritems():
1658 for c, e in table.iteritems():
1659 f = c.split("|", 1)[0]
1659 f = c.split("|", 1)[0]
1660 if select and not select(f):
1660 if select and not select(f):
1661 continue
1661 continue
1662 if (not select and name != 'shortlist' and
1662 if (not select and name != 'shortlist' and
1663 e[0].__module__ != __name__):
1663 e[0].__module__ != __name__):
1664 continue
1664 continue
1665 if name == "shortlist" and not f.startswith("^"):
1665 if name == "shortlist" and not f.startswith("^"):
1666 continue
1666 continue
1667 f = f.lstrip("^")
1667 f = f.lstrip("^")
1668 if not ui.debugflag and f.startswith("debug"):
1668 if not ui.debugflag and f.startswith("debug"):
1669 continue
1669 continue
1670 doc = e[0].__doc__
1670 doc = e[0].__doc__
1671 if doc and 'DEPRECATED' in doc and not ui.verbose:
1671 if doc and 'DEPRECATED' in doc and not ui.verbose:
1672 continue
1672 continue
1673 doc = gettext(doc)
1673 doc = gettext(doc)
1674 if not doc:
1674 if not doc:
1675 doc = _("(no help text available)")
1675 doc = _("(no help text available)")
1676 h[f] = doc.splitlines()[0].rstrip()
1676 h[f] = doc.splitlines()[0].rstrip()
1677 cmds[f] = c.lstrip("^")
1677 cmds[f] = c.lstrip("^")
1678
1678
1679 if not h:
1679 if not h:
1680 ui.status(_('no commands defined\n'))
1680 ui.status(_('no commands defined\n'))
1681 return
1681 return
1682
1682
1683 ui.status(header)
1683 ui.status(header)
1684 fns = sorted(h)
1684 fns = sorted(h)
1685 m = max(map(len, fns))
1685 m = max(map(len, fns))
1686 for f in fns:
1686 for f in fns:
1687 if ui.verbose:
1687 if ui.verbose:
1688 commands = cmds[f].replace("|",", ")
1688 commands = cmds[f].replace("|",", ")
1689 ui.write(" %s:\n %s\n"%(commands, h[f]))
1689 ui.write(" %s:\n %s\n"%(commands, h[f]))
1690 else:
1690 else:
1691 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1691 ui.write(' %-*s %s\n' % (m, f, util.wrap(h[f], m + 4)))
1692
1692
1693 if not ui.quiet:
1693 if not ui.quiet:
1694 addglobalopts(True)
1694 addglobalopts(True)
1695
1695
1696 def helptopic(name):
1696 def helptopic(name):
1697 for names, header, doc in help.helptable:
1697 for names, header, doc in help.helptable:
1698 if name in names:
1698 if name in names:
1699 break
1699 break
1700 else:
1700 else:
1701 raise error.UnknownCommand(name)
1701 raise error.UnknownCommand(name)
1702
1702
1703 # description
1703 # description
1704 if not doc:
1704 if not doc:
1705 doc = _("(no help text available)")
1705 doc = _("(no help text available)")
1706 if hasattr(doc, '__call__'):
1706 if hasattr(doc, '__call__'):
1707 doc = doc()
1707 doc = doc()
1708
1708
1709 ui.write("%s\n\n" % header)
1709 ui.write("%s\n\n" % header)
1710 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1710 ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
1711
1711
1712 def helpext(name):
1712 def helpext(name):
1713 try:
1713 try:
1714 mod = extensions.find(name)
1714 mod = extensions.find(name)
1715 doc = gettext(mod.__doc__) or _('no help text available')
1715 doc = gettext(mod.__doc__) or _('no help text available')
1716 except KeyError:
1716 except KeyError:
1717 mod = None
1717 mod = None
1718 doc = extensions.disabledext(name)
1718 doc = extensions.disabledext(name)
1719 if not doc:
1719 if not doc:
1720 raise error.UnknownCommand(name)
1720 raise error.UnknownCommand(name)
1721
1721
1722 if '\n' not in doc:
1722 if '\n' not in doc:
1723 head, tail = doc, ""
1723 head, tail = doc, ""
1724 else:
1724 else:
1725 head, tail = doc.split('\n', 1)
1725 head, tail = doc.split('\n', 1)
1726 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1726 ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
1727 if tail:
1727 if tail:
1728 ui.write(minirst.format(tail, textwidth))
1728 ui.write(minirst.format(tail, textwidth))
1729 ui.status('\n\n')
1729 ui.status('\n\n')
1730
1730
1731 if mod:
1731 if mod:
1732 try:
1732 try:
1733 ct = mod.cmdtable
1733 ct = mod.cmdtable
1734 except AttributeError:
1734 except AttributeError:
1735 ct = {}
1735 ct = {}
1736 modcmds = set([c.split('|', 1)[0] for c in ct])
1736 modcmds = set([c.split('|', 1)[0] for c in ct])
1737 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1737 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1738 else:
1738 else:
1739 ui.write(_('use "hg help extensions" for information on enabling '
1739 ui.write(_('use "hg help extensions" for information on enabling '
1740 'extensions\n'))
1740 'extensions\n'))
1741
1741
1742 def helpextcmd(name):
1742 def helpextcmd(name):
1743 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1743 cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict'))
1744 doc = gettext(mod.__doc__).splitlines()[0]
1744 doc = gettext(mod.__doc__).splitlines()[0]
1745
1745
1746 msg = help.listexts(_("'%s' is provided by the following "
1746 msg = help.listexts(_("'%s' is provided by the following "
1747 "extension:") % cmd, {ext: doc}, len(ext),
1747 "extension:") % cmd, {ext: doc}, len(ext),
1748 indent=4)
1748 indent=4)
1749 ui.write(minirst.format(msg, textwidth))
1749 ui.write(minirst.format(msg, textwidth))
1750 ui.write('\n\n')
1750 ui.write('\n\n')
1751 ui.write(_('use "hg help extensions" for information on enabling '
1751 ui.write(_('use "hg help extensions" for information on enabling '
1752 'extensions\n'))
1752 'extensions\n'))
1753
1753
1754 if name and name != 'shortlist':
1754 if name and name != 'shortlist':
1755 i = None
1755 i = None
1756 if unknowncmd:
1756 if unknowncmd:
1757 queries = (helpextcmd,)
1757 queries = (helpextcmd,)
1758 else:
1758 else:
1759 queries = (helptopic, helpcmd, helpext, helpextcmd)
1759 queries = (helptopic, helpcmd, helpext, helpextcmd)
1760 for f in queries:
1760 for f in queries:
1761 try:
1761 try:
1762 f(name)
1762 f(name)
1763 i = None
1763 i = None
1764 break
1764 break
1765 except error.UnknownCommand, inst:
1765 except error.UnknownCommand, inst:
1766 i = inst
1766 i = inst
1767 if i:
1767 if i:
1768 raise i
1768 raise i
1769
1769
1770 else:
1770 else:
1771 # program name
1771 # program name
1772 if ui.verbose or with_version:
1772 if ui.verbose or with_version:
1773 version_(ui)
1773 version_(ui)
1774 else:
1774 else:
1775 ui.status(_("Mercurial Distributed SCM\n"))
1775 ui.status(_("Mercurial Distributed SCM\n"))
1776 ui.status('\n')
1776 ui.status('\n')
1777
1777
1778 # list of commands
1778 # list of commands
1779 if name == "shortlist":
1779 if name == "shortlist":
1780 header = _('basic commands:\n\n')
1780 header = _('basic commands:\n\n')
1781 else:
1781 else:
1782 header = _('list of commands:\n\n')
1782 header = _('list of commands:\n\n')
1783
1783
1784 helplist(header)
1784 helplist(header)
1785 if name != 'shortlist':
1785 if name != 'shortlist':
1786 exts, maxlength = extensions.enabled()
1786 exts, maxlength = extensions.enabled()
1787 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1787 text = help.listexts(_('enabled extensions:'), exts, maxlength)
1788 if text:
1788 if text:
1789 ui.write("\n%s\n" % minirst.format(text, textwidth))
1789 ui.write("\n%s\n" % minirst.format(text, textwidth))
1790
1790
1791 # list all option lists
1791 # list all option lists
1792 opt_output = []
1792 opt_output = []
1793 for title, options in option_lists:
1793 for title, options in option_lists:
1794 opt_output.append(("\n%s" % title, None))
1794 opt_output.append(("\n%s" % title, None))
1795 for shortopt, longopt, default, desc in options:
1795 for shortopt, longopt, default, desc in options:
1796 if _("DEPRECATED") in desc and not ui.verbose:
1796 if _("DEPRECATED") in desc and not ui.verbose:
1797 continue
1797 continue
1798 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1798 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1799 longopt and " --%s" % longopt),
1799 longopt and " --%s" % longopt),
1800 "%s%s" % (desc,
1800 "%s%s" % (desc,
1801 default
1801 default
1802 and _(" (default: %s)") % default
1802 and _(" (default: %s)") % default
1803 or "")))
1803 or "")))
1804
1804
1805 if not name:
1805 if not name:
1806 ui.write(_("\nadditional help topics:\n\n"))
1806 ui.write(_("\nadditional help topics:\n\n"))
1807 topics = []
1807 topics = []
1808 for names, header, doc in help.helptable:
1808 for names, header, doc in help.helptable:
1809 topics.append((sorted(names, key=len, reverse=True)[0], header))
1809 topics.append((sorted(names, key=len, reverse=True)[0], header))
1810 topics_len = max([len(s[0]) for s in topics])
1810 topics_len = max([len(s[0]) for s in topics])
1811 for t, desc in topics:
1811 for t, desc in topics:
1812 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1812 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1813
1813
1814 if opt_output:
1814 if opt_output:
1815 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1815 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1816 for first, second in opt_output:
1816 for first, second in opt_output:
1817 if second:
1817 if second:
1818 second = util.wrap(second, opts_len + 3)
1818 second = util.wrap(second, opts_len + 3)
1819 ui.write(" %-*s %s\n" % (opts_len, first, second))
1819 ui.write(" %-*s %s\n" % (opts_len, first, second))
1820 else:
1820 else:
1821 ui.write("%s\n" % first)
1821 ui.write("%s\n" % first)
1822
1822
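# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  help_() above resolves a name
# by trying several lookup functions in order (topic, command, extension,
# extension command) and only re-raises the last "unknown" error if every
# lookup fails.  A minimal version of that fallback chain, using a
# hypothetical ToyUnknown error and toy resolvers:
# ---------------------------------------------------------------------------
class ToyUnknown(Exception):
    pass

def toy_resolve(name, resolvers):
    """Return the first resolver result for name; re-raise if none match."""
    last = None
    for resolver in resolvers:
        try:
            return resolver(name)
        except ToyUnknown as err:
            last = err                 # remember, in case every lookup fails
    raise last or ToyUnknown(name)

def _topics(name):
    if name != 'dates':
        raise ToyUnknown(name)
    return 'help text for dates'

def _commands(name):
    if name != 'log':
        raise ToyUnknown(name)
    return 'help text for log'

assert toy_resolve('log', [_topics, _commands]) == 'help text for log'
# toy_resolve('bogus', [_topics, _commands]) would raise ToyUnknown('bogus')
# ---------------------------------------------------------------------------
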
1823 def identify(ui, repo, source=None,
1823 def identify(ui, repo, source=None,
1824 rev=None, num=None, id=None, branch=None, tags=None):
1824 rev=None, num=None, id=None, branch=None, tags=None):
1825 """identify the working copy or specified revision
1825 """identify the working copy or specified revision
1826
1826
1827 With no revision, print a summary of the current state of the
1827 With no revision, print a summary of the current state of the
1828 repository.
1828 repository.
1829
1829
1830 Specifying a path to a repository root or Mercurial bundle will
1830 Specifying a path to a repository root or Mercurial bundle will
1831 cause lookup to operate on that repository/bundle.
1831 cause lookup to operate on that repository/bundle.
1832
1832
1833 This summary identifies the repository state using one or two
1833 This summary identifies the repository state using one or two
1834 parent hash identifiers, followed by a "+" if there are
1834 parent hash identifiers, followed by a "+" if there are
1835 uncommitted changes in the working directory, a list of tags for
1835 uncommitted changes in the working directory, a list of tags for
1836 this revision and a branch name for non-default branches.
1836 this revision and a branch name for non-default branches.
1837
1837
1838 Returns 0 if successful.
1838 Returns 0 if successful.
1839 """
1839 """
1840
1840
1841 if not repo and not source:
1841 if not repo and not source:
1842 raise util.Abort(_("There is no Mercurial repository here "
1842 raise util.Abort(_("There is no Mercurial repository here "
1843 "(.hg not found)"))
1843 "(.hg not found)"))
1844
1844
1845 hexfunc = ui.debugflag and hex or short
1845 hexfunc = ui.debugflag and hex or short
1846 default = not (num or id or branch or tags)
1846 default = not (num or id or branch or tags)
1847 output = []
1847 output = []
1848
1848
1849 revs = []
1849 revs = []
1850 if source:
1850 if source:
1851 source, branches = hg.parseurl(ui.expandpath(source))
1851 source, branches = hg.parseurl(ui.expandpath(source))
1852 repo = hg.repository(ui, source)
1852 repo = hg.repository(ui, source)
1853 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1853 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
1854
1854
1855 if not repo.local():
1855 if not repo.local():
1856 if not rev and revs:
1856 if not rev and revs:
1857 rev = revs[0]
1857 rev = revs[0]
1858 if not rev:
1858 if not rev:
1859 rev = "tip"
1859 rev = "tip"
1860 if num or branch or tags:
1860 if num or branch or tags:
1861 raise util.Abort(
1861 raise util.Abort(
1862 "can't query remote revision number, branch, or tags")
1862 "can't query remote revision number, branch, or tags")
1863 output = [hexfunc(repo.lookup(rev))]
1863 output = [hexfunc(repo.lookup(rev))]
1864 elif not rev:
1864 elif not rev:
1865 ctx = repo[None]
1865 ctx = repo[None]
1866 parents = ctx.parents()
1866 parents = ctx.parents()
1867 changed = False
1867 changed = False
1868 if default or id or num:
1868 if default or id or num:
1869 changed = util.any(repo.status())
1869 changed = util.any(repo.status())
1870 if default or id:
1870 if default or id:
1871 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1871 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1872 (changed) and "+" or "")]
1872 (changed) and "+" or "")]
1873 if num:
1873 if num:
1874 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1874 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1875 (changed) and "+" or ""))
1875 (changed) and "+" or ""))
1876 else:
1876 else:
1877 ctx = repo[rev]
1877 ctx = repo[rev]
1878 if default or id:
1878 if default or id:
1879 output = [hexfunc(ctx.node())]
1879 output = [hexfunc(ctx.node())]
1880 if num:
1880 if num:
1881 output.append(str(ctx.rev()))
1881 output.append(str(ctx.rev()))
1882
1882
1883 if repo.local() and default and not ui.quiet:
1883 if repo.local() and default and not ui.quiet:
1884 b = encoding.tolocal(ctx.branch())
1884 b = encoding.tolocal(ctx.branch())
1885 if b != 'default':
1885 if b != 'default':
1886 output.append("(%s)" % b)
1886 output.append("(%s)" % b)
1887
1887
1888 # multiple tags for a single parent separated by '/'
1888 # multiple tags for a single parent separated by '/'
1889 t = "/".join(ctx.tags())
1889 t = "/".join(ctx.tags())
1890 if t:
1890 if t:
1891 output.append(t)
1891 output.append(t)
1892
1892
1893 if branch:
1893 if branch:
1894 output.append(encoding.tolocal(ctx.branch()))
1894 output.append(encoding.tolocal(ctx.branch()))
1895
1895
1896 if tags:
1896 if tags:
1897 output.extend(ctx.tags())
1897 output.extend(ctx.tags())
1898
1898
1899 ui.write("%s\n" % ' '.join(output))
1899 ui.write("%s\n" % ' '.join(output))
1900
1900
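# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  identify() above builds its
# default output by joining the parent hashes with '+' and appending a
# trailing '+' when the working directory has uncommitted changes, then adds
# a parenthesised non-default branch name and any tags joined by '/'.  The
# helper below mirrors that formatting on plain strings; all names are
# hypothetical.
# ---------------------------------------------------------------------------
def toy_identify(parent_hashes, dirty, tags=(), branch='default'):
    """Format an identify-style summary line from toy inputs."""
    out = ['+'.join(parent_hashes) + (dirty and '+' or '')]
    if branch != 'default':            # only non-default branches are shown
        out.append('(%s)' % branch)
    if tags:                           # multiple tags joined by '/'
        out.append('/'.join(tags))
    return ' '.join(out)

assert toy_identify(['8580ff50825a'], dirty=True) == '8580ff50825a+'
assert (toy_identify(['aaa111', 'bbb222'], dirty=False,
                     tags=['tip'], branch='stable')
        == 'aaa111+bbb222 (stable) tip')
# ---------------------------------------------------------------------------
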
1901 def import_(ui, repo, patch1, *patches, **opts):
1901 def import_(ui, repo, patch1, *patches, **opts):
1902 """import an ordered set of patches
1902 """import an ordered set of patches
1903
1903
1904 Import a list of patches and commit them individually (unless
1904 Import a list of patches and commit them individually (unless
1905 --no-commit is specified).
1905 --no-commit is specified).
1906
1906
1907 If there are outstanding changes in the working directory, import
1907 If there are outstanding changes in the working directory, import
1908 will abort unless given the -f/--force flag.
1908 will abort unless given the -f/--force flag.
1909
1909
1910 You can import a patch straight from a mail message. Even patches
1910 You can import a patch straight from a mail message. Even patches
1911 as attachments work (to use the body part, it must have type
1911 as attachments work (to use the body part, it must have type
1912 text/plain or text/x-patch). The From and Subject headers of the
1912 text/plain or text/x-patch). The From and Subject headers of the
1913 email message are used as the default committer and commit message.
1913 email message are used as the default committer and commit message.
1914 All text/plain body parts before the first diff are added to the
1914 All text/plain body parts before the first diff are added to the
1915 commit message.
1915 commit message.
1916
1916
1917 If the imported patch was generated by :hg:`export`, user and
1917 If the imported patch was generated by :hg:`export`, user and
1918 description from patch override values from message headers and
1918 description from patch override values from message headers and
1919 body. Values given on command line with -m/--message and -u/--user
1919 body. Values given on command line with -m/--message and -u/--user
1920 override these.
1920 override these.
1921
1921
1922 If --exact is specified, import will set the working directory to
1922 If --exact is specified, import will set the working directory to
1923 the parent of each patch before applying it, and will abort if the
1923 the parent of each patch before applying it, and will abort if the
1924 resulting changeset has a different ID than the one recorded in
1924 resulting changeset has a different ID than the one recorded in
1925 the patch. This may happen due to character set problems or other
1925 the patch. This may happen due to character set problems or other
1926 deficiencies in the text patch format.
1926 deficiencies in the text patch format.
1927
1927
1928 With -s/--similarity, hg will attempt to discover renames and
1928 With -s/--similarity, hg will attempt to discover renames and
1929 copies in the patch in the same way as 'addremove'.
1929 copies in the patch in the same way as 'addremove'.
1930
1930
1931 To read a patch from standard input, use "-" as the patch name. If
1931 To read a patch from standard input, use "-" as the patch name. If
1932 a URL is specified, the patch will be downloaded from it.
1932 a URL is specified, the patch will be downloaded from it.
1933 See :hg:`help dates` for a list of formats valid for -d/--date.
1933 See :hg:`help dates` for a list of formats valid for -d/--date.
1934
1934
1935 Returns 0 on success.
1935 Returns 0 on success.
1936 """
1936 """
1937 patches = (patch1,) + patches
1937 patches = (patch1,) + patches
1938
1938
1939 date = opts.get('date')
1939 date = opts.get('date')
1940 if date:
1940 if date:
1941 opts['date'] = util.parsedate(date)
1941 opts['date'] = util.parsedate(date)
1942
1942
1943 try:
1943 try:
1944 sim = float(opts.get('similarity') or 0)
1944 sim = float(opts.get('similarity') or 0)
1945 except ValueError:
1945 except ValueError:
1946 raise util.Abort(_('similarity must be a number'))
1946 raise util.Abort(_('similarity must be a number'))
1947 if sim < 0 or sim > 100:
1947 if sim < 0 or sim > 100:
1948 raise util.Abort(_('similarity must be between 0 and 100'))
1948 raise util.Abort(_('similarity must be between 0 and 100'))
1949
1949
1950 if opts.get('exact') or not opts.get('force'):
1950 if opts.get('exact') or not opts.get('force'):
1951 cmdutil.bail_if_changed(repo)
1951 cmdutil.bail_if_changed(repo)
1952
1952
1953 d = opts["base"]
1953 d = opts["base"]
1954 strip = opts["strip"]
1954 strip = opts["strip"]
1955 wlock = lock = None
1955 wlock = lock = None
1956
1956
1957 def tryone(ui, hunk):
1957 def tryone(ui, hunk):
1958 tmpname, message, user, date, branch, nodeid, p1, p2 = \
1958 tmpname, message, user, date, branch, nodeid, p1, p2 = \
1959 patch.extract(ui, hunk)
1959 patch.extract(ui, hunk)
1960
1960
1961 if not tmpname:
1961 if not tmpname:
1962 return None
1962 return None
1963 commitid = _('to working directory')
1963 commitid = _('to working directory')
1964
1964
1965 try:
1965 try:
1966 cmdline_message = cmdutil.logmessage(opts)
1966 cmdline_message = cmdutil.logmessage(opts)
1967 if cmdline_message:
1967 if cmdline_message:
1968 # pick up the cmdline msg
1968 # pick up the cmdline msg
1969 message = cmdline_message
1969 message = cmdline_message
1970 elif message:
1970 elif message:
1971 # pick up the patch msg
1971 # pick up the patch msg
1972 message = message.strip()
1972 message = message.strip()
1973 else:
1973 else:
1974 # launch the editor
1974 # launch the editor
1975 message = None
1975 message = None
1976 ui.debug('message:\n%s\n' % message)
1976 ui.debug('message:\n%s\n' % message)
1977
1977
1978 wp = repo.parents()
1978 wp = repo.parents()
1979 if opts.get('exact'):
1979 if opts.get('exact'):
1980 if not nodeid or not p1:
1980 if not nodeid or not p1:
1981 raise util.Abort(_('not a Mercurial patch'))
1981 raise util.Abort(_('not a Mercurial patch'))
1982 p1 = repo.lookup(p1)
1982 p1 = repo.lookup(p1)
1983 p2 = repo.lookup(p2 or hex(nullid))
1983 p2 = repo.lookup(p2 or hex(nullid))
1984
1984
1985 if p1 != wp[0].node():
1985 if p1 != wp[0].node():
1986 hg.clean(repo, p1)
1986 hg.clean(repo, p1)
1987 repo.dirstate.setparents(p1, p2)
1987 repo.dirstate.setparents(p1, p2)
1988 elif p2:
1988 elif p2:
1989 try:
1989 try:
1990 p1 = repo.lookup(p1)
1990 p1 = repo.lookup(p1)
1991 p2 = repo.lookup(p2)
1991 p2 = repo.lookup(p2)
1992 if p1 == wp[0].node():
1992 if p1 == wp[0].node():
1993 repo.dirstate.setparents(p1, p2)
1993 repo.dirstate.setparents(p1, p2)
1994 except error.RepoError:
1994 except error.RepoError:
1995 pass
1995 pass
1996 if opts.get('exact') or opts.get('import_branch'):
1996 if opts.get('exact') or opts.get('import_branch'):
1997 repo.dirstate.setbranch(branch or 'default')
1997 repo.dirstate.setbranch(branch or 'default')
1998
1998
1999 files = {}
1999 files = {}
2000 try:
2000 try:
2001 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2001 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
2002 files=files, eolmode=None)
2002 files=files, eolmode=None)
2003 finally:
2003 finally:
2004 files = patch.updatedir(ui, repo, files,
2004 files = patch.updatedir(ui, repo, files,
2005 similarity=sim / 100.0)
2005 similarity=sim / 100.0)
2006 if not opts.get('no_commit'):
2006 if not opts.get('no_commit'):
2007 if opts.get('exact'):
2007 if opts.get('exact'):
2008 m = None
2008 m = None
2009 else:
2009 else:
2010 m = cmdutil.matchfiles(repo, files or [])
2010 m = cmdutil.matchfiles(repo, files or [])
2011 n = repo.commit(message, opts.get('user') or user,
2011 n = repo.commit(message, opts.get('user') or user,
2012 opts.get('date') or date, match=m,
2012 opts.get('date') or date, match=m,
2013 editor=cmdutil.commiteditor)
2013 editor=cmdutil.commiteditor)
2014 if opts.get('exact'):
2014 if opts.get('exact'):
2015 if hex(n) != nodeid:
2015 if hex(n) != nodeid:
2016 repo.rollback()
2016 repo.rollback()
2017 raise util.Abort(_('patch is damaged'
2017 raise util.Abort(_('patch is damaged'
2018 ' or loses information'))
2018 ' or loses information'))
2019 # Force a dirstate write so that the next transaction
2019 # Force a dirstate write so that the next transaction
2020 # backs up an up-to-date file.
2020 # backs up an up-to-date file.
2021 repo.dirstate.write()
2021 repo.dirstate.write()
2022 if n:
2022 if n:
2023 commitid = short(n)
2023 commitid = short(n)
2024
2024
2025 return commitid
2025 return commitid
2026 finally:
2026 finally:
2027 os.unlink(tmpname)
2027 os.unlink(tmpname)
2028
2028
2029 try:
2029 try:
2030 wlock = repo.wlock()
2030 wlock = repo.wlock()
2031 lock = repo.lock()
2031 lock = repo.lock()
2032 lastcommit = None
2032 lastcommit = None
2033 for p in patches:
2033 for p in patches:
2034 pf = os.path.join(d, p)
2034 pf = os.path.join(d, p)
2035
2035
2036 if pf == '-':
2036 if pf == '-':
2037 ui.status(_("applying patch from stdin\n"))
2037 ui.status(_("applying patch from stdin\n"))
2038 pf = sys.stdin
2038 pf = sys.stdin
2039 else:
2039 else:
2040 ui.status(_("applying %s\n") % p)
2040 ui.status(_("applying %s\n") % p)
2041 pf = url.open(ui, pf)
2041 pf = url.open(ui, pf)
2042
2042
2043 haspatch = False
2043 haspatch = False
2044 for hunk in patch.split(pf):
2044 for hunk in patch.split(pf):
2045 commitid = tryone(ui, hunk)
2045 commitid = tryone(ui, hunk)
2046 if commitid:
2046 if commitid:
2047 haspatch = True
2047 haspatch = True
2048 if lastcommit:
2048 if lastcommit:
2049 ui.status(_('applied %s\n') % lastcommit)
2049 ui.status(_('applied %s\n') % lastcommit)
2050 lastcommit = commitid
2050 lastcommit = commitid
2051
2051
2052 if not haspatch:
2052 if not haspatch:
2053 raise util.Abort(_('no diffs found'))
2053 raise util.Abort(_('no diffs found'))
2054
2054
2055 finally:
2055 finally:
2056 release(lock, wlock)
2056 release(lock, wlock)
2057
2057
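# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  import_() above walks every
# patch file, splits it into hunks, applies each hunk, and aborts only when a
# whole file produced no usable hunk.  The toy loop below keeps just that
# control flow; toy_split/toy_apply stand in for patch.split()/tryone() and
# are hypothetical.
# ---------------------------------------------------------------------------
def toy_apply_patches(patch_files, toy_split, toy_apply):
    """Apply every hunk of every patch; fail if one file had no hunks."""
    applied = []
    for name in patch_files:
        haspatch = False
        for hunk in toy_split(name):
            commitid = toy_apply(hunk)
            if commitid:
                haspatch = True
                applied.append(commitid)
        if not haspatch:
            raise ValueError('no diffs found in %s' % name)
    return applied

# one file with two hunks, and an "apply" that names the commit after its hunk
_hunks = {'fix.patch': ['hunk-1', 'hunk-2']}
assert (toy_apply_patches(['fix.patch'], _hunks.get, lambda h: 'applied ' + h)
        == ['applied hunk-1', 'applied hunk-2'])
# ---------------------------------------------------------------------------
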
2058 def incoming(ui, repo, source="default", **opts):
2058 def incoming(ui, repo, source="default", **opts):
2059 """show new changesets found in source
2059 """show new changesets found in source
2060
2060
2061 Show new changesets found in the specified path/URL or the default
2061 Show new changesets found in the specified path/URL or the default
2062 pull location. These are the changesets that would have been pulled
2062 pull location. These are the changesets that would have been pulled
2063 if a pull had been requested at the time you issued this command.
2063 if a pull had been requested at the time you issued this command.
2064
2064
2065 For a remote repository, using --bundle avoids downloading the
2065 For a remote repository, using --bundle avoids downloading the
2066 changesets twice if the incoming is followed by a pull.
2066 changesets twice if the incoming is followed by a pull.
2067
2067
2068 See pull for valid source format details.
2068 See pull for valid source format details.
2069
2069
2070 Returns 0 if there are incoming changes, 1 otherwise.
2070 Returns 0 if there are incoming changes, 1 otherwise.
2071 """
2071 """
2072 limit = cmdutil.loglimit(opts)
2072 limit = cmdutil.loglimit(opts)
2073 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2073 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2074 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2074 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2075 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2075 ui.status(_('comparing with %s\n') % url.hidepassword(source))
2076 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2076 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2077 if revs:
2077 if revs:
2078 revs = [other.lookup(rev) for rev in revs]
2078 revs = [other.lookup(rev) for rev in revs]
2079 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
2079 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
2080 force=opts["force"])
2080 force=opts["force"])
2081 if not incoming:
2081 if not incoming:
2082 try:
2082 try:
2083 os.unlink(opts["bundle"])
2083 os.unlink(opts["bundle"])
2084 except:
2084 except:
2085 pass
2085 pass
2086 ui.status(_("no changes found\n"))
2086 ui.status(_("no changes found\n"))
2087 return 1
2087 return 1
2088
2088
2089 cleanup = None
2089 cleanup = None
2090 try:
2090 try:
2091 fname = opts["bundle"]
2091 fname = opts["bundle"]
2092 if fname or not other.local():
2092 if fname or not other.local():
2093 # create a bundle (uncompressed if other repo is not local)
2093 # create a bundle (uncompressed if other repo is not local)
2094
2094
2095 if revs is None and other.capable('changegroupsubset'):
2095 if revs is None and other.capable('changegroupsubset'):
2096 revs = rheads
2096 revs = rheads
2097
2097
2098 if revs is None:
2098 if revs is None:
2099 cg = other.changegroup(incoming, "incoming")
2099 cg = other.changegroup(incoming, "incoming")
2100 else:
2100 else:
2101 cg = other.changegroupsubset(incoming, revs, 'incoming')
2101 cg = other.changegroupsubset(incoming, revs, 'incoming')
2102 bundletype = other.local() and "HG10BZ" or "HG10UN"
2102 bundletype = other.local() and "HG10BZ" or "HG10UN"
2103 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
2103 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
2104 # keep written bundle?
2104 # keep written bundle?
2105 if opts["bundle"]:
2105 if opts["bundle"]:
2106 cleanup = None
2106 cleanup = None
2107 if not other.local():
2107 if not other.local():
2108 # use the created uncompressed bundlerepo
2108 # use the created uncompressed bundlerepo
2109 other = bundlerepo.bundlerepository(ui, repo.root, fname)
2109 other = bundlerepo.bundlerepository(ui, repo.root, fname)
2110
2110
2111 o = other.changelog.nodesbetween(incoming, revs)[0]
2111 o = other.changelog.nodesbetween(incoming, revs)[0]
2112 if opts.get('newest_first'):
2112 if opts.get('newest_first'):
2113 o.reverse()
2113 o.reverse()
2114 displayer = cmdutil.show_changeset(ui, other, opts)
2114 displayer = cmdutil.show_changeset(ui, other, opts)
2115 count = 0
2115 count = 0
2116 for n in o:
2116 for n in o:
2117 if limit is not None and count >= limit:
2117 if limit is not None and count >= limit:
2118 break
2118 break
2119 parents = [p for p in other.changelog.parents(n) if p != nullid]
2119 parents = [p for p in other.changelog.parents(n) if p != nullid]
2120 if opts.get('no_merges') and len(parents) == 2:
2120 if opts.get('no_merges') and len(parents) == 2:
2121 continue
2121 continue
2122 count += 1
2122 count += 1
2123 displayer.show(other[n])
2123 displayer.show(other[n])
2124 displayer.close()
2124 displayer.close()
2125 finally:
2125 finally:
2126 if hasattr(other, 'close'):
2126 if hasattr(other, 'close'):
2127 other.close()
2127 other.close()
2128 if cleanup:
2128 if cleanup:
2129 os.unlink(cleanup)
2129 os.unlink(cleanup)
2130
2130
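# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  incoming() above and
# outgoing() below share the same display loop: honour --limit, optionally
# reverse for --newest-first, and skip merge changesets (two non-null
# parents) when --no-merges is given.  A toy version over plain tuples, with
# hypothetical names throughout:
# ---------------------------------------------------------------------------
def toy_select(changesets, limit=None, newest_first=False, no_merges=False):
    """Pick the changesets the incoming/outgoing loop would display.

    changesets is a list of (name, parent_count) tuples, oldest first.
    """
    if newest_first:
        changesets = list(reversed(changesets))
    shown = []
    for name, parent_count in changesets:
        if limit is not None and len(shown) >= limit:
            break
        if no_merges and parent_count == 2:
            continue
        shown.append(name)
    return shown

_csets = [('c1', 1), ('merge', 2), ('c2', 1)]
assert toy_select(_csets, no_merges=True) == ['c1', 'c2']
assert toy_select(_csets, limit=1, newest_first=True) == ['c2']
# ---------------------------------------------------------------------------
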
2131 def init(ui, dest=".", **opts):
2131 def init(ui, dest=".", **opts):
2132 """create a new repository in the given directory
2132 """create a new repository in the given directory
2133
2133
2134 Initialize a new repository in the given directory. If the given
2134 Initialize a new repository in the given directory. If the given
2135 directory does not exist, it will be created.
2135 directory does not exist, it will be created.
2136
2136
2137 If no directory is given, the current directory is used.
2137 If no directory is given, the current directory is used.
2138
2138
2139 It is possible to specify an ``ssh://`` URL as the destination.
2139 It is possible to specify an ``ssh://`` URL as the destination.
2140 See :hg:`help urls` for more information.
2140 See :hg:`help urls` for more information.
2141
2141
2142 Returns 0 on success.
2142 Returns 0 on success.
2143 """
2143 """
2144 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2144 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
2145
2145
2146 def locate(ui, repo, *pats, **opts):
2146 def locate(ui, repo, *pats, **opts):
2147 """locate files matching specific patterns
2147 """locate files matching specific patterns
2148
2148
2149 Print files under Mercurial control in the working directory whose
2149 Print files under Mercurial control in the working directory whose
2150 names match the given patterns.
2150 names match the given patterns.
2151
2151
2152 By default, this command searches all directories in the working
2152 By default, this command searches all directories in the working
2153 directory. To search just the current directory and its
2153 directory. To search just the current directory and its
2154 subdirectories, use "--include .".
2154 subdirectories, use "--include .".
2155
2155
2156 If no patterns are given to match, this command prints the names
2156 If no patterns are given to match, this command prints the names
2157 of all files under Mercurial control in the working directory.
2157 of all files under Mercurial control in the working directory.
2158
2158
2159 If you want to feed the output of this command into the "xargs"
2159 If you want to feed the output of this command into the "xargs"
2160 command, use the -0 option to both this command and "xargs". This
2160 command, use the -0 option to both this command and "xargs". This
2161 will avoid the problem of "xargs" treating single filenames that
2161 will avoid the problem of "xargs" treating single filenames that
2162 contain whitespace as multiple filenames.
2162 contain whitespace as multiple filenames.
2163
2163
2164 Returns 0 if a match is found, 1 otherwise.
2164 Returns 0 if a match is found, 1 otherwise.
2165 """
2165 """
2166 end = opts.get('print0') and '\0' or '\n'
2166 end = opts.get('print0') and '\0' or '\n'
2167 rev = opts.get('rev') or None
2167 rev = opts.get('rev') or None
2168
2168
2169 ret = 1
2169 ret = 1
2170 m = cmdutil.match(repo, pats, opts, default='relglob')
2170 m = cmdutil.match(repo, pats, opts, default='relglob')
2171 m.bad = lambda x, y: False
2171 m.bad = lambda x, y: False
2172 for abs in repo[rev].walk(m):
2172 for abs in repo[rev].walk(m):
2173 if not rev and abs not in repo.dirstate:
2173 if not rev and abs not in repo.dirstate:
2174 continue
2174 continue
2175 if opts.get('fullpath'):
2175 if opts.get('fullpath'):
2176 ui.write(repo.wjoin(abs), end)
2176 ui.write(repo.wjoin(abs), end)
2177 else:
2177 else:
2178 ui.write(((pats and m.rel(abs)) or abs), end)
2178 ui.write(((pats and m.rel(abs)) or abs), end)
2179 ret = 0
2179 ret = 0
2180
2180
2181 return ret
2181 return ret
2182
2182
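# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  locate() above terminates
# each printed name with '\0' instead of '\n' when --print0 is given, so that
# "xargs -0" can consume names containing whitespace safely.  A toy version
# of that terminator choice (hypothetical helper name):
# ---------------------------------------------------------------------------
def toy_format_names(names, print0=False):
    """Join file names with the terminator locate would use."""
    end = print0 and '\0' or '\n'      # same and/or idiom as the code above
    return ''.join(name + end for name in names)

assert toy_format_names(['a.txt', 'b c.txt']) == 'a.txt\nb c.txt\n'
assert toy_format_names(['a.txt', 'b c.txt'], print0=True) == 'a.txt\0b c.txt\0'
# ---------------------------------------------------------------------------
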
2183 def log(ui, repo, *pats, **opts):
2183 def log(ui, repo, *pats, **opts):
2184 """show revision history of entire repository or files
2184 """show revision history of entire repository or files
2185
2185
2186 Print the revision history of the specified files or the entire
2186 Print the revision history of the specified files or the entire
2187 project.
2187 project.
2188
2188
2189 File history is shown without following rename or copy history of
2189 File history is shown without following rename or copy history of
2190 files. Use -f/--follow with a filename to follow history across
2190 files. Use -f/--follow with a filename to follow history across
2191 renames and copies. --follow without a filename will only show
2191 renames and copies. --follow without a filename will only show
2192 ancestors or descendants of the starting revision. --follow-first
2192 ancestors or descendants of the starting revision. --follow-first
2193 only follows the first parent of merge revisions.
2193 only follows the first parent of merge revisions.
2194
2194
2195 If no revision range is specified, the default is tip:0 unless
2195 If no revision range is specified, the default is tip:0 unless
2196 --follow is set, in which case the working directory parent is
2196 --follow is set, in which case the working directory parent is
2197 used as the starting revision.
2197 used as the starting revision.
2198
2198
2199 See :hg:`help dates` for a list of formats valid for -d/--date.
2199 See :hg:`help dates` for a list of formats valid for -d/--date.
2200
2200
2201 By default this command prints revision number and changeset id,
2201 By default this command prints revision number and changeset id,
2202 tags, non-trivial parents, user, date and time, and a summary for
2202 tags, non-trivial parents, user, date and time, and a summary for
2203 each commit. When the -v/--verbose switch is used, the list of
2203 each commit. When the -v/--verbose switch is used, the list of
2204 changed files and full commit message are shown.
2204 changed files and full commit message are shown.
2205
2205
2206 NOTE: log -p/--patch may generate unexpected diff output for merge
2206 NOTE: log -p/--patch may generate unexpected diff output for merge
2207 changesets, as it will only compare the merge changeset against
2207 changesets, as it will only compare the merge changeset against
2208 its first parent. Also, only files different from BOTH parents
2208 its first parent. Also, only files different from BOTH parents
2209 will appear in files:.
2209 will appear in files:.
2210
2210
2211 Returns 0 on success.
2211 Returns 0 on success.
2212 """
2212 """
2213
2213
2214 matchfn = cmdutil.match(repo, pats, opts)
2214 matchfn = cmdutil.match(repo, pats, opts)
2215 limit = cmdutil.loglimit(opts)
2215 limit = cmdutil.loglimit(opts)
2216 count = 0
2216 count = 0
2217
2217
2218 endrev = None
2218 endrev = None
2219 if opts.get('copies') and opts.get('rev'):
2219 if opts.get('copies') and opts.get('rev'):
2220 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2220 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
2221
2221
2222 df = False
2222 df = False
2223 if opts["date"]:
2223 if opts["date"]:
2224 df = util.matchdate(opts["date"])
2224 df = util.matchdate(opts["date"])
2225
2225
2226 branches = opts.get('branch', []) + opts.get('only_branch', [])
2226 branches = opts.get('branch', []) + opts.get('only_branch', [])
2227 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2227 opts['branch'] = [repo.lookupbranch(b) for b in branches]
2228
2228
2229 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2229 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
2230 def prep(ctx, fns):
2230 def prep(ctx, fns):
2231 rev = ctx.rev()
2231 rev = ctx.rev()
2232 parents = [p for p in repo.changelog.parentrevs(rev)
2232 parents = [p for p in repo.changelog.parentrevs(rev)
2233 if p != nullrev]
2233 if p != nullrev]
2234 if opts.get('no_merges') and len(parents) == 2:
2234 if opts.get('no_merges') and len(parents) == 2:
2235 return
2235 return
2236 if opts.get('only_merges') and len(parents) != 2:
2236 if opts.get('only_merges') and len(parents) != 2:
2237 return
2237 return
2238 if opts.get('branch') and ctx.branch() not in opts['branch']:
2238 if opts.get('branch') and ctx.branch() not in opts['branch']:
2239 return
2239 return
2240 if df and not df(ctx.date()[0]):
2240 if df and not df(ctx.date()[0]):
2241 return
2241 return
2242 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2242 if opts['user'] and not [k for k in opts['user'] if k in ctx.user()]:
2243 return
2243 return
2244 if opts.get('keyword'):
2244 if opts.get('keyword'):
2245 for k in [kw.lower() for kw in opts['keyword']]:
2245 for k in [kw.lower() for kw in opts['keyword']]:
2246 if (k in ctx.user().lower() or
2246 if (k in ctx.user().lower() or
2247 k in ctx.description().lower() or
2247 k in ctx.description().lower() or
2248 k in " ".join(ctx.files()).lower()):
2248 k in " ".join(ctx.files()).lower()):
2249 break
2249 break
2250 else:
2250 else:
2251 return
2251 return
2252
2252
2253 copies = None
2253 copies = None
2254 if opts.get('copies') and rev:
2254 if opts.get('copies') and rev:
2255 copies = []
2255 copies = []
2256 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2256 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
2257 for fn in ctx.files():
2257 for fn in ctx.files():
2258 rename = getrenamed(fn, rev)
2258 rename = getrenamed(fn, rev)
2259 if rename:
2259 if rename:
2260 copies.append((fn, rename[0]))
2260 copies.append((fn, rename[0]))
2261
2261
2262 displayer.show(ctx, copies=copies)
2262 displayer.show(ctx, copies=copies)
2263
2263
2264 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2264 for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep):
2265 if count == limit:
2265 if count == limit:
2266 break
2266 break
2267 if displayer.flush(ctx.rev()):
2267 if displayer.flush(ctx.rev()):
2268 count += 1
2268 count += 1
2269 displayer.close()
2269 displayer.close()
2270
2270
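# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  The prep() closure above
# skips a revision for -k/--keyword only when the for/else "return" is
# reached, i.e. when no keyword occurs, case-insensitively, in the user, the
# description or the joined file list.  The same predicate written as a
# standalone toy function (hypothetical name):
# ---------------------------------------------------------------------------
def toy_keyword_match(keywords, user, description, files):
    """True if any keyword appears in user, description or file names."""
    haystacks = (user.lower(), description.lower(),
                 ' '.join(files).lower())
    for kw in [k.lower() for k in keywords]:
        if any(kw in h for h in haystacks):
            return True
    return False

assert toy_keyword_match(['BUG'], 'alice', 'fix bug in parser', ['parser.py'])
assert not toy_keyword_match(['doc'], 'bob', 'refactor core', ['core.py'])
# ---------------------------------------------------------------------------
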
2271 def manifest(ui, repo, node=None, rev=None):
2271 def manifest(ui, repo, node=None, rev=None):
2272 """output the current or given revision of the project manifest
2272 """output the current or given revision of the project manifest
2273
2273
2274 Print a list of version controlled files for the given revision.
2274 Print a list of version controlled files for the given revision.
2275 If no revision is given, the first parent of the working directory
2275 If no revision is given, the first parent of the working directory
2276 is used, or the null revision if no revision is checked out.
2276 is used, or the null revision if no revision is checked out.
2277
2277
2278 With -v, print file permissions, symlink and executable bits.
2278 With -v, print file permissions, symlink and executable bits.
2279 With --debug, print file revision hashes.
2279 With --debug, print file revision hashes.
2280
2280
2281 Returns 0 on success.
2281 Returns 0 on success.
2282 """
2282 """
2283
2283
2284 if rev and node:
2284 if rev and node:
2285 raise util.Abort(_("please specify just one revision"))
2285 raise util.Abort(_("please specify just one revision"))
2286
2286
2287 if not node:
2287 if not node:
2288 node = rev
2288 node = rev
2289
2289
2290 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2290 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2291 ctx = repo[node]
2291 ctx = repo[node]
2292 for f in ctx:
2292 for f in ctx:
2293 if ui.debugflag:
2293 if ui.debugflag:
2294 ui.write("%40s " % hex(ctx.manifest()[f]))
2294 ui.write("%40s " % hex(ctx.manifest()[f]))
2295 if ui.verbose:
2295 if ui.verbose:
2296 ui.write(decor[ctx.flags(f)])
2296 ui.write(decor[ctx.flags(f)])
2297 ui.write("%s\n" % f)
2297 ui.write("%s\n" % f)
2298
2298
2299 def merge(ui, repo, node=None, **opts):
2299 def merge(ui, repo, node=None, **opts):
2300 """merge working directory with another revision
2300 """merge working directory with another revision
2301
2301
2302 The current working directory is updated with all changes made in
2302 The current working directory is updated with all changes made in
2303 the requested revision since the last common predecessor revision.
2303 the requested revision since the last common predecessor revision.
2304
2304
2305 Files that changed between either parent are marked as changed for
2305 Files that changed between either parent are marked as changed for
2306 the next commit and a commit must be performed before any further
2306 the next commit and a commit must be performed before any further
2307 updates to the repository are allowed. The next commit will have
2307 updates to the repository are allowed. The next commit will have
2308 two parents.
2308 two parents.
2309
2309
2310 If no revision is specified, the working directory's parent is a
2310 If no revision is specified, the working directory's parent is a
2311 head revision, and the current branch contains exactly one other
2311 head revision, and the current branch contains exactly one other
2312 head, the other head is merged with by default. Otherwise, an
2312 head, the other head is merged with by default. Otherwise, an
2313 explicit revision with which to merge must be provided.
2313 explicit revision with which to merge must be provided.
2314
2314
2315 Returns 0 on success, 1 if there are unresolved files.
2315 Returns 0 on success, 1 if there are unresolved files.
2316 """
2316 """
2317
2317
2318 if opts.get('rev') and node:
2318 if opts.get('rev') and node:
2319 raise util.Abort(_("please specify just one revision"))
2319 raise util.Abort(_("please specify just one revision"))
2320 if not node:
2320 if not node:
2321 node = opts.get('rev')
2321 node = opts.get('rev')
2322
2322
2323 if not node:
2323 if not node:
2324 branch = repo.changectx(None).branch()
2324 branch = repo.changectx(None).branch()
2325 bheads = repo.branchheads(branch)
2325 bheads = repo.branchheads(branch)
2326 if len(bheads) > 2:
2326 if len(bheads) > 2:
2327 ui.warn(_("abort: branch '%s' has %d heads - "
2327 ui.warn(_("abort: branch '%s' has %d heads - "
2328 "please merge with an explicit rev\n")
2328 "please merge with an explicit rev\n")
2329 % (branch, len(bheads)))
2329 % (branch, len(bheads)))
2330 ui.status(_("(run 'hg heads .' to see heads)\n"))
2330 ui.status(_("(run 'hg heads .' to see heads)\n"))
2331 return False
2331 return False
2332
2332
2333 parent = repo.dirstate.parents()[0]
2333 parent = repo.dirstate.parents()[0]
2334 if len(bheads) == 1:
2334 if len(bheads) == 1:
2335 if len(repo.heads()) > 1:
2335 if len(repo.heads()) > 1:
2336 ui.warn(_("abort: branch '%s' has one head - "
2336 ui.warn(_("abort: branch '%s' has one head - "
2337 "please merge with an explicit rev\n" % branch))
2337 "please merge with an explicit rev\n" % branch))
2338 ui.status(_("(run 'hg heads' to see all heads)\n"))
2338 ui.status(_("(run 'hg heads' to see all heads)\n"))
2339 return False
2339 return False
2340 msg = _('there is nothing to merge')
2340 msg = _('there is nothing to merge')
2341 if parent != repo.lookup(repo[None].branch()):
2341 if parent != repo.lookup(repo[None].branch()):
2342 msg = _('%s - use "hg update" instead') % msg
2342 msg = _('%s - use "hg update" instead') % msg
2343 raise util.Abort(msg)
2343 raise util.Abort(msg)
2344
2344
2345 if parent not in bheads:
2345 if parent not in bheads:
2346 raise util.Abort(_('working dir not at a head rev - '
2346 raise util.Abort(_('working dir not at a head rev - '
2347 'use "hg update" or merge with an explicit rev'))
2347 'use "hg update" or merge with an explicit rev'))
2348 node = parent == bheads[0] and bheads[-1] or bheads[0]
2348 node = parent == bheads[0] and bheads[-1] or bheads[0]
2349
2349
2350 if opts.get('preview'):
2350 if opts.get('preview'):
2351 # find nodes that are ancestors of p2 but not of p1
2351 # find nodes that are ancestors of p2 but not of p1
2352 p1 = repo.lookup('.')
2352 p1 = repo.lookup('.')
2353 p2 = repo.lookup(node)
2353 p2 = repo.lookup(node)
2354 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2354 nodes = repo.changelog.findmissing(common=[p1], heads=[p2])
2355
2355
2356 displayer = cmdutil.show_changeset(ui, repo, opts)
2356 displayer = cmdutil.show_changeset(ui, repo, opts)
2357 for node in nodes:
2357 for node in nodes:
2358 displayer.show(repo[node])
2358 displayer.show(repo[node])
2359 displayer.close()
2359 displayer.close()
2360 return 0
2360 return 0
2361
2361
2362 return hg.merge(repo, node, force=opts.get('force'))
2362 return hg.merge(repo, node, force=opts.get('force'))
2363
2363
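# ---------------------------------------------------------------------------
# [editor's sketch -- not part of commands.py]  When no revision is given,
# merge() above picks "the other head": if the working directory parent is
# the first branch head it merges with the last one, otherwise with the
# first.  The and/or one-liner in the code is equivalent to the small helper
# below (hypothetical name, plain values instead of nodes); note that it
# relies on the head values being truthy, as the original does.
# ---------------------------------------------------------------------------
def toy_other_head(parent, bheads):
    """Return the branch head to merge with when none was named."""
    return parent == bheads[0] and bheads[-1] or bheads[0]

assert toy_other_head('a', ['a', 'b']) == 'b'   # parent is the first head
assert toy_other_head('b', ['a', 'b']) == 'a'   # parent is the other head
# ---------------------------------------------------------------------------
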
2364 def outgoing(ui, repo, dest=None, **opts):
2364 def outgoing(ui, repo, dest=None, **opts):
2365 """show changesets not found in the destination
2365 """show changesets not found in the destination
2366
2366
2367 Show changesets not found in the specified destination repository
2367 Show changesets not found in the specified destination repository
2368 or the default push location. These are the changesets that would
2368 or the default push location. These are the changesets that would
2369 be pushed if a push was requested.
2369 be pushed if a push was requested.
2370
2370
2371 See pull for details of valid destination formats.
2371 See pull for details of valid destination formats.
2372
2372
2373 Returns 0 if there are outgoing changes, 1 otherwise.
2373 Returns 0 if there are outgoing changes, 1 otherwise.
2374 """
2374 """
2375 limit = cmdutil.loglimit(opts)
2375 limit = cmdutil.loglimit(opts)
2376 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2376 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2377 dest, branches = hg.parseurl(dest, opts.get('branch'))
2377 dest, branches = hg.parseurl(dest, opts.get('branch'))
2378 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2378 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2379 if revs:
2379 if revs:
2380 revs = [repo.lookup(rev) for rev in revs]
2380 revs = [repo.lookup(rev) for rev in revs]
2381
2381
2382 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2382 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2383 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2383 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2384 o = repo.findoutgoing(other, force=opts.get('force'))
2384 o = repo.findoutgoing(other, force=opts.get('force'))
2385 if not o:
2385 if not o:
2386 ui.status(_("no changes found\n"))
2386 ui.status(_("no changes found\n"))
2387 return 1
2387 return 1
2388 o = repo.changelog.nodesbetween(o, revs)[0]
2388 o = repo.changelog.nodesbetween(o, revs)[0]
2389 if opts.get('newest_first'):
2389 if opts.get('newest_first'):
2390 o.reverse()
2390 o.reverse()
2391 displayer = cmdutil.show_changeset(ui, repo, opts)
2391 displayer = cmdutil.show_changeset(ui, repo, opts)
2392 count = 0
2392 count = 0
2393 for n in o:
2393 for n in o:
2394 if limit is not None and count >= limit:
2394 if limit is not None and count >= limit:
2395 break
2395 break
2396 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2396 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2397 if opts.get('no_merges') and len(parents) == 2:
2397 if opts.get('no_merges') and len(parents) == 2:
2398 continue
2398 continue
2399 count += 1
2399 count += 1
2400 displayer.show(repo[n])
2400 displayer.show(repo[n])
2401 displayer.close()
2401 displayer.close()
2402
2402
2403 def parents(ui, repo, file_=None, **opts):
2403 def parents(ui, repo, file_=None, **opts):
2404 """show the parents of the working directory or revision
2404 """show the parents of the working directory or revision
2405
2405
2406 Print the working directory's parent revisions. If a revision is
2406 Print the working directory's parent revisions. If a revision is
2407 given via -r/--rev, the parent of that revision will be printed.
2407 given via -r/--rev, the parent of that revision will be printed.
2408 If a file argument is given, the revision in which the file was
2408 If a file argument is given, the revision in which the file was
2409 last changed (before the working directory revision or the
2409 last changed (before the working directory revision or the
2410 argument to --rev if given) is printed.
2410 argument to --rev if given) is printed.
2411
2411
2412 Returns 0 on success.
2412 Returns 0 on success.
2413 """
2413 """
2414 rev = opts.get('rev')
2414 rev = opts.get('rev')
2415 if rev:
2415 if rev:
2416 ctx = repo[rev]
2416 ctx = repo[rev]
2417 else:
2417 else:
2418 ctx = repo[None]
2418 ctx = repo[None]
2419
2419
2420 if file_:
2420 if file_:
2421 m = cmdutil.match(repo, (file_,), opts)
2421 m = cmdutil.match(repo, (file_,), opts)
2422 if m.anypats() or len(m.files()) != 1:
2422 if m.anypats() or len(m.files()) != 1:
2423 raise util.Abort(_('can only specify an explicit filename'))
2423 raise util.Abort(_('can only specify an explicit filename'))
2424 file_ = m.files()[0]
2424 file_ = m.files()[0]
2425 filenodes = []
2425 filenodes = []
2426 for cp in ctx.parents():
2426 for cp in ctx.parents():
2427 if not cp:
2427 if not cp:
2428 continue
2428 continue
2429 try:
2429 try:
2430 filenodes.append(cp.filenode(file_))
2430 filenodes.append(cp.filenode(file_))
2431 except error.LookupError:
2431 except error.LookupError:
2432 pass
2432 pass
2433 if not filenodes:
2433 if not filenodes:
2434 raise util.Abort(_("'%s' not found in manifest!") % file_)
2434 raise util.Abort(_("'%s' not found in manifest!") % file_)
2435 fl = repo.file(file_)
2435 fl = repo.file(file_)
2436 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2436 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2437 else:
2437 else:
2438 p = [cp.node() for cp in ctx.parents()]
2438 p = [cp.node() for cp in ctx.parents()]
2439
2439
2440 displayer = cmdutil.show_changeset(ui, repo, opts)
2440 displayer = cmdutil.show_changeset(ui, repo, opts)
2441 for n in p:
2441 for n in p:
2442 if n != nullid:
2442 if n != nullid:
2443 displayer.show(repo[n])
2443 displayer.show(repo[n])
2444 displayer.close()
2444 displayer.close()
2445
2445
2446 def paths(ui, repo, search=None):
2446 def paths(ui, repo, search=None):
2447 """show aliases for remote repositories
2447 """show aliases for remote repositories
2448
2448
2449 Show definition of symbolic path name NAME. If no name is given,
2449 Show definition of symbolic path name NAME. If no name is given,
2450 show definition of all available names.
2450 show definition of all available names.
2451
2451
2452 Path names are defined in the [paths] section of
2452 Path names are defined in the [paths] section of
2453 ``/etc/mercurial/hgrc`` and ``$HOME/.hgrc``. If run inside a
2453 ``/etc/mercurial/hgrc`` and ``$HOME/.hgrc``. If run inside a
2454 repository, ``.hg/hgrc`` is used, too.
2454 repository, ``.hg/hgrc`` is used, too.
2455
2455
2456 The path names ``default`` and ``default-push`` have a special
2456 The path names ``default`` and ``default-push`` have a special
2457 meaning. When performing a push or pull operation, they are used
2457 meaning. When performing a push or pull operation, they are used
2458 as fallbacks if no location is specified on the command-line.
2458 as fallbacks if no location is specified on the command-line.
2459 When ``default-push`` is set, it will be used for push and
2459 When ``default-push`` is set, it will be used for push and
2460 ``default`` will be used for pull; otherwise ``default`` is used
2460 ``default`` will be used for pull; otherwise ``default`` is used
2461 as the fallback for both. When cloning a repository, the clone
2461 as the fallback for both. When cloning a repository, the clone
2462 source is written as ``default`` in ``.hg/hgrc``. Note that
2462 source is written as ``default`` in ``.hg/hgrc``. Note that
2463 ``default`` and ``default-push`` apply to all inbound (e.g.
2463 ``default`` and ``default-push`` apply to all inbound (e.g.
2464 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2464 :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and
2465 :hg:`bundle`) operations.
2465 :hg:`bundle`) operations.
2466
2466
2467 See :hg:`help urls` for more information.
2467 See :hg:`help urls` for more information.
2468 """
2468 """
2469 if search:
2469 if search:
2470 for name, path in ui.configitems("paths"):
2470 for name, path in ui.configitems("paths"):
2471 if name == search:
2471 if name == search:
2472 ui.write("%s\n" % url.hidepassword(path))
2472 ui.write("%s\n" % url.hidepassword(path))
2473 return
2473 return
2474 ui.warn(_("not found!\n"))
2474 ui.warn(_("not found!\n"))
2475 return 1
2475 return 1
2476 else:
2476 else:
2477 for name, path in ui.configitems("paths"):
2477 for name, path in ui.configitems("paths"):
2478 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2478 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2479
2479
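# Illustrative sketch only: the same lookup over a plain dict standing in
# for ui.configitems("paths").  The real code also masks passwords with
# url.hidepassword and keeps configuration order (sorted here just to make
# the sample output stable); the URLs are made-up placeholders.
def show_paths(paths, search=None):
    if search:
        if search in paths:
            print(paths[search])
            return
        print("not found!")
        return 1
    for name, path in sorted(paths.items()):
        print("%s = %s" % (name, path))

show_paths({'default': 'http://hg.example.com/repo',
            'default-push': 'ssh://hg.example.com//srv/repo'})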
2480 def postincoming(ui, repo, modheads, optupdate, checkout):
2480 def postincoming(ui, repo, modheads, optupdate, checkout):
2481 if modheads == 0:
2481 if modheads == 0:
2482 return
2482 return
2483 if optupdate:
2483 if optupdate:
2484 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2484 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2485 return hg.update(repo, checkout)
2485 return hg.update(repo, checkout)
2486 else:
2486 else:
2487 ui.status(_("not updating, since new heads added\n"))
2487 ui.status(_("not updating, since new heads added\n"))
2488 if modheads > 1:
2488 if modheads > 1:
2489 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2489 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2490 else:
2490 else:
2491 ui.status(_("(run 'hg update' to get a working copy)\n"))
2491 ui.status(_("(run 'hg update' to get a working copy)\n"))
2492
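# Illustrative sketch only: the update decision above as a standalone
# predicate.  After changesets were added, the working copy is updated only
# when that cannot be ambiguous: the operation added at most one head, the
# repository has a single branch head, or an explicit revision to check out
# was requested.
def should_update(modheads, branchheads, checkout):
    return modheads <= 1 or branchheads == 1 or bool(checkout)

print(should_update(2, 1, None))    # True: only one branch head in total
print(should_update(2, 3, None))    # False: the user is told to merge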
2492
2493 def pull(ui, repo, source="default", **opts):
2493 def pull(ui, repo, source="default", **opts):
2494 """pull changes from the specified source
2494 """pull changes from the specified source
2495
2495
2496 Pull changes from a remote repository to a local one.
2496 Pull changes from a remote repository to a local one.
2497
2497
2498 This finds all changes from the repository at the specified path
2498 This finds all changes from the repository at the specified path
2499 or URL and adds them to a local repository (the current one unless
2499 or URL and adds them to a local repository (the current one unless
2500 -R is specified). By default, this does not update the copy of the
2500 -R is specified). By default, this does not update the copy of the
2501 project in the working directory.
2501 project in the working directory.
2502
2502
2503 Use :hg:`incoming` if you want to see what would have been added
2503 Use :hg:`incoming` if you want to see what would have been added
2504 by a pull at the time you issued this command. If you then decide
2504 by a pull at the time you issued this command. If you then decide
2505 to add those changes to the repository, you should use :hg:`pull
2505 to add those changes to the repository, you should use :hg:`pull
2506 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
2506 -r X` where ``X`` is the last changeset listed by :hg:`incoming`.
2507
2507
2508 If SOURCE is omitted, the 'default' path will be used.
2508 If SOURCE is omitted, the 'default' path will be used.
2509 See :hg:`help urls` for more information.
2509 See :hg:`help urls` for more information.
2510
2510
2511 Returns 0 on success, 1 if an update had unresolved files.
2511 Returns 0 on success, 1 if an update had unresolved files.
2512 """
2512 """
2513 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2513 source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
2514 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2514 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2515 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2515 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2516 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2516 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
2517 if revs:
2517 if revs:
2518 try:
2518 try:
2519 revs = [other.lookup(rev) for rev in revs]
2519 revs = [other.lookup(rev) for rev in revs]
2520 except error.CapabilityError:
2520 except error.CapabilityError:
2521 err = _("Other repository doesn't support revision lookup, "
2521 err = _("Other repository doesn't support revision lookup, "
2522 "so a rev cannot be specified.")
2522 "so a rev cannot be specified.")
2523 raise util.Abort(err)
2523 raise util.Abort(err)
2524
2524
2525 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2525 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2526 if checkout:
2526 if checkout:
2527 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2527 checkout = str(repo.changelog.rev(other.lookup(checkout)))
2528 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2528 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2529
2529
2530 def push(ui, repo, dest=None, **opts):
2530 def push(ui, repo, dest=None, **opts):
2531 """push changes to the specified destination
2531 """push changes to the specified destination
2532
2532
2533 Push changes from the local repository to the specified destination.
2533 Push changes from the local repository to the specified destination.
2534
2534
2535 This is the symmetrical operation for pull. It moves changes from
2535 This is the symmetrical operation for pull. It moves changes from
2536 the current repository to a different one. If the destination is
2536 the current repository to a different one. If the destination is
2537 local, this is identical to a pull in that directory from the
2537 local, this is identical to a pull in that directory from the
2538 current one.
2538 current one.
2539
2539
2540 By default, push will refuse to run if it detects the result would
2540 By default, push will refuse to run if it detects the result would
2541 increase the number of remote heads. This generally indicates the
2541 increase the number of remote heads. This generally indicates the
2542 user forgot to pull and merge before pushing.
2542 user forgot to pull and merge before pushing.
2543
2543
2544 If -r/--rev is used, the named revision and all its ancestors will
2544 If -r/--rev is used, the named revision and all its ancestors will
2545 be pushed to the remote repository.
2545 be pushed to the remote repository.
2546
2546
2547 Please see :hg:`help urls` for important details about ``ssh://``
2547 Please see :hg:`help urls` for important details about ``ssh://``
2548 URLs. If DESTINATION is omitted, a default path will be used.
2548 URLs. If DESTINATION is omitted, a default path will be used.
2549
2549
2550 Returns 0 if push was successful, 1 if nothing to push.
2550 Returns 0 if push was successful, 1 if nothing to push.
2551 """
2551 """
2552 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2552 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2553 dest, branches = hg.parseurl(dest, opts.get('branch'))
2553 dest, branches = hg.parseurl(dest, opts.get('branch'))
2554 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2554 revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
2555 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2555 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2556 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2556 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2557 if revs:
2557 if revs:
2558 revs = [repo.lookup(rev) for rev in revs]
2558 revs = [repo.lookup(rev) for rev in revs]
2559
2559
2560 # push subrepos depth-first for coherent ordering
2560 # push subrepos depth-first for coherent ordering
2561 c = repo['']
2561 c = repo['']
2562 subs = c.substate # only repos that are committed
2562 subs = c.substate # only repos that are committed
2563 for s in sorted(subs):
2563 for s in sorted(subs):
2564 if not c.sub(s).push(opts.get('force')):
2564 if not c.sub(s).push(opts.get('force')):
2565 return False
2565 return False
2566
2566
2567 r = repo.push(other, opts.get('force'), revs=revs)
2567 r = repo.push(other, opts.get('force'), revs=revs,
2568 newbranch=opts.get('new_branch'))
2568 return r == 0
2569 return r == 0
2569
2570
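# Illustrative sketch only (an assumption about option plumbing, not code
# from this file): long option names arrive in the opts dict with dashes
# turned into underscores, so a --new-branch style flag is read back as
# opts.get('new_branch') and forwarded to the repository as the newbranch
# keyword, as in the call above.
def forward_push(push_func, other, opts, revs=None):
    return push_func(other, opts.get('force'), revs=revs,
                     newbranch=opts.get('new_branch'))

print(forward_push(
    lambda other, force, revs=None, newbranch=False:
        'force=%s newbranch=%s' % (force, newbranch),
    other=None, opts={'new_branch': True}))   # force=None newbranch=True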
2570 def recover(ui, repo):
2571 def recover(ui, repo):
2571 """roll back an interrupted transaction
2572 """roll back an interrupted transaction
2572
2573
2573 Recover from an interrupted commit or pull.
2574 Recover from an interrupted commit or pull.
2574
2575
2575 This command tries to fix the repository status after an
2576 This command tries to fix the repository status after an
2576 interrupted operation. It should only be necessary when Mercurial
2577 interrupted operation. It should only be necessary when Mercurial
2577 suggests it.
2578 suggests it.
2578
2579
2579 Returns 0 if successful, 1 if nothing to recover or verify fails.
2580 Returns 0 if successful, 1 if nothing to recover or verify fails.
2580 """
2581 """
2581 if repo.recover():
2582 if repo.recover():
2582 return hg.verify(repo)
2583 return hg.verify(repo)
2583 return 1
2584 return 1
2584
2585
2585 def remove(ui, repo, *pats, **opts):
2586 def remove(ui, repo, *pats, **opts):
2586 """remove the specified files on the next commit
2587 """remove the specified files on the next commit
2587
2588
2588 Schedule the indicated files for removal from the repository.
2589 Schedule the indicated files for removal from the repository.
2589
2590
2590 This only removes files from the current branch, not from the
2591 This only removes files from the current branch, not from the
2591 entire project history. -A/--after can be used to remove only
2592 entire project history. -A/--after can be used to remove only
2592 files that have already been deleted, -f/--force can be used to
2593 files that have already been deleted, -f/--force can be used to
2593 force deletion, and -Af can be used to remove files from the next
2594 force deletion, and -Af can be used to remove files from the next
2594 revision without deleting them from the working directory.
2595 revision without deleting them from the working directory.
2595
2596
2596 The following table details the behavior of remove for different
2597 The following table details the behavior of remove for different
2597 file states (columns) and option combinations (rows). The file
2598 file states (columns) and option combinations (rows). The file
2598 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2599 states are Added [A], Clean [C], Modified [M] and Missing [!] (as
2599 reported by :hg:`status`). The actions are Warn, Remove (from
2600 reported by :hg:`status`). The actions are Warn, Remove (from
2600 branch) and Delete (from disk)::
2601 branch) and Delete (from disk)::
2601
2602
2602 A C M !
2603 A C M !
2603 none W RD W R
2604 none W RD W R
2604 -f R RD RD R
2605 -f R RD RD R
2605 -A W W W R
2606 -A W W W R
2606 -Af R R R R
2607 -Af R R R R
2607
2608
2608 This command schedules the files to be removed at the next commit.
2609 This command schedules the files to be removed at the next commit.
2609 To undo a remove before that, see :hg:`revert`.
2610 To undo a remove before that, see :hg:`revert`.
2610
2611
2611 Returns 0 on success, 1 if any warnings encountered.
2612 Returns 0 on success, 1 if any warnings encountered.
2612 """
2613 """
2613
2614
2614 ret = 0
2615 ret = 0
2615 after, force = opts.get('after'), opts.get('force')
2616 after, force = opts.get('after'), opts.get('force')
2616 if not pats and not after:
2617 if not pats and not after:
2617 raise util.Abort(_('no files specified'))
2618 raise util.Abort(_('no files specified'))
2618
2619
2619 m = cmdutil.match(repo, pats, opts)
2620 m = cmdutil.match(repo, pats, opts)
2620 s = repo.status(match=m, clean=True)
2621 s = repo.status(match=m, clean=True)
2621 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2622 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2622
2623
2623 for f in m.files():
2624 for f in m.files():
2624 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2625 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2625 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2626 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2626 ret = 1
2627 ret = 1
2627
2628
2628 def warn(files, reason):
2629 def warn(files, reason):
2629 for f in files:
2630 for f in files:
2630 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2631 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2631 % (m.rel(f), reason))
2632 % (m.rel(f), reason))
2632 ret = 1
2633 ret = 1
2633
2634
2634 if force:
2635 if force:
2635 remove, forget = modified + deleted + clean, added
2636 remove, forget = modified + deleted + clean, added
2636 elif after:
2637 elif after:
2637 remove, forget = deleted, []
2638 remove, forget = deleted, []
2638 warn(modified + added + clean, _('still exists'))
2639 warn(modified + added + clean, _('still exists'))
2639 else:
2640 else:
2640 remove, forget = deleted + clean, []
2641 remove, forget = deleted + clean, []
2641 warn(modified, _('is modified'))
2642 warn(modified, _('is modified'))
2642 warn(added, _('has been marked for add'))
2643 warn(added, _('has been marked for add'))
2643
2644
2644 for f in sorted(remove + forget):
2645 for f in sorted(remove + forget):
2645 if ui.verbose or not m.exact(f):
2646 if ui.verbose or not m.exact(f):
2646 ui.status(_('removing %s\n') % m.rel(f))
2647 ui.status(_('removing %s\n') % m.rel(f))
2647
2648
2648 repo.forget(forget)
2649 repo.forget(forget)
2649 repo.remove(remove, unlink=not after)
2650 repo.remove(remove, unlink=not after)
2650 return ret
2651 return ret
2651
2652
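# Illustrative sketch only: the behaviour table from the docstring above as
# a lookup, with W = warn, R = remove from branch, RD = remove and delete
# from disk, indexed by option row and file state column.
remove_actions = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}
print(remove_actions['-f']['M'])   # RD: --force removes and deletes a modified file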
2652 def rename(ui, repo, *pats, **opts):
2653 def rename(ui, repo, *pats, **opts):
2653 """rename files; equivalent of copy + remove
2654 """rename files; equivalent of copy + remove
2654
2655
2655 Mark dest as copies of sources; mark sources for deletion. If dest
2656 Mark dest as copies of sources; mark sources for deletion. If dest
2656 is a directory, copies are put in that directory. If dest is a
2657 is a directory, copies are put in that directory. If dest is a
2657 file, there can only be one source.
2658 file, there can only be one source.
2658
2659
2659 By default, this command copies the contents of files as they
2660 By default, this command copies the contents of files as they
2660 exist in the working directory. If invoked with -A/--after, the
2661 exist in the working directory. If invoked with -A/--after, the
2661 operation is recorded, but no copying is performed.
2662 operation is recorded, but no copying is performed.
2662
2663
2663 This command takes effect at the next commit. To undo a rename
2664 This command takes effect at the next commit. To undo a rename
2664 before that, see :hg:`revert`.
2665 before that, see :hg:`revert`.
2665
2666
2666 Returns 0 on success, 1 if errors are encountered.
2667 Returns 0 on success, 1 if errors are encountered.
2667 """
2668 """
2668 wlock = repo.wlock(False)
2669 wlock = repo.wlock(False)
2669 try:
2670 try:
2670 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2671 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2671 finally:
2672 finally:
2672 wlock.release()
2673 wlock.release()
2673
2674
2674 def resolve(ui, repo, *pats, **opts):
2675 def resolve(ui, repo, *pats, **opts):
2675 """various operations to help finish a merge
2676 """various operations to help finish a merge
2676
2677
2677 This command includes several actions that are often useful while
2678 This command includes several actions that are often useful while
2678 performing a merge, after running ``merge`` but before running
2679 performing a merge, after running ``merge`` but before running
2679 ``commit``. (It is only meaningful if your working directory has
2680 ``commit``. (It is only meaningful if your working directory has
2680 two parents.) It is most relevant for merges with unresolved
2681 two parents.) It is most relevant for merges with unresolved
2681 conflicts, which are typically a result of non-interactive merging with
2682 conflicts, which are typically a result of non-interactive merging with
2682 ``internal:merge`` or a command-line merge tool like ``diff3``.
2683 ``internal:merge`` or a command-line merge tool like ``diff3``.
2683
2684
2684 The available actions are:
2685 The available actions are:
2685
2686
2686 1) list files that were merged with conflicts (U, for unresolved)
2687 1) list files that were merged with conflicts (U, for unresolved)
2687 and without conflicts (R, for resolved): ``hg resolve -l``
2688 and without conflicts (R, for resolved): ``hg resolve -l``
2688 (this is like ``status`` for merges)
2689 (this is like ``status`` for merges)
2689 2) record that you have resolved conflicts in certain files:
2690 2) record that you have resolved conflicts in certain files:
2690 ``hg resolve -m [file ...]`` (default: mark all unresolved files)
2691 ``hg resolve -m [file ...]`` (default: mark all unresolved files)
2691 3) forget that you have resolved conflicts in certain files:
2692 3) forget that you have resolved conflicts in certain files:
2692 ``hg resolve -u [file ...]`` (default: unmark all resolved files)
2693 ``hg resolve -u [file ...]`` (default: unmark all resolved files)
2693 4) discard your current attempt(s) at resolving conflicts and
2694 4) discard your current attempt(s) at resolving conflicts and
2694 restart the merge from scratch: ``hg resolve file...``
2695 restart the merge from scratch: ``hg resolve file...``
2695 (or ``-a`` for all unresolved files)
2696 (or ``-a`` for all unresolved files)
2696
2697
2697 Note that Mercurial will not let you commit files with unresolved merge
2698 Note that Mercurial will not let you commit files with unresolved merge
2698 conflicts. You must use ``hg resolve -m ...`` before you can commit
2699 conflicts. You must use ``hg resolve -m ...`` before you can commit
2699 after a conflicting merge.
2700 after a conflicting merge.
2700
2701
2701 Returns 0 on success, 1 if any files fail a resolve attempt.
2702 Returns 0 on success, 1 if any files fail a resolve attempt.
2702 """
2703 """
2703
2704
2704 all, mark, unmark, show, nostatus = \
2705 all, mark, unmark, show, nostatus = \
2705 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2706 [opts.get(o) for o in 'all mark unmark list no_status'.split()]
2706
2707
2707 if (show and (mark or unmark)) or (mark and unmark):
2708 if (show and (mark or unmark)) or (mark and unmark):
2708 raise util.Abort(_("too many options specified"))
2709 raise util.Abort(_("too many options specified"))
2709 if pats and all:
2710 if pats and all:
2710 raise util.Abort(_("can't specify --all and patterns"))
2711 raise util.Abort(_("can't specify --all and patterns"))
2711 if not (all or pats or show or mark or unmark):
2712 if not (all or pats or show or mark or unmark):
2712 raise util.Abort(_('no files or directories specified; '
2713 raise util.Abort(_('no files or directories specified; '
2713 'use --all to remerge all files'))
2714 'use --all to remerge all files'))
2714
2715
2715 ms = mergemod.mergestate(repo)
2716 ms = mergemod.mergestate(repo)
2716 m = cmdutil.match(repo, pats, opts)
2717 m = cmdutil.match(repo, pats, opts)
2717 ret = 0
2718 ret = 0
2718
2719
2719 for f in ms:
2720 for f in ms:
2720 if m(f):
2721 if m(f):
2721 if show:
2722 if show:
2722 if nostatus:
2723 if nostatus:
2723 ui.write("%s\n" % f)
2724 ui.write("%s\n" % f)
2724 else:
2725 else:
2725 ui.write("%s %s\n" % (ms[f].upper(), f),
2726 ui.write("%s %s\n" % (ms[f].upper(), f),
2726 label='resolve.' +
2727 label='resolve.' +
2727 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
2728 {'u': 'unresolved', 'r': 'resolved'}[ms[f]])
2728 elif mark:
2729 elif mark:
2729 ms.mark(f, "r")
2730 ms.mark(f, "r")
2730 elif unmark:
2731 elif unmark:
2731 ms.mark(f, "u")
2732 ms.mark(f, "u")
2732 else:
2733 else:
2733 wctx = repo[None]
2734 wctx = repo[None]
2734 mctx = wctx.parents()[-1]
2735 mctx = wctx.parents()[-1]
2735
2736
2736 # backup pre-resolve (merge uses .orig for its own purposes)
2737 # backup pre-resolve (merge uses .orig for its own purposes)
2737 a = repo.wjoin(f)
2738 a = repo.wjoin(f)
2738 util.copyfile(a, a + ".resolve")
2739 util.copyfile(a, a + ".resolve")
2739
2740
2740 # resolve file
2741 # resolve file
2741 if ms.resolve(f, wctx, mctx):
2742 if ms.resolve(f, wctx, mctx):
2742 ret = 1
2743 ret = 1
2743
2744
2744 # replace filemerge's .orig file with our resolve file
2745 # replace filemerge's .orig file with our resolve file
2745 util.rename(a + ".resolve", a + ".orig")
2746 util.rename(a + ".resolve", a + ".orig")
2746 return ret
2747 return ret
2747
2748
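# Illustrative sketch only: the -l listing above with a plain dict standing
# in for the merge state object, where 'u' marks unresolved and 'r' marks
# resolved files, matching the U/R codes printed by ``hg resolve -l``.
def list_resolve(ms, nostatus=False):
    for f in sorted(ms):
        if nostatus:
            print(f)
        else:
            print('%s %s' % (ms[f].upper(), f))

list_resolve({'foo.c': 'u', 'bar.c': 'r'})   # R bar.c, then U foo.c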
2748 def revert(ui, repo, *pats, **opts):
2749 def revert(ui, repo, *pats, **opts):
2749 """restore individual files or directories to an earlier state
2750 """restore individual files or directories to an earlier state
2750
2751
2751 (Use update -r to check out earlier revisions; revert does not
2752 (Use update -r to check out earlier revisions; revert does not
2752 change the working directory parents.)
2753 change the working directory parents.)
2753
2754
2754 With no revision specified, revert the named files or directories
2755 With no revision specified, revert the named files or directories
2755 to the contents they had in the parent of the working directory.
2756 to the contents they had in the parent of the working directory.
2756 This restores the contents of the affected files to an unmodified
2757 This restores the contents of the affected files to an unmodified
2757 state and unschedules adds, removes, copies, and renames. If the
2758 state and unschedules adds, removes, copies, and renames. If the
2758 working directory has two parents, you must explicitly specify a
2759 working directory has two parents, you must explicitly specify a
2759 revision.
2760 revision.
2760
2761
2761 Using the -r/--rev option, revert the given files or directories
2762 Using the -r/--rev option, revert the given files or directories
2762 to their contents as of a specific revision. This can be helpful
2763 to their contents as of a specific revision. This can be helpful
2763 to "roll back" some or all of an earlier change. See :hg:`help
2764 to "roll back" some or all of an earlier change. See :hg:`help
2764 dates` for a list of formats valid for -d/--date.
2765 dates` for a list of formats valid for -d/--date.
2765
2766
2766 Revert modifies the working directory. It does not commit any
2767 Revert modifies the working directory. It does not commit any
2767 changes, or change the parent of the working directory. If you
2768 changes, or change the parent of the working directory. If you
2768 revert to a revision other than the parent of the working
2769 revert to a revision other than the parent of the working
2769 directory, the reverted files will thus appear modified
2770 directory, the reverted files will thus appear modified
2770 afterwards.
2771 afterwards.
2771
2772
2772 If a file has been deleted, it is restored. If the executable mode
2773 If a file has been deleted, it is restored. If the executable mode
2773 of a file was changed, it is reset.
2774 of a file was changed, it is reset.
2774
2775
2775 If names are given, all files matching the names are reverted.
2776 If names are given, all files matching the names are reverted.
2776 If no arguments are given, no files are reverted.
2777 If no arguments are given, no files are reverted.
2777
2778
2778 Modified files are saved with a .orig suffix before reverting.
2779 Modified files are saved with a .orig suffix before reverting.
2779 To disable these backups, use --no-backup.
2780 To disable these backups, use --no-backup.
2780
2781
2781 Returns 0 on success.
2782 Returns 0 on success.
2782 """
2783 """
2783
2784
2784 if opts["date"]:
2785 if opts["date"]:
2785 if opts["rev"]:
2786 if opts["rev"]:
2786 raise util.Abort(_("you can't specify a revision and a date"))
2787 raise util.Abort(_("you can't specify a revision and a date"))
2787 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2788 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2788
2789
2789 if not pats and not opts.get('all'):
2790 if not pats and not opts.get('all'):
2790 raise util.Abort(_('no files or directories specified; '
2791 raise util.Abort(_('no files or directories specified; '
2791 'use --all to revert the whole repo'))
2792 'use --all to revert the whole repo'))
2792
2793
2793 parent, p2 = repo.dirstate.parents()
2794 parent, p2 = repo.dirstate.parents()
2794 if not opts.get('rev') and p2 != nullid:
2795 if not opts.get('rev') and p2 != nullid:
2795 raise util.Abort(_('uncommitted merge - please provide a '
2796 raise util.Abort(_('uncommitted merge - please provide a '
2796 'specific revision'))
2797 'specific revision'))
2797 ctx = repo[opts.get('rev')]
2798 ctx = repo[opts.get('rev')]
2798 node = ctx.node()
2799 node = ctx.node()
2799 mf = ctx.manifest()
2800 mf = ctx.manifest()
2800 if node == parent:
2801 if node == parent:
2801 pmf = mf
2802 pmf = mf
2802 else:
2803 else:
2803 pmf = None
2804 pmf = None
2804
2805
2805 # need all matching names in dirstate and manifest of target rev,
2806 # need all matching names in dirstate and manifest of target rev,
2806 # so have to walk both. do not print errors if files exist in one
2807 # so have to walk both. do not print errors if files exist in one
2807 # but not other.
2808 # but not other.
2808
2809
2809 names = {}
2810 names = {}
2810
2811
2811 wlock = repo.wlock()
2812 wlock = repo.wlock()
2812 try:
2813 try:
2813 # walk dirstate.
2814 # walk dirstate.
2814
2815
2815 m = cmdutil.match(repo, pats, opts)
2816 m = cmdutil.match(repo, pats, opts)
2816 m.bad = lambda x, y: False
2817 m.bad = lambda x, y: False
2817 for abs in repo.walk(m):
2818 for abs in repo.walk(m):
2818 names[abs] = m.rel(abs), m.exact(abs)
2819 names[abs] = m.rel(abs), m.exact(abs)
2819
2820
2820 # walk target manifest.
2821 # walk target manifest.
2821
2822
2822 def badfn(path, msg):
2823 def badfn(path, msg):
2823 if path in names:
2824 if path in names:
2824 return
2825 return
2825 path_ = path + '/'
2826 path_ = path + '/'
2826 for f in names:
2827 for f in names:
2827 if f.startswith(path_):
2828 if f.startswith(path_):
2828 return
2829 return
2829 ui.warn("%s: %s\n" % (m.rel(path), msg))
2830 ui.warn("%s: %s\n" % (m.rel(path), msg))
2830
2831
2831 m = cmdutil.match(repo, pats, opts)
2832 m = cmdutil.match(repo, pats, opts)
2832 m.bad = badfn
2833 m.bad = badfn
2833 for abs in repo[node].walk(m):
2834 for abs in repo[node].walk(m):
2834 if abs not in names:
2835 if abs not in names:
2835 names[abs] = m.rel(abs), m.exact(abs)
2836 names[abs] = m.rel(abs), m.exact(abs)
2836
2837
2837 m = cmdutil.matchfiles(repo, names)
2838 m = cmdutil.matchfiles(repo, names)
2838 changes = repo.status(match=m)[:4]
2839 changes = repo.status(match=m)[:4]
2839 modified, added, removed, deleted = map(set, changes)
2840 modified, added, removed, deleted = map(set, changes)
2840
2841
2841 # if f is a rename, also revert the source
2842 # if f is a rename, also revert the source
2842 cwd = repo.getcwd()
2843 cwd = repo.getcwd()
2843 for f in added:
2844 for f in added:
2844 src = repo.dirstate.copied(f)
2845 src = repo.dirstate.copied(f)
2845 if src and src not in names and repo.dirstate[src] == 'r':
2846 if src and src not in names and repo.dirstate[src] == 'r':
2846 removed.add(src)
2847 removed.add(src)
2847 names[src] = (repo.pathto(src, cwd), True)
2848 names[src] = (repo.pathto(src, cwd), True)
2848
2849
2849 def removeforget(abs):
2850 def removeforget(abs):
2850 if repo.dirstate[abs] == 'a':
2851 if repo.dirstate[abs] == 'a':
2851 return _('forgetting %s\n')
2852 return _('forgetting %s\n')
2852 return _('removing %s\n')
2853 return _('removing %s\n')
2853
2854
2854 revert = ([], _('reverting %s\n'))
2855 revert = ([], _('reverting %s\n'))
2855 add = ([], _('adding %s\n'))
2856 add = ([], _('adding %s\n'))
2856 remove = ([], removeforget)
2857 remove = ([], removeforget)
2857 undelete = ([], _('undeleting %s\n'))
2858 undelete = ([], _('undeleting %s\n'))
2858
2859
2859 disptable = (
2860 disptable = (
2860 # dispatch table:
2861 # dispatch table:
2861 # file state
2862 # file state
2862 # action if in target manifest
2863 # action if in target manifest
2863 # action if not in target manifest
2864 # action if not in target manifest
2864 # make backup if in target manifest
2865 # make backup if in target manifest
2865 # make backup if not in target manifest
2866 # make backup if not in target manifest
2866 (modified, revert, remove, True, True),
2867 (modified, revert, remove, True, True),
2867 (added, revert, remove, True, False),
2868 (added, revert, remove, True, False),
2868 (removed, undelete, None, False, False),
2869 (removed, undelete, None, False, False),
2869 (deleted, revert, remove, False, False),
2870 (deleted, revert, remove, False, False),
2870 )
2871 )
2871
2872
2872 for abs, (rel, exact) in sorted(names.items()):
2873 for abs, (rel, exact) in sorted(names.items()):
2873 mfentry = mf.get(abs)
2874 mfentry = mf.get(abs)
2874 target = repo.wjoin(abs)
2875 target = repo.wjoin(abs)
2875 def handle(xlist, dobackup):
2876 def handle(xlist, dobackup):
2876 xlist[0].append(abs)
2877 xlist[0].append(abs)
2877 if dobackup and not opts.get('no_backup') and util.lexists(target):
2878 if dobackup and not opts.get('no_backup') and util.lexists(target):
2878 bakname = "%s.orig" % rel
2879 bakname = "%s.orig" % rel
2879 ui.note(_('saving current version of %s as %s\n') %
2880 ui.note(_('saving current version of %s as %s\n') %
2880 (rel, bakname))
2881 (rel, bakname))
2881 if not opts.get('dry_run'):
2882 if not opts.get('dry_run'):
2882 util.copyfile(target, bakname)
2883 util.copyfile(target, bakname)
2883 if ui.verbose or not exact:
2884 if ui.verbose or not exact:
2884 msg = xlist[1]
2885 msg = xlist[1]
2885 if not isinstance(msg, basestring):
2886 if not isinstance(msg, basestring):
2886 msg = msg(abs)
2887 msg = msg(abs)
2887 ui.status(msg % rel)
2888 ui.status(msg % rel)
2888 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2889 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2889 if abs not in table:
2890 if abs not in table:
2890 continue
2891 continue
2891 # file has changed in dirstate
2892 # file has changed in dirstate
2892 if mfentry:
2893 if mfentry:
2893 handle(hitlist, backuphit)
2894 handle(hitlist, backuphit)
2894 elif misslist is not None:
2895 elif misslist is not None:
2895 handle(misslist, backupmiss)
2896 handle(misslist, backupmiss)
2896 break
2897 break
2897 else:
2898 else:
2898 if abs not in repo.dirstate:
2899 if abs not in repo.dirstate:
2899 if mfentry:
2900 if mfentry:
2900 handle(add, True)
2901 handle(add, True)
2901 elif exact:
2902 elif exact:
2902 ui.warn(_('file not managed: %s\n') % rel)
2903 ui.warn(_('file not managed: %s\n') % rel)
2903 continue
2904 continue
2904 # file has not changed in dirstate
2905 # file has not changed in dirstate
2905 if node == parent:
2906 if node == parent:
2906 if exact:
2907 if exact:
2907 ui.warn(_('no changes needed to %s\n') % rel)
2908 ui.warn(_('no changes needed to %s\n') % rel)
2908 continue
2909 continue
2909 if pmf is None:
2910 if pmf is None:
2910 # only need parent manifest in this unlikely case,
2911 # only need parent manifest in this unlikely case,
2911 # so do not read by default
2912 # so do not read by default
2912 pmf = repo[parent].manifest()
2913 pmf = repo[parent].manifest()
2913 if abs in pmf:
2914 if abs in pmf:
2914 if mfentry:
2915 if mfentry:
2915 # if version of file is same in parent and target
2916 # if version of file is same in parent and target
2916 # manifests, do nothing
2917 # manifests, do nothing
2917 if (pmf[abs] != mfentry or
2918 if (pmf[abs] != mfentry or
2918 pmf.flags(abs) != mf.flags(abs)):
2919 pmf.flags(abs) != mf.flags(abs)):
2919 handle(revert, False)
2920 handle(revert, False)
2920 else:
2921 else:
2921 handle(remove, False)
2922 handle(remove, False)
2922
2923
2923 if not opts.get('dry_run'):
2924 if not opts.get('dry_run'):
2924 def checkout(f):
2925 def checkout(f):
2925 fc = ctx[f]
2926 fc = ctx[f]
2926 repo.wwrite(f, fc.data(), fc.flags())
2927 repo.wwrite(f, fc.data(), fc.flags())
2927
2928
2928 audit_path = util.path_auditor(repo.root)
2929 audit_path = util.path_auditor(repo.root)
2929 for f in remove[0]:
2930 for f in remove[0]:
2930 if repo.dirstate[f] == 'a':
2931 if repo.dirstate[f] == 'a':
2931 repo.dirstate.forget(f)
2932 repo.dirstate.forget(f)
2932 continue
2933 continue
2933 audit_path(f)
2934 audit_path(f)
2934 try:
2935 try:
2935 util.unlink(repo.wjoin(f))
2936 util.unlink(repo.wjoin(f))
2936 except OSError:
2937 except OSError:
2937 pass
2938 pass
2938 repo.dirstate.remove(f)
2939 repo.dirstate.remove(f)
2939
2940
2940 normal = None
2941 normal = None
2941 if node == parent:
2942 if node == parent:
2942 # We're reverting to our parent. If possible, we'd like status
2943 # We're reverting to our parent. If possible, we'd like status
2943 # to report the file as clean. We have to use normallookup for
2944 # to report the file as clean. We have to use normallookup for
2944 # merges to avoid losing information about merged/dirty files.
2945 # merges to avoid losing information about merged/dirty files.
2945 if p2 != nullid:
2946 if p2 != nullid:
2946 normal = repo.dirstate.normallookup
2947 normal = repo.dirstate.normallookup
2947 else:
2948 else:
2948 normal = repo.dirstate.normal
2949 normal = repo.dirstate.normal
2949 for f in revert[0]:
2950 for f in revert[0]:
2950 checkout(f)
2951 checkout(f)
2951 if normal:
2952 if normal:
2952 normal(f)
2953 normal(f)
2953
2954
2954 for f in add[0]:
2955 for f in add[0]:
2955 checkout(f)
2956 checkout(f)
2956 repo.dirstate.add(f)
2957 repo.dirstate.add(f)
2957
2958
2958 normal = repo.dirstate.normallookup
2959 normal = repo.dirstate.normallookup
2959 if node == parent and p2 == nullid:
2960 if node == parent and p2 == nullid:
2960 normal = repo.dirstate.normal
2961 normal = repo.dirstate.normal
2961 for f in undelete[0]:
2962 for f in undelete[0]:
2962 checkout(f)
2963 checkout(f)
2963 normal(f)
2964 normal(f)
2964
2965
2965 finally:
2966 finally:
2966 wlock.release()
2967 wlock.release()
2967
2968
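# Illustrative sketch only: the dispatch table above condensed into a dict.
# For each status bucket it lists the action when the file exists in the
# target manifest, the action when it does not, and whether a .orig backup
# is made in each of those two cases.
revert_disptable = {
    'modified': ('revert',   'remove', True,  True),
    'added':    ('revert',   'remove', True,  False),
    'removed':  ('undelete', None,     False, False),
    'deleted':  ('revert',   'remove', False, False),
}
hit, miss, backuphit, backupmiss = revert_disptable['added']
in_target_manifest = False
print(miss if not in_target_manifest else hit)   # remove: added file absent from target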
2968 def rollback(ui, repo, **opts):
2969 def rollback(ui, repo, **opts):
2969 """roll back the last transaction (dangerous)
2970 """roll back the last transaction (dangerous)
2970
2971
2971 This command should be used with care. There is only one level of
2972 This command should be used with care. There is only one level of
2972 rollback, and there is no way to undo a rollback. It will also
2973 rollback, and there is no way to undo a rollback. It will also
2973 restore the dirstate at the time of the last transaction, losing
2974 restore the dirstate at the time of the last transaction, losing
2974 any dirstate changes since that time. This command does not alter
2975 any dirstate changes since that time. This command does not alter
2975 the working directory.
2976 the working directory.
2976
2977
2977 Transactions are used to encapsulate the effects of all commands
2978 Transactions are used to encapsulate the effects of all commands
2978 that create new changesets or propagate existing changesets into a
2979 that create new changesets or propagate existing changesets into a
2979 repository. For example, the following commands are transactional,
2980 repository. For example, the following commands are transactional,
2980 and their effects can be rolled back:
2981 and their effects can be rolled back:
2981
2982
2982 - commit
2983 - commit
2983 - import
2984 - import
2984 - pull
2985 - pull
2985 - push (with this repository as the destination)
2986 - push (with this repository as the destination)
2986 - unbundle
2987 - unbundle
2987
2988
2988 This command is not intended for use on public repositories. Once
2989 This command is not intended for use on public repositories. Once
2989 changes are visible for pull by other users, rolling a transaction
2990 changes are visible for pull by other users, rolling a transaction
2990 back locally is ineffective (someone else may already have pulled
2991 back locally is ineffective (someone else may already have pulled
2991 the changes). Furthermore, a race is possible with readers of the
2992 the changes). Furthermore, a race is possible with readers of the
2992 repository; for example an in-progress pull from the repository
2993 repository; for example an in-progress pull from the repository
2993 may fail if a rollback is performed.
2994 may fail if a rollback is performed.
2994
2995
2995 Returns 0 on success, 1 if no rollback data is available.
2996 Returns 0 on success, 1 if no rollback data is available.
2996 """
2997 """
2997 return repo.rollback(opts.get('dry_run'))
2998 return repo.rollback(opts.get('dry_run'))
2998
2999
2999 def root(ui, repo):
3000 def root(ui, repo):
3000 """print the root (top) of the current working directory
3001 """print the root (top) of the current working directory
3001
3002
3002 Print the root directory of the current repository.
3003 Print the root directory of the current repository.
3003
3004
3004 Returns 0 on success.
3005 Returns 0 on success.
3005 """
3006 """
3006 ui.write(repo.root + "\n")
3007 ui.write(repo.root + "\n")
3007
3008
3008 def serve(ui, repo, **opts):
3009 def serve(ui, repo, **opts):
3009 """start stand-alone webserver
3010 """start stand-alone webserver
3010
3011
3011 Start a local HTTP repository browser and pull server. You can use
3012 Start a local HTTP repository browser and pull server. You can use
3012 this for ad-hoc sharing and browsing of repositories. It is
3013 this for ad-hoc sharing and browsing of repositories. It is
3013 recommended to use a real web server to serve a repository for
3014 recommended to use a real web server to serve a repository for
3014 longer periods of time.
3015 longer periods of time.
3015
3016
3016 Please note that the server does not implement access control.
3017 Please note that the server does not implement access control.
3017 This means that, by default, anybody can read from the server and
3018 This means that, by default, anybody can read from the server and
3018 nobody can write to it. Set the ``web.allow_push``
3019 nobody can write to it. Set the ``web.allow_push``
3019 option to ``*`` to allow everybody to push to the server. You
3020 option to ``*`` to allow everybody to push to the server. You
3020 should use a real web server if you need to authenticate users.
3021 should use a real web server if you need to authenticate users.
3021
3022
3022 By default, the server logs accesses to stdout and errors to
3023 By default, the server logs accesses to stdout and errors to
3023 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3024 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
3024 files.
3025 files.
3025
3026
3026 To have the server choose a free port number to listen on, specify
3027 To have the server choose a free port number to listen on, specify
3027 a port number of 0; in this case, the server will print the port
3028 a port number of 0; in this case, the server will print the port
3028 number it uses.
3029 number it uses.
3029
3030
3030 Returns 0 on success.
3031 Returns 0 on success.
3031 """
3032 """
3032
3033
3033 if opts["stdio"]:
3034 if opts["stdio"]:
3034 if repo is None:
3035 if repo is None:
3035 raise error.RepoError(_("There is no Mercurial repository here"
3036 raise error.RepoError(_("There is no Mercurial repository here"
3036 " (.hg not found)"))
3037 " (.hg not found)"))
3037 s = sshserver.sshserver(ui, repo)
3038 s = sshserver.sshserver(ui, repo)
3038 s.serve_forever()
3039 s.serve_forever()
3039
3040
3040 # this way we can check if something was given in the command-line
3041 # this way we can check if something was given in the command-line
3041 if opts.get('port'):
3042 if opts.get('port'):
3042 opts['port'] = int(opts.get('port'))
3043 opts['port'] = int(opts.get('port'))
3043
3044
3044 baseui = repo and repo.baseui or ui
3045 baseui = repo and repo.baseui or ui
3045 optlist = ("name templates style address port prefix ipv6"
3046 optlist = ("name templates style address port prefix ipv6"
3046 " accesslog errorlog certificate encoding")
3047 " accesslog errorlog certificate encoding")
3047 for o in optlist.split():
3048 for o in optlist.split():
3048 val = opts.get(o, '')
3049 val = opts.get(o, '')
3049 if val in (None, ''): # should check against default options instead
3050 if val in (None, ''): # should check against default options instead
3050 continue
3051 continue
3051 baseui.setconfig("web", o, val)
3052 baseui.setconfig("web", o, val)
3052 if repo and repo.ui != baseui:
3053 if repo and repo.ui != baseui:
3053 repo.ui.setconfig("web", o, val)
3054 repo.ui.setconfig("web", o, val)
3054
3055
3055 o = opts.get('web_conf') or opts.get('webdir_conf')
3056 o = opts.get('web_conf') or opts.get('webdir_conf')
3056 if not o:
3057 if not o:
3057 if not repo:
3058 if not repo:
3058 raise error.RepoError(_("There is no Mercurial repository"
3059 raise error.RepoError(_("There is no Mercurial repository"
3059 " here (.hg not found)"))
3060 " here (.hg not found)"))
3060 o = repo.root
3061 o = repo.root
3061
3062
3062 app = hgweb.hgweb(o, baseui=ui)
3063 app = hgweb.hgweb(o, baseui=ui)
3063
3064
3064 class service(object):
3065 class service(object):
3065 def init(self):
3066 def init(self):
3066 util.set_signal_handler()
3067 util.set_signal_handler()
3067 self.httpd = hgweb.server.create_server(ui, app)
3068 self.httpd = hgweb.server.create_server(ui, app)
3068
3069
3069 if opts['port'] and not ui.verbose:
3070 if opts['port'] and not ui.verbose:
3070 return
3071 return
3071
3072
3072 if self.httpd.prefix:
3073 if self.httpd.prefix:
3073 prefix = self.httpd.prefix.strip('/') + '/'
3074 prefix = self.httpd.prefix.strip('/') + '/'
3074 else:
3075 else:
3075 prefix = ''
3076 prefix = ''
3076
3077
3077 port = ':%d' % self.httpd.port
3078 port = ':%d' % self.httpd.port
3078 if port == ':80':
3079 if port == ':80':
3079 port = ''
3080 port = ''
3080
3081
3081 bindaddr = self.httpd.addr
3082 bindaddr = self.httpd.addr
3082 if bindaddr == '0.0.0.0':
3083 if bindaddr == '0.0.0.0':
3083 bindaddr = '*'
3084 bindaddr = '*'
3084 elif ':' in bindaddr: # IPv6
3085 elif ':' in bindaddr: # IPv6
3085 bindaddr = '[%s]' % bindaddr
3086 bindaddr = '[%s]' % bindaddr
3086
3087
3087 fqaddr = self.httpd.fqaddr
3088 fqaddr = self.httpd.fqaddr
3088 if ':' in fqaddr:
3089 if ':' in fqaddr:
3089 fqaddr = '[%s]' % fqaddr
3090 fqaddr = '[%s]' % fqaddr
3090 if opts['port']:
3091 if opts['port']:
3091 write = ui.status
3092 write = ui.status
3092 else:
3093 else:
3093 write = ui.write
3094 write = ui.write
3094 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3095 write(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
3095 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3096 (fqaddr, port, prefix, bindaddr, self.httpd.port))
3096
3097
3097 def run(self):
3098 def run(self):
3098 self.httpd.serve_forever()
3099 self.httpd.serve_forever()
3099
3100
3100 service = service()
3101 service = service()
3101
3102
3102 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3103 cmdutil.service(opts, initfn=service.init, runfn=service.run)
3103
3104
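# Illustrative sketch only: the listen-address formatting from
# service.init() above as a standalone helper.  An explicit :80 is dropped,
# 0.0.0.0 is shown as '*', and IPv6 literals are bracketed.
def listen_banner(fqaddr, bindaddr, port, prefix=''):
    shown_port = '' if port == 80 else ':%d' % port
    if bindaddr == '0.0.0.0':
        bindaddr = '*'
    elif ':' in bindaddr:            # IPv6 literal
        bindaddr = '[%s]' % bindaddr
    if ':' in fqaddr:
        fqaddr = '[%s]' % fqaddr
    prefix = prefix.strip('/') + '/' if prefix else ''
    return ('listening at http://%s%s/%s (bound to %s:%d)'
            % (fqaddr, shown_port, prefix, bindaddr, port))

print(listen_banner('localhost', '0.0.0.0', 8000))
# listening at http://localhost:8000/ (bound to *:8000)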
3104 def status(ui, repo, *pats, **opts):
3105 def status(ui, repo, *pats, **opts):
3105 """show changed files in the working directory
3106 """show changed files in the working directory
3106
3107
3107 Show status of files in the repository. If names are given, only
3108 Show status of files in the repository. If names are given, only
3108 files that match are shown. Files that are clean or ignored or
3109 files that match are shown. Files that are clean or ignored or
3109 the source of a copy/move operation are not listed unless
3110 the source of a copy/move operation are not listed unless
3110 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3111 -c/--clean, -i/--ignored, -C/--copies or -A/--all are given.
3111 Unless options described with "show only ..." are given, the
3112 Unless options described with "show only ..." are given, the
3112 options -mardu are used.
3113 options -mardu are used.
3113
3114
3114 Option -q/--quiet hides untracked (unknown and ignored) files
3115 Option -q/--quiet hides untracked (unknown and ignored) files
3115 unless explicitly requested with -u/--unknown or -i/--ignored.
3116 unless explicitly requested with -u/--unknown or -i/--ignored.
3116
3117
3117 NOTE: status may appear to disagree with diff if permissions have
3118 NOTE: status may appear to disagree with diff if permissions have
3118 changed or a merge has occurred. The standard diff format does not
3119 changed or a merge has occurred. The standard diff format does not
3119 report permission changes and diff only reports changes relative
3120 report permission changes and diff only reports changes relative
3120 to one merge parent.
3121 to one merge parent.
3121
3122
3122 If one revision is given, it is used as the base revision.
3123 If one revision is given, it is used as the base revision.
3123 If two revisions are given, the differences between them are
3124 If two revisions are given, the differences between them are
3124 shown. The --change option can also be used as a shortcut to list
3125 shown. The --change option can also be used as a shortcut to list
3125 the changed files of a revision from its first parent.
3126 the changed files of a revision from its first parent.
3126
3127
3127 The codes used to show the status of files are::
3128 The codes used to show the status of files are::
3128
3129
3129 M = modified
3130 M = modified
3130 A = added
3131 A = added
3131 R = removed
3132 R = removed
3132 C = clean
3133 C = clean
3133 ! = missing (deleted by non-hg command, but still tracked)
3134 ! = missing (deleted by non-hg command, but still tracked)
3134 ? = not tracked
3135 ? = not tracked
3135 I = ignored
3136 I = ignored
3136 = origin of the previous file listed as A (added)
3137 = origin of the previous file listed as A (added)
3137
3138
3138 Returns 0 on success.
3139 Returns 0 on success.
3139 """
3140 """
3140
3141
3141 revs = opts.get('rev')
3142 revs = opts.get('rev')
3142 change = opts.get('change')
3143 change = opts.get('change')
3143
3144
3144 if revs and change:
3145 if revs and change:
3145 msg = _('cannot specify --rev and --change at the same time')
3146 msg = _('cannot specify --rev and --change at the same time')
3146 raise util.Abort(msg)
3147 raise util.Abort(msg)
3147 elif change:
3148 elif change:
3148 node2 = repo.lookup(change)
3149 node2 = repo.lookup(change)
3149 node1 = repo[node2].parents()[0].node()
3150 node1 = repo[node2].parents()[0].node()
3150 else:
3151 else:
3151 node1, node2 = cmdutil.revpair(repo, revs)
3152 node1, node2 = cmdutil.revpair(repo, revs)
3152
3153
3153 cwd = (pats and repo.getcwd()) or ''
3154 cwd = (pats and repo.getcwd()) or ''
3154 end = opts.get('print0') and '\0' or '\n'
3155 end = opts.get('print0') and '\0' or '\n'
3155 copy = {}
3156 copy = {}
3156 states = 'modified added removed deleted unknown ignored clean'.split()
3157 states = 'modified added removed deleted unknown ignored clean'.split()
3157 show = [k for k in states if opts.get(k)]
3158 show = [k for k in states if opts.get(k)]
3158 if opts.get('all'):
3159 if opts.get('all'):
3159 show += ui.quiet and (states[:4] + ['clean']) or states
3160 show += ui.quiet and (states[:4] + ['clean']) or states
3160 if not show:
3161 if not show:
3161 show = ui.quiet and states[:4] or states[:5]
3162 show = ui.quiet and states[:4] or states[:5]
3162
3163
3163 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3164 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
3164 'ignored' in show, 'clean' in show, 'unknown' in show)
3165 'ignored' in show, 'clean' in show, 'unknown' in show)
3165 changestates = zip(states, 'MAR!?IC', stat)
3166 changestates = zip(states, 'MAR!?IC', stat)
3166
3167
3167 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3168 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
3168 ctxn = repo[nullid]
3169 ctxn = repo[nullid]
3169 ctx1 = repo[node1]
3170 ctx1 = repo[node1]
3170 ctx2 = repo[node2]
3171 ctx2 = repo[node2]
3171 added = stat[1]
3172 added = stat[1]
3172 if node2 is None:
3173 if node2 is None:
3173 added = stat[0] + stat[1] # merged?
3174 added = stat[0] + stat[1] # merged?
3174
3175
3175 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3176 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
3176 if k in added:
3177 if k in added:
3177 copy[k] = v
3178 copy[k] = v
3178 elif v in added:
3179 elif v in added:
3179 copy[v] = k
3180 copy[v] = k
3180
3181
3181 for state, char, files in changestates:
3182 for state, char, files in changestates:
3182 if state in show:
3183 if state in show:
3183 format = "%s %%s%s" % (char, end)
3184 format = "%s %%s%s" % (char, end)
3184 if opts.get('no_status'):
3185 if opts.get('no_status'):
3185 format = "%%s%s" % end
3186 format = "%%s%s" % end
3186
3187
3187 for f in files:
3188 for f in files:
3188 ui.write(format % repo.pathto(f, cwd),
3189 ui.write(format % repo.pathto(f, cwd),
3189 label='status.' + state)
3190 label='status.' + state)
3190 if f in copy:
3191 if f in copy:
3191 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3192 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
3192 label='status.copied')
3193 label='status.copied')
3193
3194
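# Illustrative sketch only: the pairing of status letters with state names
# used inside the status body above (zip(states, 'MAR!?IC', ...)); note
# that 'deleted' is what the docstring calls missing (!) and 'unknown' is
# not tracked (?).
states = 'modified added removed deleted unknown ignored clean'.split()
for code, state in zip('MAR!?IC', states):
    print('%s = %s' % (code, state))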
3194 def summary(ui, repo, **opts):
3195 def summary(ui, repo, **opts):
3195 """summarize working directory state
3196 """summarize working directory state
3196
3197
3197 This generates a brief summary of the working directory state,
3198 This generates a brief summary of the working directory state,
3198 including parents, branch, commit status, and available updates.
3199 including parents, branch, commit status, and available updates.
3199
3200
3200 With the --remote option, this will check the default paths for
3201 With the --remote option, this will check the default paths for
3201 incoming and outgoing changes. This can be time-consuming.
3202 incoming and outgoing changes. This can be time-consuming.
3202
3203
3203 Returns 0 on success.
3204 Returns 0 on success.
3204 """
3205 """
3205
3206
3206 ctx = repo[None]
3207 ctx = repo[None]
3207 parents = ctx.parents()
3208 parents = ctx.parents()
3208 pnode = parents[0].node()
3209 pnode = parents[0].node()
3209
3210
3210 for p in parents:
3211 for p in parents:
3211 # label with log.changeset (instead of log.parent) since this
3212 # label with log.changeset (instead of log.parent) since this
3212 # shows a working directory parent *changeset*:
3213 # shows a working directory parent *changeset*:
3213 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3214 ui.write(_('parent: %d:%s ') % (p.rev(), str(p)),
3214 label='log.changeset')
3215 label='log.changeset')
3215 ui.write(' '.join(p.tags()), label='log.tag')
3216 ui.write(' '.join(p.tags()), label='log.tag')
3216 if p.rev() == -1:
3217 if p.rev() == -1:
3217 if not len(repo):
3218 if not len(repo):
3218 ui.write(_(' (empty repository)'))
3219 ui.write(_(' (empty repository)'))
3219 else:
3220 else:
3220 ui.write(_(' (no revision checked out)'))
3221 ui.write(_(' (no revision checked out)'))
3221 ui.write('\n')
3222 ui.write('\n')
3222 if p.description():
3223 if p.description():
3223 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3224 ui.status(' ' + p.description().splitlines()[0].strip() + '\n',
3224 label='log.summary')
3225 label='log.summary')
3225
3226
3226 branch = ctx.branch()
3227 branch = ctx.branch()
3227 bheads = repo.branchheads(branch)
3228 bheads = repo.branchheads(branch)
3228 m = _('branch: %s\n') % branch
3229 m = _('branch: %s\n') % branch
3229 if branch != 'default':
3230 if branch != 'default':
3230 ui.write(m, label='log.branch')
3231 ui.write(m, label='log.branch')
3231 else:
3232 else:
3232 ui.status(m, label='log.branch')
3233 ui.status(m, label='log.branch')
3233
3234
3234 st = list(repo.status(unknown=True))[:6]
3235 st = list(repo.status(unknown=True))[:6]
3235
3236
3236 ms = mergemod.mergestate(repo)
3237 ms = mergemod.mergestate(repo)
3237 st.append([f for f in ms if ms[f] == 'u'])
3238 st.append([f for f in ms if ms[f] == 'u'])
3238
3239
3239 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3240 subs = [s for s in ctx.substate if ctx.sub(s).dirty()]
3240 st.append(subs)
3241 st.append(subs)
3241
3242
3242 labels = [ui.label(_('%d modified'), 'status.modified'),
3243 labels = [ui.label(_('%d modified'), 'status.modified'),
3243 ui.label(_('%d added'), 'status.added'),
3244 ui.label(_('%d added'), 'status.added'),
3244 ui.label(_('%d removed'), 'status.removed'),
3245 ui.label(_('%d removed'), 'status.removed'),
3245 ui.label(_('%d deleted'), 'status.deleted'),
3246 ui.label(_('%d deleted'), 'status.deleted'),
3246 ui.label(_('%d unknown'), 'status.unknown'),
3247 ui.label(_('%d unknown'), 'status.unknown'),
3247 ui.label(_('%d ignored'), 'status.ignored'),
3248 ui.label(_('%d ignored'), 'status.ignored'),
3248 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3249 ui.label(_('%d unresolved'), 'resolve.unresolved'),
3249 ui.label(_('%d subrepos'), 'status.modified')]
3250 ui.label(_('%d subrepos'), 'status.modified')]
3250 t = []
3251 t = []
3251 for s, l in zip(st, labels):
3252 for s, l in zip(st, labels):
3252 if s:
3253 if s:
3253 t.append(l % len(s))
3254 t.append(l % len(s))
3254
3255
3255 t = ', '.join(t)
3256 t = ', '.join(t)
3256 cleanworkdir = False
3257 cleanworkdir = False
3257
3258
3258 if len(parents) > 1:
3259 if len(parents) > 1:
3259 t += _(' (merge)')
3260 t += _(' (merge)')
3260 elif branch != parents[0].branch():
3261 elif branch != parents[0].branch():
3261 t += _(' (new branch)')
3262 t += _(' (new branch)')
3262 elif (parents[0].extra().get('close') and
3263 elif (parents[0].extra().get('close') and
3263 pnode in repo.branchheads(branch, closed=True)):
3264 pnode in repo.branchheads(branch, closed=True)):
3264 t += _(' (head closed)')
3265 t += _(' (head closed)')
3265 elif (not st[0] and not st[1] and not st[2] and not st[7]):
3266 elif (not st[0] and not st[1] and not st[2] and not st[7]):
3266 t += _(' (clean)')
3267 t += _(' (clean)')
3267 cleanworkdir = True
3268 cleanworkdir = True
3268 elif pnode not in bheads:
3269 elif pnode not in bheads:
3269 t += _(' (new branch head)')
3270 t += _(' (new branch head)')
3270
3271
3271 if cleanworkdir:
3272 if cleanworkdir:
3272 ui.status(_('commit: %s\n') % t.strip())
3273 ui.status(_('commit: %s\n') % t.strip())
3273 else:
3274 else:
3274 ui.write(_('commit: %s\n') % t.strip())
3275 ui.write(_('commit: %s\n') % t.strip())
3275
3276
3276 # all ancestors of branch heads - all ancestors of parent = new csets
3277 # all ancestors of branch heads - all ancestors of parent = new csets
3277 new = [0] * len(repo)
3278 new = [0] * len(repo)
3278 cl = repo.changelog
3279 cl = repo.changelog
3279 for a in [cl.rev(n) for n in bheads]:
3280 for a in [cl.rev(n) for n in bheads]:
3280 new[a] = 1
3281 new[a] = 1
3281 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3282 for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
3282 new[a] = 1
3283 new[a] = 1
3283 for a in [p.rev() for p in parents]:
3284 for a in [p.rev() for p in parents]:
3284 if a >= 0:
3285 if a >= 0:
3285 new[a] = 0
3286 new[a] = 0
3286 for a in cl.ancestors(*[p.rev() for p in parents]):
3287 for a in cl.ancestors(*[p.rev() for p in parents]):
3287 new[a] = 0
3288 new[a] = 0
3288 new = sum(new)
3289 new = sum(new)
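# Annotation (not part of the original source): the array marking above is
# roughly equivalent to the following set computation, shown only as an
# illustrative sketch:
#   headrevs = set(cl.rev(n) for n in bheads)
#   reachable = headrevs | set(cl.ancestors(*headrevs))
#   parentrevs = set(p.rev() for p in parents if p.rev() >= 0)
#   new = len(reachable - parentrevs - set(cl.ancestors(*parentrevs)))
# i.e. the changesets on this branch that are not yet ancestors of the
# working directory parent(s).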
3289
3290
3290 if new == 0:
3291 if new == 0:
3291 ui.status(_('update: (current)\n'))
3292 ui.status(_('update: (current)\n'))
3292 elif pnode not in bheads:
3293 elif pnode not in bheads:
3293 ui.write(_('update: %d new changesets (update)\n') % new)
3294 ui.write(_('update: %d new changesets (update)\n') % new)
3294 else:
3295 else:
3295 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3296 ui.write(_('update: %d new changesets, %d branch heads (merge)\n') %
3296 (new, len(bheads)))
3297 (new, len(bheads)))
3297
3298
3298 if opts.get('remote'):
3299 if opts.get('remote'):
3299 t = []
3300 t = []
3300 source, branches = hg.parseurl(ui.expandpath('default'))
3301 source, branches = hg.parseurl(ui.expandpath('default'))
3301 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3302 other = hg.repository(cmdutil.remoteui(repo, {}), source)
3302 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3303 revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
3303 ui.debug('comparing with %s\n' % url.hidepassword(source))
3304 ui.debug('comparing with %s\n' % url.hidepassword(source))
3304 repo.ui.pushbuffer()
3305 repo.ui.pushbuffer()
3305 common, incoming, rheads = repo.findcommonincoming(other)
3306 common, incoming, rheads = repo.findcommonincoming(other)
3306 repo.ui.popbuffer()
3307 repo.ui.popbuffer()
3307 if incoming:
3308 if incoming:
3308 t.append(_('1 or more incoming'))
3309 t.append(_('1 or more incoming'))
3309
3310
3310 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3311 dest, branches = hg.parseurl(ui.expandpath('default-push', 'default'))
3311 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3312 revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
3312 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3313 other = hg.repository(cmdutil.remoteui(repo, {}), dest)
3313 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3314 ui.debug('comparing with %s\n' % url.hidepassword(dest))
3314 repo.ui.pushbuffer()
3315 repo.ui.pushbuffer()
3315 o = repo.findoutgoing(other)
3316 o = repo.findoutgoing(other)
3316 repo.ui.popbuffer()
3317 repo.ui.popbuffer()
3317 o = repo.changelog.nodesbetween(o, None)[0]
3318 o = repo.changelog.nodesbetween(o, None)[0]
3318 if o:
3319 if o:
3319 t.append(_('%d outgoing') % len(o))
3320 t.append(_('%d outgoing') % len(o))
3320
3321
3321 if t:
3322 if t:
3322 ui.write(_('remote: %s\n') % (', '.join(t)))
3323 ui.write(_('remote: %s\n') % (', '.join(t)))
3323 else:
3324 else:
3324 ui.status(_('remote: (synced)\n'))
3325 ui.status(_('remote: (synced)\n'))
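# Annotation (not part of the original source): both checks above are
# read-only and only count changesets. Typical output looks like
#   remote: 1 or more incoming, 3 outgoing
# or, when nothing is pending in either direction,
#   remote: (synced)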
3325
3326
3326 def tag(ui, repo, name1, *names, **opts):
3327 def tag(ui, repo, name1, *names, **opts):
3327 """add one or more tags for the current or given revision
3328 """add one or more tags for the current or given revision
3328
3329
3329 Name a particular revision using <name>.
3330 Name a particular revision using <name>.
3330
3331
3331 Tags are used to name particular revisions of the repository and are
3332 Tags are used to name particular revisions of the repository and are
3332 very useful for comparing different revisions, for going back to
3333 very useful for comparing different revisions, for going back to
3333 significant earlier versions, or for marking branch points as releases.
3334 significant earlier versions, or for marking branch points as releases.
3334
3335
3335 If no revision is given, the parent of the working directory is
3336 If no revision is given, the parent of the working directory is
3336 used, or tip if no revision is checked out.
3337 used, or tip if no revision is checked out.
3337
3338
3338 To facilitate version control, distribution, and merging of tags,
3339 To facilitate version control, distribution, and merging of tags,
3339 they are stored in a file named ".hgtags" which is managed
3340 they are stored in a file named ".hgtags" which is managed
3340 similarly to other project files and can be hand-edited if
3341 similarly to other project files and can be hand-edited if
3341 necessary. The file '.hg/localtags' is used for local tags (not
3342 necessary. The file '.hg/localtags' is used for local tags (not
3342 shared among repositories).
3343 shared among repositories).
3343
3344
3344 See :hg:`help dates` for a list of formats valid for -d/--date.
3345 See :hg:`help dates` for a list of formats valid for -d/--date.
3345
3346
3346 Since tag names have priority over branch names during revision
3347 Since tag names have priority over branch names during revision
3347 lookup, using an existing branch name as a tag name is discouraged.
3348 lookup, using an existing branch name as a tag name is discouraged.
3348
3349
3349 Returns 0 on success.
3350 Returns 0 on success.
3350 """
3351 """
3351
3352
3352 rev_ = "."
3353 rev_ = "."
3353 names = [t.strip() for t in (name1,) + names]
3354 names = [t.strip() for t in (name1,) + names]
3354 if len(names) != len(set(names)):
3355 if len(names) != len(set(names)):
3355 raise util.Abort(_('tag names must be unique'))
3356 raise util.Abort(_('tag names must be unique'))
3356 for n in names:
3357 for n in names:
3357 if n in ['tip', '.', 'null']:
3358 if n in ['tip', '.', 'null']:
3358 raise util.Abort(_('the name \'%s\' is reserved') % n)
3359 raise util.Abort(_('the name \'%s\' is reserved') % n)
3359 if opts.get('rev') and opts.get('remove'):
3360 if opts.get('rev') and opts.get('remove'):
3360 raise util.Abort(_("--rev and --remove are incompatible"))
3361 raise util.Abort(_("--rev and --remove are incompatible"))
3361 if opts.get('rev'):
3362 if opts.get('rev'):
3362 rev_ = opts['rev']
3363 rev_ = opts['rev']
3363 message = opts.get('message')
3364 message = opts.get('message')
3364 if opts.get('remove'):
3365 if opts.get('remove'):
3365 expectedtype = opts.get('local') and 'local' or 'global'
3366 expectedtype = opts.get('local') and 'local' or 'global'
3366 for n in names:
3367 for n in names:
3367 if not repo.tagtype(n):
3368 if not repo.tagtype(n):
3368 raise util.Abort(_('tag \'%s\' does not exist') % n)
3369 raise util.Abort(_('tag \'%s\' does not exist') % n)
3369 if repo.tagtype(n) != expectedtype:
3370 if repo.tagtype(n) != expectedtype:
3370 if expectedtype == 'global':
3371 if expectedtype == 'global':
3371 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3372 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
3372 else:
3373 else:
3373 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3374 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
3374 rev_ = nullid
3375 rev_ = nullid
3375 if not message:
3376 if not message:
3376 # we don't translate commit messages
3377 # we don't translate commit messages
3377 message = 'Removed tag %s' % ', '.join(names)
3378 message = 'Removed tag %s' % ', '.join(names)
3378 elif not opts.get('force'):
3379 elif not opts.get('force'):
3379 for n in names:
3380 for n in names:
3380 if n in repo.tags():
3381 if n in repo.tags():
3381 raise util.Abort(_('tag \'%s\' already exists '
3382 raise util.Abort(_('tag \'%s\' already exists '
3382 '(use -f to force)') % n)
3383 '(use -f to force)') % n)
3383 if not rev_ and repo.dirstate.parents()[1] != nullid:
3384 if not rev_ and repo.dirstate.parents()[1] != nullid:
3384 raise util.Abort(_('uncommitted merge - please provide a '
3385 raise util.Abort(_('uncommitted merge - please provide a '
3385 'specific revision'))
3386 'specific revision'))
3386 r = repo[rev_].node()
3387 r = repo[rev_].node()
3387
3388
3388 if not message:
3389 if not message:
3389 # we don't translate commit messages
3390 # we don't translate commit messages
3390 message = ('Added tag %s for changeset %s' %
3391 message = ('Added tag %s for changeset %s' %
3391 (', '.join(names), short(r)))
3392 (', '.join(names), short(r)))
3392
3393
3393 date = opts.get('date')
3394 date = opts.get('date')
3394 if date:
3395 if date:
3395 date = util.parsedate(date)
3396 date = util.parsedate(date)
3396
3397
3397 if opts.get('edit'):
3398 if opts.get('edit'):
3398 message = ui.edit(message, ui.username())
3399 message = ui.edit(message, ui.username())
3399
3400
3400 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3401 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
3401
3402
3402 def tags(ui, repo):
3403 def tags(ui, repo):
3403 """list repository tags
3404 """list repository tags
3404
3405
3405 This lists both regular and local tags. When the -v/--verbose
3406 This lists both regular and local tags. When the -v/--verbose
3406 switch is used, a third column "local" is printed for local tags.
3407 switch is used, a third column "local" is printed for local tags.
3407
3408
3408 Returns 0 on success.
3409 Returns 0 on success.
3409 """
3410 """
3410
3411
3411 hexfunc = ui.debugflag and hex or short
3412 hexfunc = ui.debugflag and hex or short
3412 tagtype = ""
3413 tagtype = ""
3413
3414
3414 for t, n in reversed(repo.tagslist()):
3415 for t, n in reversed(repo.tagslist()):
3415 if ui.quiet:
3416 if ui.quiet:
3416 ui.write("%s\n" % t)
3417 ui.write("%s\n" % t)
3417 continue
3418 continue
3418
3419
3419 try:
3420 try:
3420 hn = hexfunc(n)
3421 hn = hexfunc(n)
3421 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3422 r = "%5d:%s" % (repo.changelog.rev(n), hn)
3422 except error.LookupError:
3423 except error.LookupError:
3423 r = " ?:%s" % hn
3424 r = " ?:%s" % hn
3424 else:
3425 else:
3425 spaces = " " * (30 - encoding.colwidth(t))
3426 spaces = " " * (30 - encoding.colwidth(t))
3426 if ui.verbose:
3427 if ui.verbose:
3427 if repo.tagtype(t) == 'local':
3428 if repo.tagtype(t) == 'local':
3428 tagtype = " local"
3429 tagtype = " local"
3429 else:
3430 else:
3430 tagtype = ""
3431 tagtype = ""
3431 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
3432 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
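# Annotation (not part of the original source): the padding above left-aligns
# tag names in a 30-column field, producing output roughly like
#   tip                                5:xxxxxxxxxxxx
#   v1.0                               2:xxxxxxxxxxxx local
# where the hashes are placeholders and "local" appears only with -v.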
3432
3433
3433 def tip(ui, repo, **opts):
3434 def tip(ui, repo, **opts):
3434 """show the tip revision
3435 """show the tip revision
3435
3436
3436 The tip revision (usually just called the tip) is the changeset
3437 The tip revision (usually just called the tip) is the changeset
3437 most recently added to the repository (and therefore the most
3438 most recently added to the repository (and therefore the most
3438 recently changed head).
3439 recently changed head).
3439
3440
3440 If you have just made a commit, that commit will be the tip. If
3441 If you have just made a commit, that commit will be the tip. If
3441 you have just pulled changes from another repository, the tip of
3442 you have just pulled changes from another repository, the tip of
3442 that repository becomes the current tip. The "tip" tag is special
3443 that repository becomes the current tip. The "tip" tag is special
3443 and cannot be renamed or assigned to a different changeset.
3444 and cannot be renamed or assigned to a different changeset.
3444
3445
3445 Returns 0 on success.
3446 Returns 0 on success.
3446 """
3447 """
3447 displayer = cmdutil.show_changeset(ui, repo, opts)
3448 displayer = cmdutil.show_changeset(ui, repo, opts)
3448 displayer.show(repo[len(repo) - 1])
3449 displayer.show(repo[len(repo) - 1])
3449 displayer.close()
3450 displayer.close()
3450
3451
3451 def unbundle(ui, repo, fname1, *fnames, **opts):
3452 def unbundle(ui, repo, fname1, *fnames, **opts):
3452 """apply one or more changegroup files
3453 """apply one or more changegroup files
3453
3454
3454 Apply one or more compressed changegroup files generated by the
3455 Apply one or more compressed changegroup files generated by the
3455 bundle command.
3456 bundle command.
3456
3457
3457 Returns 0 on success, 1 if an update has unresolved files.
3458 Returns 0 on success, 1 if an update has unresolved files.
3458 """
3459 """
3459 fnames = (fname1,) + fnames
3460 fnames = (fname1,) + fnames
3460
3461
3461 lock = repo.lock()
3462 lock = repo.lock()
3462 try:
3463 try:
3463 for fname in fnames:
3464 for fname in fnames:
3464 f = url.open(ui, fname)
3465 f = url.open(ui, fname)
3465 gen = changegroup.readbundle(f, fname)
3466 gen = changegroup.readbundle(f, fname)
3466 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3467 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
3467 finally:
3468 finally:
3468 lock.release()
3469 lock.release()
3469
3470
3470 return postincoming(ui, repo, modheads, opts.get('update'), None)
3471 return postincoming(ui, repo, modheads, opts.get('update'), None)
3471
3472
3472 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3473 def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
3473 """update working directory (or switch revisions)
3474 """update working directory (or switch revisions)
3474
3475
3475 Update the repository's working directory to the specified
3476 Update the repository's working directory to the specified
3476 changeset.
3477 changeset.
3477
3478
3478 If no changeset is specified, attempt to update to the head of the
3479 If no changeset is specified, attempt to update to the head of the
3479 current branch. If this head is a descendant of the working
3480 current branch. If this head is a descendant of the working
3480 directory's parent, update to it; otherwise, abort.
3481 directory's parent, update to it; otherwise, abort.
3481
3482
3482 The following rules apply when the working directory contains
3483 The following rules apply when the working directory contains
3483 uncommitted changes:
3484 uncommitted changes:
3484
3485
3485 1. If neither -c/--check nor -C/--clean is specified, and if
3486 1. If neither -c/--check nor -C/--clean is specified, and if
3486 the requested changeset is an ancestor or descendant of
3487 the requested changeset is an ancestor or descendant of
3487 the working directory's parent, the uncommitted changes
3488 the working directory's parent, the uncommitted changes
3488 are merged into the requested changeset and the merged
3489 are merged into the requested changeset and the merged
3489 result is left uncommitted. If the requested changeset is
3490 result is left uncommitted. If the requested changeset is
3490 not an ancestor or descendant (that is, it is on another
3491 not an ancestor or descendant (that is, it is on another
3491 branch), the update is aborted and the uncommitted changes
3492 branch), the update is aborted and the uncommitted changes
3492 are preserved.
3493 are preserved.
3493
3494
3494 2. With the -c/--check option, the update is aborted and the
3495 2. With the -c/--check option, the update is aborted and the
3495 uncommitted changes are preserved.
3496 uncommitted changes are preserved.
3496
3497
3497 3. With the -C/--clean option, uncommitted changes are discarded and
3498 3. With the -C/--clean option, uncommitted changes are discarded and
3498 the working directory is updated to the requested changeset.
3499 the working directory is updated to the requested changeset.
3499
3500
3500 Use null as the changeset to remove the working directory (like
3501 Use null as the changeset to remove the working directory (like
3501 :hg:`clone -U`).
3502 :hg:`clone -U`).
3502
3503
3503 If you want to update just one file to an older changeset, use :hg:`revert`.
3504 If you want to update just one file to an older changeset, use :hg:`revert`.
3504
3505
3505 See :hg:`help dates` for a list of formats valid for -d/--date.
3506 See :hg:`help dates` for a list of formats valid for -d/--date.
3506
3507
3507 Returns 0 on success, 1 if there are unresolved files.
3508 Returns 0 on success, 1 if there are unresolved files.
3508 """
3509 """
3509 if rev and node:
3510 if rev and node:
3510 raise util.Abort(_("please specify just one revision"))
3511 raise util.Abort(_("please specify just one revision"))
3511
3512
3512 if not rev:
3513 if not rev:
3513 rev = node
3514 rev = node
3514
3515
3515 if check and clean:
3516 if check and clean:
3516 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3517 raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
3517
3518
3518 if check:
3519 if check:
3519 # we could use dirty() but we can ignore merge and branch trivia
3520 # we could use dirty() but we can ignore merge and branch trivia
3520 c = repo[None]
3521 c = repo[None]
3521 if c.modified() or c.added() or c.removed():
3522 if c.modified() or c.added() or c.removed():
3522 raise util.Abort(_("uncommitted local changes"))
3523 raise util.Abort(_("uncommitted local changes"))
3523
3524
3524 if date:
3525 if date:
3525 if rev:
3526 if rev:
3526 raise util.Abort(_("you can't specify a revision and a date"))
3527 raise util.Abort(_("you can't specify a revision and a date"))
3527 rev = cmdutil.finddate(ui, repo, date)
3528 rev = cmdutil.finddate(ui, repo, date)
3528
3529
3529 if clean or check:
3530 if clean or check:
3530 return hg.clean(repo, rev)
3531 return hg.clean(repo, rev)
3531 else:
3532 else:
3532 return hg.update(repo, rev)
3533 return hg.update(repo, rev)
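# Annotation (not part of the original source), summarizing the dispatch above:
#   -C/--clean   -> hg.clean(): discard uncommitted changes and update
#   -c/--check   -> abort if the working directory is dirty, else hg.clean()
#   neither flag -> hg.update(): merge uncommitted changes into the target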
3533
3534
3534 def verify(ui, repo):
3535 def verify(ui, repo):
3535 """verify the integrity of the repository
3536 """verify the integrity of the repository
3536
3537
3537 Verify the integrity of the current repository.
3538 Verify the integrity of the current repository.
3538
3539
3539 This will perform an extensive check of the repository's
3540 This will perform an extensive check of the repository's
3540 integrity, validating the hashes and checksums of each entry in
3541 integrity, validating the hashes and checksums of each entry in
3541 the changelog, manifest, and tracked files, as well as the
3542 the changelog, manifest, and tracked files, as well as the
3542 integrity of their crosslinks and indices.
3543 integrity of their crosslinks and indices.
3543
3544
3544 Returns 0 on success, 1 if errors are encountered.
3545 Returns 0 on success, 1 if errors are encountered.
3545 """
3546 """
3546 return hg.verify(repo)
3547 return hg.verify(repo)
3547
3548
3548 def version_(ui):
3549 def version_(ui):
3549 """output version and copyright information"""
3550 """output version and copyright information"""
3550 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3551 ui.write(_("Mercurial Distributed SCM (version %s)\n")
3551 % util.version())
3552 % util.version())
3552 ui.status(_(
3553 ui.status(_(
3553 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3554 "\nCopyright (C) 2005-2010 Matt Mackall <mpm@selenic.com> and others\n"
3554 "This is free software; see the source for copying conditions. "
3555 "This is free software; see the source for copying conditions. "
3555 "There is NO\nwarranty; "
3556 "There is NO\nwarranty; "
3556 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3557 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3557 ))
3558 ))
3558
3559
3559 # Command options and aliases are listed here, alphabetically
3560 # Command options and aliases are listed here, alphabetically
3560
3561
3561 globalopts = [
3562 globalopts = [
3562 ('R', 'repository', '',
3563 ('R', 'repository', '',
3563 _('repository root directory or name of overlay bundle file')),
3564 _('repository root directory or name of overlay bundle file')),
3564 ('', 'cwd', '', _('change working directory')),
3565 ('', 'cwd', '', _('change working directory')),
3565 ('y', 'noninteractive', None,
3566 ('y', 'noninteractive', None,
3566 _('do not prompt, assume \'yes\' for any required answers')),
3567 _('do not prompt, assume \'yes\' for any required answers')),
3567 ('q', 'quiet', None, _('suppress output')),
3568 ('q', 'quiet', None, _('suppress output')),
3568 ('v', 'verbose', None, _('enable additional output')),
3569 ('v', 'verbose', None, _('enable additional output')),
3569 ('', 'config', [],
3570 ('', 'config', [],
3570 _('set/override config option (use \'section.name=value\')')),
3571 _('set/override config option (use \'section.name=value\')')),
3571 ('', 'debug', None, _('enable debugging output')),
3572 ('', 'debug', None, _('enable debugging output')),
3572 ('', 'debugger', None, _('start debugger')),
3573 ('', 'debugger', None, _('start debugger')),
3573 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3574 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3574 ('', 'encodingmode', encoding.encodingmode,
3575 ('', 'encodingmode', encoding.encodingmode,
3575 _('set the charset encoding mode')),
3576 _('set the charset encoding mode')),
3576 ('', 'traceback', None, _('always print a traceback on exception')),
3577 ('', 'traceback', None, _('always print a traceback on exception')),
3577 ('', 'time', None, _('time how long the command takes')),
3578 ('', 'time', None, _('time how long the command takes')),
3578 ('', 'profile', None, _('print command execution profile')),
3579 ('', 'profile', None, _('print command execution profile')),
3579 ('', 'version', None, _('output version information and exit')),
3580 ('', 'version', None, _('output version information and exit')),
3580 ('h', 'help', None, _('display help and exit')),
3581 ('h', 'help', None, _('display help and exit')),
3581 ]
3582 ]
3582
3583
3583 dryrunopts = [('n', 'dry-run', None,
3584 dryrunopts = [('n', 'dry-run', None,
3584 _('do not perform actions, just print output'))]
3585 _('do not perform actions, just print output'))]
3585
3586
3586 remoteopts = [
3587 remoteopts = [
3587 ('e', 'ssh', '', _('specify ssh command to use')),
3588 ('e', 'ssh', '', _('specify ssh command to use')),
3588 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3589 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3589 ]
3590 ]
3590
3591
3591 walkopts = [
3592 walkopts = [
3592 ('I', 'include', [], _('include names matching the given patterns')),
3593 ('I', 'include', [], _('include names matching the given patterns')),
3593 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3594 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3594 ]
3595 ]
3595
3596
3596 commitopts = [
3597 commitopts = [
3597 ('m', 'message', '', _('use <text> as commit message')),
3598 ('m', 'message', '', _('use <text> as commit message')),
3598 ('l', 'logfile', '', _('read commit message from <file>')),
3599 ('l', 'logfile', '', _('read commit message from <file>')),
3599 ]
3600 ]
3600
3601
3601 commitopts2 = [
3602 commitopts2 = [
3602 ('d', 'date', '', _('record the specified date as commit date')),
3603 ('d', 'date', '', _('record the specified date as commit date')),
3603 ('u', 'user', '', _('record the specified user as committer')),
3604 ('u', 'user', '', _('record the specified user as committer')),
3604 ]
3605 ]
3605
3606
3606 templateopts = [
3607 templateopts = [
3607 ('', 'style', '', _('display using template map file')),
3608 ('', 'style', '', _('display using template map file')),
3608 ('', 'template', '', _('display with template')),
3609 ('', 'template', '', _('display with template')),
3609 ]
3610 ]
3610
3611
3611 logopts = [
3612 logopts = [
3612 ('p', 'patch', None, _('show patch')),
3613 ('p', 'patch', None, _('show patch')),
3613 ('g', 'git', None, _('use git extended diff format')),
3614 ('g', 'git', None, _('use git extended diff format')),
3614 ('l', 'limit', '', _('limit number of changes displayed')),
3615 ('l', 'limit', '', _('limit number of changes displayed')),
3615 ('M', 'no-merges', None, _('do not show merges')),
3616 ('M', 'no-merges', None, _('do not show merges')),
3616 ('', 'stat', None, _('output diffstat-style summary of changes')),
3617 ('', 'stat', None, _('output diffstat-style summary of changes')),
3617 ] + templateopts
3618 ] + templateopts
3618
3619
3619 diffopts = [
3620 diffopts = [
3620 ('a', 'text', None, _('treat all files as text')),
3621 ('a', 'text', None, _('treat all files as text')),
3621 ('g', 'git', None, _('use git extended diff format')),
3622 ('g', 'git', None, _('use git extended diff format')),
3622 ('', 'nodates', None, _('omit dates from diff headers'))
3623 ('', 'nodates', None, _('omit dates from diff headers'))
3623 ]
3624 ]
3624
3625
3625 diffopts2 = [
3626 diffopts2 = [
3626 ('p', 'show-function', None, _('show which function each change is in')),
3627 ('p', 'show-function', None, _('show which function each change is in')),
3627 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3628 ('', 'reverse', None, _('produce a diff that undoes the changes')),
3628 ('w', 'ignore-all-space', None,
3629 ('w', 'ignore-all-space', None,
3629 _('ignore white space when comparing lines')),
3630 _('ignore white space when comparing lines')),
3630 ('b', 'ignore-space-change', None,
3631 ('b', 'ignore-space-change', None,
3631 _('ignore changes in the amount of white space')),
3632 _('ignore changes in the amount of white space')),
3632 ('B', 'ignore-blank-lines', None,
3633 ('B', 'ignore-blank-lines', None,
3633 _('ignore changes whose lines are all blank')),
3634 _('ignore changes whose lines are all blank')),
3634 ('U', 'unified', '', _('number of lines of context to show')),
3635 ('U', 'unified', '', _('number of lines of context to show')),
3635 ('', 'stat', None, _('output diffstat-style summary of changes')),
3636 ('', 'stat', None, _('output diffstat-style summary of changes')),
3636 ]
3637 ]
3637
3638
3638 similarityopts = [
3639 similarityopts = [
3639 ('s', 'similarity', '',
3640 ('s', 'similarity', '',
3640 _('guess renamed files by similarity (0<=s<=100)'))
3641 _('guess renamed files by similarity (0<=s<=100)'))
3641 ]
3642 ]
3642
3643
3643 table = {
3644 table = {
3644 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3645 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3645 "addremove":
3646 "addremove":
3646 (addremove, similarityopts + walkopts + dryrunopts,
3647 (addremove, similarityopts + walkopts + dryrunopts,
3647 _('[OPTION]... [FILE]...')),
3648 _('[OPTION]... [FILE]...')),
3648 "^annotate|blame":
3649 "^annotate|blame":
3649 (annotate,
3650 (annotate,
3650 [('r', 'rev', '', _('annotate the specified revision')),
3651 [('r', 'rev', '', _('annotate the specified revision')),
3651 ('', 'follow', None,
3652 ('', 'follow', None,
3652 _('follow copies/renames and list the filename (DEPRECATED)')),
3653 _('follow copies/renames and list the filename (DEPRECATED)')),
3653 ('', 'no-follow', None, _("don't follow copies and renames")),
3654 ('', 'no-follow', None, _("don't follow copies and renames")),
3654 ('a', 'text', None, _('treat all files as text')),
3655 ('a', 'text', None, _('treat all files as text')),
3655 ('u', 'user', None, _('list the author (long with -v)')),
3656 ('u', 'user', None, _('list the author (long with -v)')),
3656 ('f', 'file', None, _('list the filename')),
3657 ('f', 'file', None, _('list the filename')),
3657 ('d', 'date', None, _('list the date (short with -q)')),
3658 ('d', 'date', None, _('list the date (short with -q)')),
3658 ('n', 'number', None, _('list the revision number (default)')),
3659 ('n', 'number', None, _('list the revision number (default)')),
3659 ('c', 'changeset', None, _('list the changeset')),
3660 ('c', 'changeset', None, _('list the changeset')),
3660 ('l', 'line-number', None,
3661 ('l', 'line-number', None,
3661 _('show line number at the first appearance'))
3662 _('show line number at the first appearance'))
3662 ] + walkopts,
3663 ] + walkopts,
3663 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3664 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3664 "archive":
3665 "archive":
3665 (archive,
3666 (archive,
3666 [('', 'no-decode', None, _('do not pass files through decoders')),
3667 [('', 'no-decode', None, _('do not pass files through decoders')),
3667 ('p', 'prefix', '', _('directory prefix for files in archive')),
3668 ('p', 'prefix', '', _('directory prefix for files in archive')),
3668 ('r', 'rev', '', _('revision to distribute')),
3669 ('r', 'rev', '', _('revision to distribute')),
3669 ('t', 'type', '', _('type of distribution to create')),
3670 ('t', 'type', '', _('type of distribution to create')),
3670 ] + walkopts,
3671 ] + walkopts,
3671 _('[OPTION]... DEST')),
3672 _('[OPTION]... DEST')),
3672 "backout":
3673 "backout":
3673 (backout,
3674 (backout,
3674 [('', 'merge', None,
3675 [('', 'merge', None,
3675 _('merge with old dirstate parent after backout')),
3676 _('merge with old dirstate parent after backout')),
3676 ('', 'parent', '', _('parent to choose when backing out merge')),
3677 ('', 'parent', '', _('parent to choose when backing out merge')),
3677 ('r', 'rev', '', _('revision to backout')),
3678 ('r', 'rev', '', _('revision to backout')),
3678 ] + walkopts + commitopts + commitopts2,
3679 ] + walkopts + commitopts + commitopts2,
3679 _('[OPTION]... [-r] REV')),
3680 _('[OPTION]... [-r] REV')),
3680 "bisect":
3681 "bisect":
3681 (bisect,
3682 (bisect,
3682 [('r', 'reset', False, _('reset bisect state')),
3683 [('r', 'reset', False, _('reset bisect state')),
3683 ('g', 'good', False, _('mark changeset good')),
3684 ('g', 'good', False, _('mark changeset good')),
3684 ('b', 'bad', False, _('mark changeset bad')),
3685 ('b', 'bad', False, _('mark changeset bad')),
3685 ('s', 'skip', False, _('skip testing changeset')),
3686 ('s', 'skip', False, _('skip testing changeset')),
3686 ('c', 'command', '', _('use command to check changeset state')),
3687 ('c', 'command', '', _('use command to check changeset state')),
3687 ('U', 'noupdate', False, _('do not update to target'))],
3688 ('U', 'noupdate', False, _('do not update to target'))],
3688 _("[-gbsr] [-U] [-c CMD] [REV]")),
3689 _("[-gbsr] [-U] [-c CMD] [REV]")),
3689 "branch":
3690 "branch":
3690 (branch,
3691 (branch,
3691 [('f', 'force', None,
3692 [('f', 'force', None,
3692 _('set branch name even if it shadows an existing branch')),
3693 _('set branch name even if it shadows an existing branch')),
3693 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3694 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3694 _('[-fC] [NAME]')),
3695 _('[-fC] [NAME]')),
3695 "branches":
3696 "branches":
3696 (branches,
3697 (branches,
3697 [('a', 'active', False,
3698 [('a', 'active', False,
3698 _('show only branches that have unmerged heads')),
3699 _('show only branches that have unmerged heads')),
3699 ('c', 'closed', False,
3700 ('c', 'closed', False,
3700 _('show normal and closed branches'))],
3701 _('show normal and closed branches'))],
3701 _('[-ac]')),
3702 _('[-ac]')),
3702 "bundle":
3703 "bundle":
3703 (bundle,
3704 (bundle,
3704 [('f', 'force', None,
3705 [('f', 'force', None,
3705 _('run even when the destination is unrelated')),
3706 _('run even when the destination is unrelated')),
3706 ('r', 'rev', [],
3707 ('r', 'rev', [],
3707 _('a changeset intended to be added to the destination')),
3708 _('a changeset intended to be added to the destination')),
3708 ('b', 'branch', [],
3709 ('b', 'branch', [],
3709 _('a specific branch you would like to bundle')),
3710 _('a specific branch you would like to bundle')),
3710 ('', 'base', [],
3711 ('', 'base', [],
3711 _('a base changeset assumed to be available at the destination')),
3712 _('a base changeset assumed to be available at the destination')),
3712 ('a', 'all', None, _('bundle all changesets in the repository')),
3713 ('a', 'all', None, _('bundle all changesets in the repository')),
3713 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3714 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3714 ] + remoteopts,
3715 ] + remoteopts,
3715 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3716 _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3716 "cat":
3717 "cat":
3717 (cat,
3718 (cat,
3718 [('o', 'output', '', _('print output to file with formatted name')),
3719 [('o', 'output', '', _('print output to file with formatted name')),
3719 ('r', 'rev', '', _('print the given revision')),
3720 ('r', 'rev', '', _('print the given revision')),
3720 ('', 'decode', None, _('apply any matching decode filter')),
3721 ('', 'decode', None, _('apply any matching decode filter')),
3721 ] + walkopts,
3722 ] + walkopts,
3722 _('[OPTION]... FILE...')),
3723 _('[OPTION]... FILE...')),
3723 "^clone":
3724 "^clone":
3724 (clone,
3725 (clone,
3725 [('U', 'noupdate', None,
3726 [('U', 'noupdate', None,
3726 _('the clone will include an empty working copy (only a repository)')),
3727 _('the clone will include an empty working copy (only a repository)')),
3727 ('u', 'updaterev', '',
3728 ('u', 'updaterev', '',
3728 _('revision, tag or branch to check out')),
3729 _('revision, tag or branch to check out')),
3729 ('r', 'rev', [],
3730 ('r', 'rev', [],
3730 _('include the specified changeset')),
3731 _('include the specified changeset')),
3731 ('b', 'branch', [],
3732 ('b', 'branch', [],
3732 _('clone only the specified branch')),
3733 _('clone only the specified branch')),
3733 ('', 'pull', None, _('use pull protocol to copy metadata')),
3734 ('', 'pull', None, _('use pull protocol to copy metadata')),
3734 ('', 'uncompressed', None,
3735 ('', 'uncompressed', None,
3735 _('use uncompressed transfer (fast over LAN)')),
3736 _('use uncompressed transfer (fast over LAN)')),
3736 ] + remoteopts,
3737 ] + remoteopts,
3737 _('[OPTION]... SOURCE [DEST]')),
3738 _('[OPTION]... SOURCE [DEST]')),
3738 "^commit|ci":
3739 "^commit|ci":
3739 (commit,
3740 (commit,
3740 [('A', 'addremove', None,
3741 [('A', 'addremove', None,
3741 _('mark new/missing files as added/removed before committing')),
3742 _('mark new/missing files as added/removed before committing')),
3742 ('', 'close-branch', None,
3743 ('', 'close-branch', None,
3743 _('mark a branch as closed, hiding it from the branch list')),
3744 _('mark a branch as closed, hiding it from the branch list')),
3744 ] + walkopts + commitopts + commitopts2,
3745 ] + walkopts + commitopts + commitopts2,
3745 _('[OPTION]... [FILE]...')),
3746 _('[OPTION]... [FILE]...')),
3746 "copy|cp":
3747 "copy|cp":
3747 (copy,
3748 (copy,
3748 [('A', 'after', None, _('record a copy that has already occurred')),
3749 [('A', 'after', None, _('record a copy that has already occurred')),
3749 ('f', 'force', None,
3750 ('f', 'force', None,
3750 _('forcibly copy over an existing managed file')),
3751 _('forcibly copy over an existing managed file')),
3751 ] + walkopts + dryrunopts,
3752 ] + walkopts + dryrunopts,
3752 _('[OPTION]... [SOURCE]... DEST')),
3753 _('[OPTION]... [SOURCE]... DEST')),
3753 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3754 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3754 "debugcheckstate": (debugcheckstate, [], ''),
3755 "debugcheckstate": (debugcheckstate, [], ''),
3755 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3756 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3756 "debugcomplete":
3757 "debugcomplete":
3757 (debugcomplete,
3758 (debugcomplete,
3758 [('o', 'options', None, _('show the command options'))],
3759 [('o', 'options', None, _('show the command options'))],
3759 _('[-o] CMD')),
3760 _('[-o] CMD')),
3760 "debugdate":
3761 "debugdate":
3761 (debugdate,
3762 (debugdate,
3762 [('e', 'extended', None, _('try extended date formats'))],
3763 [('e', 'extended', None, _('try extended date formats'))],
3763 _('[-e] DATE [RANGE]')),
3764 _('[-e] DATE [RANGE]')),
3764 "debugdata": (debugdata, [], _('FILE REV')),
3765 "debugdata": (debugdata, [], _('FILE REV')),
3765 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3766 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3766 "debugindex": (debugindex, [], _('FILE')),
3767 "debugindex": (debugindex, [], _('FILE')),
3767 "debugindexdot": (debugindexdot, [], _('FILE')),
3768 "debugindexdot": (debugindexdot, [], _('FILE')),
3768 "debuginstall": (debuginstall, [], ''),
3769 "debuginstall": (debuginstall, [], ''),
3769 "debugrebuildstate":
3770 "debugrebuildstate":
3770 (debugrebuildstate,
3771 (debugrebuildstate,
3771 [('r', 'rev', '', _('revision to rebuild to'))],
3772 [('r', 'rev', '', _('revision to rebuild to'))],
3772 _('[-r REV] [REV]')),
3773 _('[-r REV] [REV]')),
3773 "debugrename":
3774 "debugrename":
3774 (debugrename,
3775 (debugrename,
3775 [('r', 'rev', '', _('revision to debug'))],
3776 [('r', 'rev', '', _('revision to debug'))],
3776 _('[-r REV] FILE')),
3777 _('[-r REV] FILE')),
3777 "debugsetparents":
3778 "debugsetparents":
3778 (debugsetparents, [], _('REV1 [REV2]')),
3779 (debugsetparents, [], _('REV1 [REV2]')),
3779 "debugstate":
3780 "debugstate":
3780 (debugstate,
3781 (debugstate,
3781 [('', 'nodates', None, _('do not display the saved mtime'))],
3782 [('', 'nodates', None, _('do not display the saved mtime'))],
3782 _('[OPTION]...')),
3783 _('[OPTION]...')),
3783 "debugsub":
3784 "debugsub":
3784 (debugsub,
3785 (debugsub,
3785 [('r', 'rev', '', _('revision to check'))],
3786 [('r', 'rev', '', _('revision to check'))],
3786 _('[-r REV] [REV]')),
3787 _('[-r REV] [REV]')),
3787 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3788 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3788 "^diff":
3789 "^diff":
3789 (diff,
3790 (diff,
3790 [('r', 'rev', [], _('revision')),
3791 [('r', 'rev', [], _('revision')),
3791 ('c', 'change', '', _('change made by revision'))
3792 ('c', 'change', '', _('change made by revision'))
3792 ] + diffopts + diffopts2 + walkopts,
3793 ] + diffopts + diffopts2 + walkopts,
3793 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
3794 _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')),
3794 "^export":
3795 "^export":
3795 (export,
3796 (export,
3796 [('o', 'output', '', _('print output to file with formatted name')),
3797 [('o', 'output', '', _('print output to file with formatted name')),
3797 ('', 'switch-parent', None, _('diff against the second parent')),
3798 ('', 'switch-parent', None, _('diff against the second parent')),
3798 ('r', 'rev', [], _('revisions to export')),
3799 ('r', 'rev', [], _('revisions to export')),
3799 ] + diffopts,
3800 ] + diffopts,
3800 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3801 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3801 "^forget":
3802 "^forget":
3802 (forget,
3803 (forget,
3803 [] + walkopts,
3804 [] + walkopts,
3804 _('[OPTION]... FILE...')),
3805 _('[OPTION]... FILE...')),
3805 "grep":
3806 "grep":
3806 (grep,
3807 (grep,
3807 [('0', 'print0', None, _('end fields with NUL')),
3808 [('0', 'print0', None, _('end fields with NUL')),
3808 ('', 'all', None, _('print all revisions that match')),
3809 ('', 'all', None, _('print all revisions that match')),
3809 ('f', 'follow', None,
3810 ('f', 'follow', None,
3810 _('follow changeset history,'
3811 _('follow changeset history,'
3811 ' or file history across copies and renames')),
3812 ' or file history across copies and renames')),
3812 ('i', 'ignore-case', None, _('ignore case when matching')),
3813 ('i', 'ignore-case', None, _('ignore case when matching')),
3813 ('l', 'files-with-matches', None,
3814 ('l', 'files-with-matches', None,
3814 _('print only filenames and revisions that match')),
3815 _('print only filenames and revisions that match')),
3815 ('n', 'line-number', None, _('print matching line numbers')),
3816 ('n', 'line-number', None, _('print matching line numbers')),
3816 ('r', 'rev', [], _('only search files changed within revision range')),
3817 ('r', 'rev', [], _('only search files changed within revision range')),
3817 ('u', 'user', None, _('list the author (long with -v)')),
3818 ('u', 'user', None, _('list the author (long with -v)')),
3818 ('d', 'date', None, _('list the date (short with -q)')),
3819 ('d', 'date', None, _('list the date (short with -q)')),
3819 ] + walkopts,
3820 ] + walkopts,
3820 _('[OPTION]... PATTERN [FILE]...')),
3821 _('[OPTION]... PATTERN [FILE]...')),
3821 "heads":
3822 "heads":
3822 (heads,
3823 (heads,
3823 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3824 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3824 ('t', 'topo', False, _('show topological heads only')),
3825 ('t', 'topo', False, _('show topological heads only')),
3825 ('a', 'active', False,
3826 ('a', 'active', False,
3826 _('show active branchheads only [DEPRECATED]')),
3827 _('show active branchheads only [DEPRECATED]')),
3827 ('c', 'closed', False,
3828 ('c', 'closed', False,
3828 _('show normal and closed branch heads')),
3829 _('show normal and closed branch heads')),
3829 ] + templateopts,
3830 ] + templateopts,
3830 _('[-ac] [-r REV] [REV]...')),
3831 _('[-ac] [-r REV] [REV]...')),
3831 "help": (help_, [], _('[TOPIC]')),
3832 "help": (help_, [], _('[TOPIC]')),
3832 "identify|id":
3833 "identify|id":
3833 (identify,
3834 (identify,
3834 [('r', 'rev', '', _('identify the specified revision')),
3835 [('r', 'rev', '', _('identify the specified revision')),
3835 ('n', 'num', None, _('show local revision number')),
3836 ('n', 'num', None, _('show local revision number')),
3836 ('i', 'id', None, _('show global revision id')),
3837 ('i', 'id', None, _('show global revision id')),
3837 ('b', 'branch', None, _('show branch')),
3838 ('b', 'branch', None, _('show branch')),
3838 ('t', 'tags', None, _('show tags'))],
3839 ('t', 'tags', None, _('show tags'))],
3839 _('[-nibt] [-r REV] [SOURCE]')),
3840 _('[-nibt] [-r REV] [SOURCE]')),
3840 "import|patch":
3841 "import|patch":
3841 (import_,
3842 (import_,
3842 [('p', 'strip', 1,
3843 [('p', 'strip', 1,
3843 _('directory strip option for patch. This has the same '
3844 _('directory strip option for patch. This has the same '
3844 'meaning as the corresponding patch option')),
3845 'meaning as the corresponding patch option')),
3845 ('b', 'base', '', _('base path')),
3846 ('b', 'base', '', _('base path')),
3846 ('f', 'force', None,
3847 ('f', 'force', None,
3847 _('skip check for outstanding uncommitted changes')),
3848 _('skip check for outstanding uncommitted changes')),
3848 ('', 'no-commit', None,
3849 ('', 'no-commit', None,
3849 _("don't commit, just update the working directory")),
3850 _("don't commit, just update the working directory")),
3850 ('', 'exact', None,
3851 ('', 'exact', None,
3851 _('apply patch to the nodes from which it was generated')),
3852 _('apply patch to the nodes from which it was generated')),
3852 ('', 'import-branch', None,
3853 ('', 'import-branch', None,
3853 _('use any branch information in patch (implied by --exact)'))] +
3854 _('use any branch information in patch (implied by --exact)'))] +
3854 commitopts + commitopts2 + similarityopts,
3855 commitopts + commitopts2 + similarityopts,
3855 _('[OPTION]... PATCH...')),
3856 _('[OPTION]... PATCH...')),
3856 "incoming|in":
3857 "incoming|in":
3857 (incoming,
3858 (incoming,
3858 [('f', 'force', None,
3859 [('f', 'force', None,
3859 _('run even if remote repository is unrelated')),
3860 _('run even if remote repository is unrelated')),
3860 ('n', 'newest-first', None, _('show newest record first')),
3861 ('n', 'newest-first', None, _('show newest record first')),
3861 ('', 'bundle', '', _('file to store the bundles into')),
3862 ('', 'bundle', '', _('file to store the bundles into')),
3862 ('r', 'rev', [],
3863 ('r', 'rev', [],
3863 _('a remote changeset intended to be added')),
3864 _('a remote changeset intended to be added')),
3864 ('b', 'branch', [],
3865 ('b', 'branch', [],
3865 _('a specific branch you would like to pull')),
3866 _('a specific branch you would like to pull')),
3866 ] + logopts + remoteopts,
3867 ] + logopts + remoteopts,
3867 _('[-p] [-n] [-M] [-f] [-r REV]...'
3868 _('[-p] [-n] [-M] [-f] [-r REV]...'
3868 ' [--bundle FILENAME] [SOURCE]')),
3869 ' [--bundle FILENAME] [SOURCE]')),
3869 "^init":
3870 "^init":
3870 (init,
3871 (init,
3871 remoteopts,
3872 remoteopts,
3872 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3873 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3873 "locate":
3874 "locate":
3874 (locate,
3875 (locate,
3875 [('r', 'rev', '', _('search the repository as it is in REV')),
3876 [('r', 'rev', '', _('search the repository as it is in REV')),
3876 ('0', 'print0', None,
3877 ('0', 'print0', None,
3877 _('end filenames with NUL, for use with xargs')),
3878 _('end filenames with NUL, for use with xargs')),
3878 ('f', 'fullpath', None,
3879 ('f', 'fullpath', None,
3879 _('print complete paths from the filesystem root')),
3880 _('print complete paths from the filesystem root')),
3880 ] + walkopts,
3881 ] + walkopts,
3881 _('[OPTION]... [PATTERN]...')),
3882 _('[OPTION]... [PATTERN]...')),
3882 "^log|history":
3883 "^log|history":
3883 (log,
3884 (log,
3884 [('f', 'follow', None,
3885 [('f', 'follow', None,
3885 _('follow changeset history,'
3886 _('follow changeset history,'
3886 ' or file history across copies and renames')),
3887 ' or file history across copies and renames')),
3887 ('', 'follow-first', None,
3888 ('', 'follow-first', None,
3888 _('only follow the first parent of merge changesets')),
3889 _('only follow the first parent of merge changesets')),
3889 ('d', 'date', '', _('show revisions matching date spec')),
3890 ('d', 'date', '', _('show revisions matching date spec')),
3890 ('C', 'copies', None, _('show copied files')),
3891 ('C', 'copies', None, _('show copied files')),
3891 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3892 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3892 ('r', 'rev', [], _('show the specified revision or range')),
3893 ('r', 'rev', [], _('show the specified revision or range')),
3893 ('', 'removed', None, _('include revisions where files were removed')),
3894 ('', 'removed', None, _('include revisions where files were removed')),
3894 ('m', 'only-merges', None, _('show only merges')),
3895 ('m', 'only-merges', None, _('show only merges')),
3895 ('u', 'user', [], _('revisions committed by user')),
3896 ('u', 'user', [], _('revisions committed by user')),
3896 ('', 'only-branch', [],
3897 ('', 'only-branch', [],
3897 _('show only changesets within the given named branch (DEPRECATED)')),
3898 _('show only changesets within the given named branch (DEPRECATED)')),
3898 ('b', 'branch', [],
3899 ('b', 'branch', [],
3899 _('show changesets within the given named branch')),
3900 _('show changesets within the given named branch')),
3900 ('P', 'prune', [],
3901 ('P', 'prune', [],
3901 _('do not display revision or any of its ancestors')),
3902 _('do not display revision or any of its ancestors')),
3902 ] + logopts + walkopts,
3903 ] + logopts + walkopts,
3903 _('[OPTION]... [FILE]')),
3904 _('[OPTION]... [FILE]')),
3904 "manifest":
3905 "manifest":
3905 (manifest,
3906 (manifest,
3906 [('r', 'rev', '', _('revision to display'))],
3907 [('r', 'rev', '', _('revision to display'))],
3907 _('[-r REV]')),
3908 _('[-r REV]')),
3908 "^merge":
3909 "^merge":
3909 (merge,
3910 (merge,
3910 [('f', 'force', None, _('force a merge with outstanding changes')),
3911 [('f', 'force', None, _('force a merge with outstanding changes')),
3911 ('r', 'rev', '', _('revision to merge')),
3912 ('r', 'rev', '', _('revision to merge')),
3912 ('P', 'preview', None,
3913 ('P', 'preview', None,
3913 _('review revisions to merge (no merge is performed)'))],
3914 _('review revisions to merge (no merge is performed)'))],
3914 _('[-P] [-f] [[-r] REV]')),
3915 _('[-P] [-f] [[-r] REV]')),
3915 "outgoing|out":
3916 "outgoing|out":
3916 (outgoing,
3917 (outgoing,
3917 [('f', 'force', None,
3918 [('f', 'force', None,
3918 _('run even when the destination is unrelated')),
3919 _('run even when the destination is unrelated')),
3919 ('r', 'rev', [],
3920 ('r', 'rev', [],
3920 _('a changeset intended to be included in the destination')),
3921 _('a changeset intended to be included in the destination')),
3921 ('n', 'newest-first', None, _('show newest record first')),
3922 ('n', 'newest-first', None, _('show newest record first')),
3922 ('b', 'branch', [],
3923 ('b', 'branch', [],
3923 _('a specific branch you would like to push')),
3924 _('a specific branch you would like to push')),
3924 ] + logopts + remoteopts,
3925 ] + logopts + remoteopts,
3925 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3926 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3926 "parents":
3927 "parents":
3927 (parents,
3928 (parents,
3928 [('r', 'rev', '', _('show parents of the specified revision')),
3929 [('r', 'rev', '', _('show parents of the specified revision')),
3929 ] + templateopts,
3930 ] + templateopts,
3930 _('[-r REV] [FILE]')),
3931 _('[-r REV] [FILE]')),
3931 "paths": (paths, [], _('[NAME]')),
3932 "paths": (paths, [], _('[NAME]')),
3932 "^pull":
3933 "^pull":
3933 (pull,
3934 (pull,
3934 [('u', 'update', None,
3935 [('u', 'update', None,
3935 _('update to new branch head if changesets were pulled')),
3936 _('update to new branch head if changesets were pulled')),
3936 ('f', 'force', None,
3937 ('f', 'force', None,
3937 _('run even when remote repository is unrelated')),
3938 _('run even when remote repository is unrelated')),
3938 ('r', 'rev', [],
3939 ('r', 'rev', [],
3939 _('a remote changeset intended to be added')),
3940 _('a remote changeset intended to be added')),
3940 ('b', 'branch', [],
3941 ('b', 'branch', [],
3941 _('a specific branch you would like to pull')),
3942 _('a specific branch you would like to pull')),
3942 ] + remoteopts,
3943 ] + remoteopts,
3943 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3944 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3944 "^push":
3945 "^push":
3945 (push,
3946 (push,
3946 [('f', 'force', None, _('force push')),
3947 [('f', 'force', None, _('force push')),
3947 ('r', 'rev', [],
3948 ('r', 'rev', [],
3948 _('a changeset intended to be included in the destination')),
3949 _('a changeset intended to be included in the destination')),
3949 ('b', 'branch', [],
3950 ('b', 'branch', [],
3950 _('a specific branch you would like to push')),
3951 _('a specific branch you would like to push')),
3952 ('', 'new-branch', False, _('allow pushing a new branch')),
3951 ] + remoteopts,
3953 ] + remoteopts,
3952 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3954 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
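# Annotation for the entry above: --new-branch (the option added by this
# changeset) allows pushing a branch that does not yet exist in the
# destination without falling back to --force.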
3953 "recover": (recover, []),
3955 "recover": (recover, []),
3954 "^remove|rm":
3956 "^remove|rm":
3955 (remove,
3957 (remove,
3956 [('A', 'after', None, _('record delete for missing files')),
3958 [('A', 'after', None, _('record delete for missing files')),
3957 ('f', 'force', None,
3959 ('f', 'force', None,
3958 _('remove (and delete) file even if added or modified')),
3960 _('remove (and delete) file even if added or modified')),
3959 ] + walkopts,
3961 ] + walkopts,
3960 _('[OPTION]... FILE...')),
3962 _('[OPTION]... FILE...')),
3961 "rename|mv":
3963 "rename|mv":
3962 (rename,
3964 (rename,
3963 [('A', 'after', None, _('record a rename that has already occurred')),
3965 [('A', 'after', None, _('record a rename that has already occurred')),
3964 ('f', 'force', None,
3966 ('f', 'force', None,
3965 _('forcibly copy over an existing managed file')),
3967 _('forcibly copy over an existing managed file')),
3966 ] + walkopts + dryrunopts,
3968 ] + walkopts + dryrunopts,
3967 _('[OPTION]... SOURCE... DEST')),
3969 _('[OPTION]... SOURCE... DEST')),
3968 "resolve":
3970 "resolve":
3969 (resolve,
3971 (resolve,
3970 [('a', 'all', None, _('select all unresolved files')),
3972 [('a', 'all', None, _('select all unresolved files')),
3971 ('l', 'list', None, _('list state of files needing merge')),
3973 ('l', 'list', None, _('list state of files needing merge')),
3972 ('m', 'mark', None, _('mark files as resolved')),
3974 ('m', 'mark', None, _('mark files as resolved')),
3973 ('u', 'unmark', None, _('unmark files as resolved')),
3975 ('u', 'unmark', None, _('unmark files as resolved')),
3974 ('n', 'no-status', None, _('hide status prefix'))]
3976 ('n', 'no-status', None, _('hide status prefix'))]
3975 + walkopts,
3977 + walkopts,
3976 _('[OPTION]... [FILE]...')),
3978 _('[OPTION]... [FILE]...')),
3977 "revert":
3979 "revert":
3978 (revert,
3980 (revert,
3979 [('a', 'all', None, _('revert all changes when no arguments given')),
3981 [('a', 'all', None, _('revert all changes when no arguments given')),
3980 ('d', 'date', '', _('tipmost revision matching date')),
3982 ('d', 'date', '', _('tipmost revision matching date')),
3981 ('r', 'rev', '', _('revert to the specified revision')),
3983 ('r', 'rev', '', _('revert to the specified revision')),
3982 ('', 'no-backup', None, _('do not save backup copies of files')),
3984 ('', 'no-backup', None, _('do not save backup copies of files')),
3983 ] + walkopts + dryrunopts,
3985 ] + walkopts + dryrunopts,
3984 _('[OPTION]... [-r REV] [NAME]...')),
3986 _('[OPTION]... [-r REV] [NAME]...')),
3985 "rollback": (rollback, dryrunopts),
3987 "rollback": (rollback, dryrunopts),
3986 "root": (root, []),
3988 "root": (root, []),
3987 "^serve":
3989 "^serve":
3988 (serve,
3990 (serve,
3989 [('A', 'accesslog', '', _('name of access log file to write to')),
3991 [('A', 'accesslog', '', _('name of access log file to write to')),
3990 ('d', 'daemon', None, _('run server in background')),
3992 ('d', 'daemon', None, _('run server in background')),
3991 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3993 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3992 ('E', 'errorlog', '', _('name of error log file to write to')),
3994 ('E', 'errorlog', '', _('name of error log file to write to')),
3993 # use string type, then we can check if something was passed
3995 # use string type, then we can check if something was passed
3994 ('p', 'port', '', _('port to listen on (default: 8000)')),
3996 ('p', 'port', '', _('port to listen on (default: 8000)')),
3995 ('a', 'address', '',
3997 ('a', 'address', '',
3996 _('address to listen on (default: all interfaces)')),
3998 _('address to listen on (default: all interfaces)')),
3997 ('', 'prefix', '',
3999 ('', 'prefix', '',
3998 _('prefix path to serve from (default: server root)')),
4000 _('prefix path to serve from (default: server root)')),
3999 ('n', 'name', '',
4001 ('n', 'name', '',
4000 _('name to show in web pages (default: working directory)')),
4002 _('name to show in web pages (default: working directory)')),
4001 ('', 'web-conf', '', _('name of the hgweb config file'
4003 ('', 'web-conf', '', _('name of the hgweb config file'
4002 ' (serve more than one repository)')),
4004 ' (serve more than one repository)')),
4003 ('', 'webdir-conf', '', _('name of the hgweb config file'
4005 ('', 'webdir-conf', '', _('name of the hgweb config file'
4004 ' (DEPRECATED)')),
4006 ' (DEPRECATED)')),
4005 ('', 'pid-file', '', _('name of file to write process ID to')),
4007 ('', 'pid-file', '', _('name of file to write process ID to')),
4006 ('', 'stdio', None, _('for remote clients')),
4008 ('', 'stdio', None, _('for remote clients')),
4007 ('t', 'templates', '', _('web templates to use')),
4009 ('t', 'templates', '', _('web templates to use')),
4008 ('', 'style', '', _('template style to use')),
4010 ('', 'style', '', _('template style to use')),
4009 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4011 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
4010 ('', 'certificate', '', _('SSL certificate file'))],
4012 ('', 'certificate', '', _('SSL certificate file'))],
4011 _('[OPTION]...')),
4013 _('[OPTION]...')),
4012 "showconfig|debugconfig":
4014 "showconfig|debugconfig":
4013 (showconfig,
4015 (showconfig,
4014 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4016 [('u', 'untrusted', None, _('show untrusted configuration options'))],
4015 _('[-u] [NAME]...')),
4017 _('[-u] [NAME]...')),
4016 "^summary|sum":
4018 "^summary|sum":
4017 (summary,
4019 (summary,
4018 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4020 [('', 'remote', None, _('check for push and pull'))], '[--remote]'),
4019 "^status|st":
4021 "^status|st":
4020 (status,
4022 (status,
4021 [('A', 'all', None, _('show status of all files')),
4023 [('A', 'all', None, _('show status of all files')),
4022 ('m', 'modified', None, _('show only modified files')),
4024 ('m', 'modified', None, _('show only modified files')),
4023 ('a', 'added', None, _('show only added files')),
4025 ('a', 'added', None, _('show only added files')),
4024 ('r', 'removed', None, _('show only removed files')),
4026 ('r', 'removed', None, _('show only removed files')),
4025 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4027 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
4026 ('c', 'clean', None, _('show only files without changes')),
4028 ('c', 'clean', None, _('show only files without changes')),
4027 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4029 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
4028 ('i', 'ignored', None, _('show only ignored files')),
4030 ('i', 'ignored', None, _('show only ignored files')),
4029 ('n', 'no-status', None, _('hide status prefix')),
4031 ('n', 'no-status', None, _('hide status prefix')),
4030 ('C', 'copies', None, _('show source of copied files')),
4032 ('C', 'copies', None, _('show source of copied files')),
4031 ('0', 'print0', None,
4033 ('0', 'print0', None,
4032 _('end filenames with NUL, for use with xargs')),
4034 _('end filenames with NUL, for use with xargs')),
4033 ('', 'rev', [], _('show difference from revision')),
4035 ('', 'rev', [], _('show difference from revision')),
4034 ('', 'change', '', _('list the changed files of a revision')),
4036 ('', 'change', '', _('list the changed files of a revision')),
4035 ] + walkopts,
4037 ] + walkopts,
4036 _('[OPTION]... [FILE]...')),
4038 _('[OPTION]... [FILE]...')),
4037 "tag":
4039 "tag":
4038 (tag,
4040 (tag,
4039 [('f', 'force', None, _('replace existing tag')),
4041 [('f', 'force', None, _('replace existing tag')),
4040 ('l', 'local', None, _('make the tag local')),
4042 ('l', 'local', None, _('make the tag local')),
4041 ('r', 'rev', '', _('revision to tag')),
4043 ('r', 'rev', '', _('revision to tag')),
4042 ('', 'remove', None, _('remove a tag')),
4044 ('', 'remove', None, _('remove a tag')),
4043 # -l/--local is already there, commitopts cannot be used
4045 # -l/--local is already there, commitopts cannot be used
4044 ('e', 'edit', None, _('edit commit message')),
4046 ('e', 'edit', None, _('edit commit message')),
4045 ('m', 'message', '', _('use <text> as commit message')),
4047 ('m', 'message', '', _('use <text> as commit message')),
4046 ] + commitopts2,
4048 ] + commitopts2,
4047 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4049 _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
4048 "tags": (tags, [], ''),
4050 "tags": (tags, [], ''),
4049 "tip":
4051 "tip":
4050 (tip,
4052 (tip,
4051 [('p', 'patch', None, _('show patch')),
4053 [('p', 'patch', None, _('show patch')),
4052 ('g', 'git', None, _('use git extended diff format')),
4054 ('g', 'git', None, _('use git extended diff format')),
4053 ] + templateopts,
4055 ] + templateopts,
4054 _('[-p] [-g]')),
4056 _('[-p] [-g]')),
4055 "unbundle":
4057 "unbundle":
4056 (unbundle,
4058 (unbundle,
4057 [('u', 'update', None,
4059 [('u', 'update', None,
4058 _('update to new branch head if changesets were unbundled'))],
4060 _('update to new branch head if changesets were unbundled'))],
4059 _('[-u] FILE...')),
4061 _('[-u] FILE...')),
4060 "^update|up|checkout|co":
4062 "^update|up|checkout|co":
4061 (update,
4063 (update,
4062 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4064 [('C', 'clean', None, _('discard uncommitted changes (no backup)')),
4063 ('c', 'check', None, _('check for uncommitted changes')),
4065 ('c', 'check', None, _('check for uncommitted changes')),
4064 ('d', 'date', '', _('tipmost revision matching date')),
4066 ('d', 'date', '', _('tipmost revision matching date')),
4065 ('r', 'rev', '', _('revision'))],
4067 ('r', 'rev', '', _('revision'))],
4066 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4068 _('[-c] [-C] [-d DATE] [[-r] REV]')),
4067 "verify": (verify, []),
4069 "verify": (verify, []),
4068 "version": (version_, []),
4070 "version": (version_, []),
4069 }
4071 }
4070
4072
4071 norepo = ("clone init version help debugcommands debugcomplete debugdata"
4073 norepo = ("clone init version help debugcommands debugcomplete debugdata"
4072 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
4074 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
4073 optionalrepo = ("identify paths serve showconfig debugancestor")
4075 optionalrepo = ("identify paths serve showconfig debugancestor")
@@ -1,2292 +1,2294 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import bin, hex, nullid, nullrev, short
8 from node import bin, hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import repo, changegroup, subrepo
10 import repo, changegroup, subrepo
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import lock, transaction, store, encoding
12 import lock, transaction, store, encoding
13 import util, extensions, hook, error
13 import util, extensions, hook, error
14 import match as matchmod
14 import match as matchmod
15 import merge as mergemod
15 import merge as mergemod
16 import tags as tagsmod
16 import tags as tagsmod
17 import url as urlmod
17 import url as urlmod
18 from lock import release
18 from lock import release
19 import weakref, stat, errno, os, time, inspect
19 import weakref, stat, errno, os, time, inspect
20 propertycache = util.propertycache
20 propertycache = util.propertycache
21
21
22 class localrepository(repo.repository):
22 class localrepository(repo.repository):
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
23 capabilities = set(('lookup', 'changegroupsubset', 'branchmap'))
24 supported = set('revlogv1 store fncache shared'.split())
24 supported = set('revlogv1 store fncache shared'.split())
25
25
26 def __init__(self, baseui, path=None, create=0):
26 def __init__(self, baseui, path=None, create=0):
27 repo.repository.__init__(self)
27 repo.repository.__init__(self)
28 self.root = os.path.realpath(util.expandpath(path))
28 self.root = os.path.realpath(util.expandpath(path))
29 self.path = os.path.join(self.root, ".hg")
29 self.path = os.path.join(self.root, ".hg")
30 self.origroot = path
30 self.origroot = path
31 self.opener = util.opener(self.path)
31 self.opener = util.opener(self.path)
32 self.wopener = util.opener(self.root)
32 self.wopener = util.opener(self.root)
33 self.baseui = baseui
33 self.baseui = baseui
34 self.ui = baseui.copy()
34 self.ui = baseui.copy()
35
35
36 try:
36 try:
37 self.ui.readconfig(self.join("hgrc"), self.root)
37 self.ui.readconfig(self.join("hgrc"), self.root)
38 extensions.loadall(self.ui)
38 extensions.loadall(self.ui)
39 except IOError:
39 except IOError:
40 pass
40 pass
41
41
42 if not os.path.isdir(self.path):
42 if not os.path.isdir(self.path):
43 if create:
43 if create:
44 if not os.path.exists(path):
44 if not os.path.exists(path):
45 os.mkdir(path)
45 os.mkdir(path)
46 os.mkdir(self.path)
46 os.mkdir(self.path)
47 requirements = ["revlogv1"]
47 requirements = ["revlogv1"]
48 if self.ui.configbool('format', 'usestore', True):
48 if self.ui.configbool('format', 'usestore', True):
49 os.mkdir(os.path.join(self.path, "store"))
49 os.mkdir(os.path.join(self.path, "store"))
50 requirements.append("store")
50 requirements.append("store")
51 if self.ui.configbool('format', 'usefncache', True):
51 if self.ui.configbool('format', 'usefncache', True):
52 requirements.append("fncache")
52 requirements.append("fncache")
53 # create an invalid changelog
53 # create an invalid changelog
54 self.opener("00changelog.i", "a").write(
54 self.opener("00changelog.i", "a").write(
55 '\0\0\0\2' # represents revlogv2
55 '\0\0\0\2' # represents revlogv2
56 ' dummy changelog to prevent using the old repo layout'
56 ' dummy changelog to prevent using the old repo layout'
57 )
57 )
58 reqfile = self.opener("requires", "w")
58 reqfile = self.opener("requires", "w")
59 for r in requirements:
59 for r in requirements:
60 reqfile.write("%s\n" % r)
60 reqfile.write("%s\n" % r)
61 reqfile.close()
61 reqfile.close()
62 else:
62 else:
63 raise error.RepoError(_("repository %s not found") % path)
63 raise error.RepoError(_("repository %s not found") % path)
64 elif create:
64 elif create:
65 raise error.RepoError(_("repository %s already exists") % path)
65 raise error.RepoError(_("repository %s already exists") % path)
66 else:
66 else:
67 # find requirements
67 # find requirements
68 requirements = set()
68 requirements = set()
69 try:
69 try:
70 requirements = set(self.opener("requires").read().splitlines())
70 requirements = set(self.opener("requires").read().splitlines())
71 except IOError, inst:
71 except IOError, inst:
72 if inst.errno != errno.ENOENT:
72 if inst.errno != errno.ENOENT:
73 raise
73 raise
74 for r in requirements - self.supported:
74 for r in requirements - self.supported:
75 raise error.RepoError(_("requirement '%s' not supported") % r)
75 raise error.RepoError(_("requirement '%s' not supported") % r)
76
76
77 self.sharedpath = self.path
77 self.sharedpath = self.path
78 try:
78 try:
79 s = os.path.realpath(self.opener("sharedpath").read())
79 s = os.path.realpath(self.opener("sharedpath").read())
80 if not os.path.exists(s):
80 if not os.path.exists(s):
81 raise error.RepoError(
81 raise error.RepoError(
82 _('.hg/sharedpath points to nonexistent directory %s') % s)
82 _('.hg/sharedpath points to nonexistent directory %s') % s)
83 self.sharedpath = s
83 self.sharedpath = s
84 except IOError, inst:
84 except IOError, inst:
85 if inst.errno != errno.ENOENT:
85 if inst.errno != errno.ENOENT:
86 raise
86 raise
87
87
88 self.store = store.store(requirements, self.sharedpath, util.opener)
88 self.store = store.store(requirements, self.sharedpath, util.opener)
89 self.spath = self.store.path
89 self.spath = self.store.path
90 self.sopener = self.store.opener
90 self.sopener = self.store.opener
91 self.sjoin = self.store.join
91 self.sjoin = self.store.join
92 self.opener.createmode = self.store.createmode
92 self.opener.createmode = self.store.createmode
93 self.sopener.options = {}
93 self.sopener.options = {}
94
94
95 # These two define the set of tags for this repository. _tags
95 # These two define the set of tags for this repository. _tags
96 # maps tag name to node; _tagtypes maps tag name to 'global' or
96 # maps tag name to node; _tagtypes maps tag name to 'global' or
97 # 'local'. (Global tags are defined by .hgtags across all
97 # 'local'. (Global tags are defined by .hgtags across all
98 # heads, and local tags are defined in .hg/localtags.) They
98 # heads, and local tags are defined in .hg/localtags.) They
99 # constitute the in-memory cache of tags.
99 # constitute the in-memory cache of tags.
100 self._tags = None
100 self._tags = None
101 self._tagtypes = None
101 self._tagtypes = None
102
102
103 self._branchcache = None # in UTF-8
103 self._branchcache = None # in UTF-8
104 self._branchcachetip = None
104 self._branchcachetip = None
105 self.nodetagscache = None
105 self.nodetagscache = None
106 self.filterpats = {}
106 self.filterpats = {}
107 self._datafilters = {}
107 self._datafilters = {}
108 self._transref = self._lockref = self._wlockref = None
108 self._transref = self._lockref = self._wlockref = None
109
109
110 @propertycache
110 @propertycache
111 def changelog(self):
111 def changelog(self):
112 c = changelog.changelog(self.sopener)
112 c = changelog.changelog(self.sopener)
113 if 'HG_PENDING' in os.environ:
113 if 'HG_PENDING' in os.environ:
114 p = os.environ['HG_PENDING']
114 p = os.environ['HG_PENDING']
115 if p.startswith(self.root):
115 if p.startswith(self.root):
116 c.readpending('00changelog.i.a')
116 c.readpending('00changelog.i.a')
117 self.sopener.options['defversion'] = c.version
117 self.sopener.options['defversion'] = c.version
118 return c
118 return c
119
119
120 @propertycache
120 @propertycache
121 def manifest(self):
121 def manifest(self):
122 return manifest.manifest(self.sopener)
122 return manifest.manifest(self.sopener)
123
123
124 @propertycache
124 @propertycache
125 def dirstate(self):
125 def dirstate(self):
126 return dirstate.dirstate(self.opener, self.ui, self.root)
126 return dirstate.dirstate(self.opener, self.ui, self.root)
127
127
128 def __getitem__(self, changeid):
128 def __getitem__(self, changeid):
129 if changeid is None:
129 if changeid is None:
130 return context.workingctx(self)
130 return context.workingctx(self)
131 return context.changectx(self, changeid)
131 return context.changectx(self, changeid)
132
132
133 def __contains__(self, changeid):
133 def __contains__(self, changeid):
134 try:
134 try:
135 return bool(self.lookup(changeid))
135 return bool(self.lookup(changeid))
136 except error.RepoLookupError:
136 except error.RepoLookupError:
137 return False
137 return False
138
138
139 def __nonzero__(self):
139 def __nonzero__(self):
140 return True
140 return True
141
141
142 def __len__(self):
142 def __len__(self):
143 return len(self.changelog)
143 return len(self.changelog)
144
144
145 def __iter__(self):
145 def __iter__(self):
146 for i in xrange(len(self)):
146 for i in xrange(len(self)):
147 yield i
147 yield i
148
148
149 def url(self):
149 def url(self):
150 return 'file:' + self.root
150 return 'file:' + self.root
151
151
152 def hook(self, name, throw=False, **args):
152 def hook(self, name, throw=False, **args):
153 return hook.hook(self.ui, self, name, throw, **args)
153 return hook.hook(self.ui, self, name, throw, **args)
154
154
155 tag_disallowed = ':\r\n'
155 tag_disallowed = ':\r\n'
156
156
157 def _tag(self, names, node, message, local, user, date, extra={}):
157 def _tag(self, names, node, message, local, user, date, extra={}):
158 if isinstance(names, str):
158 if isinstance(names, str):
159 allchars = names
159 allchars = names
160 names = (names,)
160 names = (names,)
161 else:
161 else:
162 allchars = ''.join(names)
162 allchars = ''.join(names)
163 for c in self.tag_disallowed:
163 for c in self.tag_disallowed:
164 if c in allchars:
164 if c in allchars:
165 raise util.Abort(_('%r cannot be used in a tag name') % c)
165 raise util.Abort(_('%r cannot be used in a tag name') % c)
166
166
167 branches = self.branchmap()
167 branches = self.branchmap()
168 for name in names:
168 for name in names:
169 self.hook('pretag', throw=True, node=hex(node), tag=name,
169 self.hook('pretag', throw=True, node=hex(node), tag=name,
170 local=local)
170 local=local)
171 if name in branches:
171 if name in branches:
172 self.ui.warn(_("warning: tag %s conflicts with existing"
172 self.ui.warn(_("warning: tag %s conflicts with existing"
173 " branch name\n") % name)
173 " branch name\n") % name)
174
174
175 def writetags(fp, names, munge, prevtags):
175 def writetags(fp, names, munge, prevtags):
176 fp.seek(0, 2)
176 fp.seek(0, 2)
177 if prevtags and prevtags[-1] != '\n':
177 if prevtags and prevtags[-1] != '\n':
178 fp.write('\n')
178 fp.write('\n')
179 for name in names:
179 for name in names:
180 m = munge and munge(name) or name
180 m = munge and munge(name) or name
181 if self._tagtypes and name in self._tagtypes:
181 if self._tagtypes and name in self._tagtypes:
182 old = self._tags.get(name, nullid)
182 old = self._tags.get(name, nullid)
183 fp.write('%s %s\n' % (hex(old), m))
183 fp.write('%s %s\n' % (hex(old), m))
184 fp.write('%s %s\n' % (hex(node), m))
184 fp.write('%s %s\n' % (hex(node), m))
185 fp.close()
185 fp.close()
186
186
187 prevtags = ''
187 prevtags = ''
188 if local:
188 if local:
189 try:
189 try:
190 fp = self.opener('localtags', 'r+')
190 fp = self.opener('localtags', 'r+')
191 except IOError:
191 except IOError:
192 fp = self.opener('localtags', 'a')
192 fp = self.opener('localtags', 'a')
193 else:
193 else:
194 prevtags = fp.read()
194 prevtags = fp.read()
195
195
196 # local tags are stored in the current charset
196 # local tags are stored in the current charset
197 writetags(fp, names, None, prevtags)
197 writetags(fp, names, None, prevtags)
198 for name in names:
198 for name in names:
199 self.hook('tag', node=hex(node), tag=name, local=local)
199 self.hook('tag', node=hex(node), tag=name, local=local)
200 return
200 return
201
201
202 try:
202 try:
203 fp = self.wfile('.hgtags', 'rb+')
203 fp = self.wfile('.hgtags', 'rb+')
204 except IOError:
204 except IOError:
205 fp = self.wfile('.hgtags', 'ab')
205 fp = self.wfile('.hgtags', 'ab')
206 else:
206 else:
207 prevtags = fp.read()
207 prevtags = fp.read()
208
208
209 # committed tags are stored in UTF-8
209 # committed tags are stored in UTF-8
210 writetags(fp, names, encoding.fromlocal, prevtags)
210 writetags(fp, names, encoding.fromlocal, prevtags)
211
211
212 if '.hgtags' not in self.dirstate:
212 if '.hgtags' not in self.dirstate:
213 self.add(['.hgtags'])
213 self.add(['.hgtags'])
214
214
215 m = matchmod.exact(self.root, '', ['.hgtags'])
215 m = matchmod.exact(self.root, '', ['.hgtags'])
216 tagnode = self.commit(message, user, date, extra=extra, match=m)
216 tagnode = self.commit(message, user, date, extra=extra, match=m)
217
217
218 for name in names:
218 for name in names:
219 self.hook('tag', node=hex(node), tag=name, local=local)
219 self.hook('tag', node=hex(node), tag=name, local=local)
220
220
221 return tagnode
221 return tagnode
222
222
223 def tag(self, names, node, message, local, user, date):
223 def tag(self, names, node, message, local, user, date):
224 '''tag a revision with one or more symbolic names.
224 '''tag a revision with one or more symbolic names.
225
225
226 names is a list of strings or, when adding a single tag, names may be a
226 names is a list of strings or, when adding a single tag, names may be a
227 string.
227 string.
228
228
229 if local is True, the tags are stored in a per-repository file.
229 if local is True, the tags are stored in a per-repository file.
230 otherwise, they are stored in the .hgtags file, and a new
230 otherwise, they are stored in the .hgtags file, and a new
231 changeset is committed with the change.
231 changeset is committed with the change.
232
232
233 keyword arguments:
233 keyword arguments:
234
234
235 local: whether to store tags in non-version-controlled file
235 local: whether to store tags in non-version-controlled file
236 (default False)
236 (default False)
237
237
238 message: commit message to use if committing
238 message: commit message to use if committing
239
239
240 user: name of user to use if committing
240 user: name of user to use if committing
241
241
242 date: date tuple to use if committing'''
242 date: date tuple to use if committing'''
243
243
244 for x in self.status()[:5]:
244 for x in self.status()[:5]:
245 if '.hgtags' in x:
245 if '.hgtags' in x:
246 raise util.Abort(_('working copy of .hgtags is changed '
246 raise util.Abort(_('working copy of .hgtags is changed '
247 '(please commit .hgtags manually)'))
247 '(please commit .hgtags manually)'))
248
248
249 self.tags() # instantiate the cache
249 self.tags() # instantiate the cache
250 self._tag(names, node, message, local, user, date)
250 self._tag(names, node, message, local, user, date)
251
251
252 def tags(self):
252 def tags(self):
253 '''return a mapping of tag to node'''
253 '''return a mapping of tag to node'''
254 if self._tags is None:
254 if self._tags is None:
255 (self._tags, self._tagtypes) = self._findtags()
255 (self._tags, self._tagtypes) = self._findtags()
256
256
257 return self._tags
257 return self._tags
258
258
259 def _findtags(self):
259 def _findtags(self):
260 '''Do the hard work of finding tags. Return a pair of dicts
260 '''Do the hard work of finding tags. Return a pair of dicts
261 (tags, tagtypes) where tags maps tag name to node, and tagtypes
261 (tags, tagtypes) where tags maps tag name to node, and tagtypes
262 maps tag name to a string like \'global\' or \'local\'.
262 maps tag name to a string like \'global\' or \'local\'.
263 Subclasses or extensions are free to add their own tags, but
263 Subclasses or extensions are free to add their own tags, but
264 should be aware that the returned dicts will be retained for the
264 should be aware that the returned dicts will be retained for the
265 duration of the localrepo object.'''
265 duration of the localrepo object.'''
266
266
267 # XXX what tagtype should subclasses/extensions use? Currently
267 # XXX what tagtype should subclasses/extensions use? Currently
268 # mq and bookmarks add tags, but do not set the tagtype at all.
268 # mq and bookmarks add tags, but do not set the tagtype at all.
269 # Should each extension invent its own tag type? Should there
269 # Should each extension invent its own tag type? Should there
270 # be one tagtype for all such "virtual" tags? Or is the status
270 # be one tagtype for all such "virtual" tags? Or is the status
271 # quo fine?
271 # quo fine?
272
272
273 alltags = {} # map tag name to (node, hist)
273 alltags = {} # map tag name to (node, hist)
274 tagtypes = {}
274 tagtypes = {}
275
275
276 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
276 tagsmod.findglobaltags(self.ui, self, alltags, tagtypes)
277 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
277 tagsmod.readlocaltags(self.ui, self, alltags, tagtypes)
278
278
279 # Build the return dicts. Have to re-encode tag names because
279 # Build the return dicts. Have to re-encode tag names because
280 # the tags module always uses UTF-8 (in order not to lose info
280 # the tags module always uses UTF-8 (in order not to lose info
281 # writing to the cache), but the rest of Mercurial wants them in
281 # writing to the cache), but the rest of Mercurial wants them in
282 # local encoding.
282 # local encoding.
283 tags = {}
283 tags = {}
284 for (name, (node, hist)) in alltags.iteritems():
284 for (name, (node, hist)) in alltags.iteritems():
285 if node != nullid:
285 if node != nullid:
286 tags[encoding.tolocal(name)] = node
286 tags[encoding.tolocal(name)] = node
287 tags['tip'] = self.changelog.tip()
287 tags['tip'] = self.changelog.tip()
288 tagtypes = dict([(encoding.tolocal(name), value)
288 tagtypes = dict([(encoding.tolocal(name), value)
289 for (name, value) in tagtypes.iteritems()])
289 for (name, value) in tagtypes.iteritems()])
290 return (tags, tagtypes)
290 return (tags, tagtypes)
291
291
292 def tagtype(self, tagname):
292 def tagtype(self, tagname):
293 '''
293 '''
294 return the type of the given tag. result can be:
294 return the type of the given tag. result can be:
295
295
296 'local' : a local tag
296 'local' : a local tag
297 'global' : a global tag
297 'global' : a global tag
298 None : tag does not exist
298 None : tag does not exist
299 '''
299 '''
300
300
301 self.tags()
301 self.tags()
302
302
303 return self._tagtypes.get(tagname)
303 return self._tagtypes.get(tagname)
304
304
305 def tagslist(self):
305 def tagslist(self):
306 '''return a list of tags ordered by revision'''
306 '''return a list of tags ordered by revision'''
307 l = []
307 l = []
308 for t, n in self.tags().iteritems():
308 for t, n in self.tags().iteritems():
309 try:
309 try:
310 r = self.changelog.rev(n)
310 r = self.changelog.rev(n)
311 except:
311 except:
312 r = -2 # sort to the beginning of the list if unknown
312 r = -2 # sort to the beginning of the list if unknown
313 l.append((r, t, n))
313 l.append((r, t, n))
314 return [(t, n) for r, t, n in sorted(l)]
314 return [(t, n) for r, t, n in sorted(l)]
315
315
316 def nodetags(self, node):
316 def nodetags(self, node):
317 '''return the tags associated with a node'''
317 '''return the tags associated with a node'''
318 if not self.nodetagscache:
318 if not self.nodetagscache:
319 self.nodetagscache = {}
319 self.nodetagscache = {}
320 for t, n in self.tags().iteritems():
320 for t, n in self.tags().iteritems():
321 self.nodetagscache.setdefault(n, []).append(t)
321 self.nodetagscache.setdefault(n, []).append(t)
322 for tags in self.nodetagscache.itervalues():
322 for tags in self.nodetagscache.itervalues():
323 tags.sort()
323 tags.sort()
324 return self.nodetagscache.get(node, [])
324 return self.nodetagscache.get(node, [])
325
325
326 def _branchtags(self, partial, lrev):
326 def _branchtags(self, partial, lrev):
327 # TODO: rename this function?
327 # TODO: rename this function?
328 tiprev = len(self) - 1
328 tiprev = len(self) - 1
329 if lrev != tiprev:
329 if lrev != tiprev:
330 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
330 ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1))
331 self._updatebranchcache(partial, ctxgen)
331 self._updatebranchcache(partial, ctxgen)
332 self._writebranchcache(partial, self.changelog.tip(), tiprev)
332 self._writebranchcache(partial, self.changelog.tip(), tiprev)
333
333
334 return partial
334 return partial
335
335
336 def branchmap(self):
336 def branchmap(self):
337 '''returns a dictionary {branch: [branchheads]}'''
337 '''returns a dictionary {branch: [branchheads]}'''
338 tip = self.changelog.tip()
338 tip = self.changelog.tip()
339 if self._branchcache is not None and self._branchcachetip == tip:
339 if self._branchcache is not None and self._branchcachetip == tip:
340 return self._branchcache
340 return self._branchcache
341
341
342 oldtip = self._branchcachetip
342 oldtip = self._branchcachetip
343 self._branchcachetip = tip
343 self._branchcachetip = tip
344 if oldtip is None or oldtip not in self.changelog.nodemap:
344 if oldtip is None or oldtip not in self.changelog.nodemap:
345 partial, last, lrev = self._readbranchcache()
345 partial, last, lrev = self._readbranchcache()
346 else:
346 else:
347 lrev = self.changelog.rev(oldtip)
347 lrev = self.changelog.rev(oldtip)
348 partial = self._branchcache
348 partial = self._branchcache
349
349
350 self._branchtags(partial, lrev)
350 self._branchtags(partial, lrev)
351 # this private cache holds all heads (not just tips)
351 # this private cache holds all heads (not just tips)
352 self._branchcache = partial
352 self._branchcache = partial
353
353
354 return self._branchcache
354 return self._branchcache
355
355
356 def branchtags(self):
356 def branchtags(self):
357 '''return a dict where branch names map to the tipmost head of
357 '''return a dict where branch names map to the tipmost head of
358 the branch; open heads come before closed'''
358 the branch; open heads come before closed'''
359 bt = {}
359 bt = {}
360 for bn, heads in self.branchmap().iteritems():
360 for bn, heads in self.branchmap().iteritems():
361 tip = heads[-1]
361 tip = heads[-1]
362 for h in reversed(heads):
362 for h in reversed(heads):
363 if 'close' not in self.changelog.read(h)[5]:
363 if 'close' not in self.changelog.read(h)[5]:
364 tip = h
364 tip = h
365 break
365 break
366 bt[bn] = tip
366 bt[bn] = tip
367 return bt
367 return bt
368
368
369
369
370 def _readbranchcache(self):
370 def _readbranchcache(self):
371 partial = {}
371 partial = {}
372 try:
372 try:
373 f = self.opener("branchheads.cache")
373 f = self.opener("branchheads.cache")
374 lines = f.read().split('\n')
374 lines = f.read().split('\n')
375 f.close()
375 f.close()
376 except (IOError, OSError):
376 except (IOError, OSError):
377 return {}, nullid, nullrev
377 return {}, nullid, nullrev
378
378
379 try:
379 try:
380 last, lrev = lines.pop(0).split(" ", 1)
380 last, lrev = lines.pop(0).split(" ", 1)
381 last, lrev = bin(last), int(lrev)
381 last, lrev = bin(last), int(lrev)
382 if lrev >= len(self) or self[lrev].node() != last:
382 if lrev >= len(self) or self[lrev].node() != last:
383 # invalidate the cache
383 # invalidate the cache
384 raise ValueError('invalidating branch cache (tip differs)')
384 raise ValueError('invalidating branch cache (tip differs)')
385 for l in lines:
385 for l in lines:
386 if not l:
386 if not l:
387 continue
387 continue
388 node, label = l.split(" ", 1)
388 node, label = l.split(" ", 1)
389 partial.setdefault(label.strip(), []).append(bin(node))
389 partial.setdefault(label.strip(), []).append(bin(node))
390 except KeyboardInterrupt:
390 except KeyboardInterrupt:
391 raise
391 raise
392 except Exception, inst:
392 except Exception, inst:
393 if self.ui.debugflag:
393 if self.ui.debugflag:
394 self.ui.warn(str(inst), '\n')
394 self.ui.warn(str(inst), '\n')
395 partial, last, lrev = {}, nullid, nullrev
395 partial, last, lrev = {}, nullid, nullrev
396 return partial, last, lrev
396 return partial, last, lrev
397
397
398 def _writebranchcache(self, branches, tip, tiprev):
398 def _writebranchcache(self, branches, tip, tiprev):
399 try:
399 try:
400 f = self.opener("branchheads.cache", "w", atomictemp=True)
400 f = self.opener("branchheads.cache", "w", atomictemp=True)
401 f.write("%s %s\n" % (hex(tip), tiprev))
401 f.write("%s %s\n" % (hex(tip), tiprev))
402 for label, nodes in branches.iteritems():
402 for label, nodes in branches.iteritems():
403 for node in nodes:
403 for node in nodes:
404 f.write("%s %s\n" % (hex(node), label))
404 f.write("%s %s\n" % (hex(node), label))
405 f.rename()
405 f.rename()
406 except (IOError, OSError):
406 except (IOError, OSError):
407 pass
407 pass
408
408
409 def _updatebranchcache(self, partial, ctxgen):
409 def _updatebranchcache(self, partial, ctxgen):
410 # collect new branch entries
410 # collect new branch entries
411 newbranches = {}
411 newbranches = {}
412 for c in ctxgen:
412 for c in ctxgen:
413 newbranches.setdefault(c.branch(), []).append(c.node())
413 newbranches.setdefault(c.branch(), []).append(c.node())
414 # if older branchheads are reachable from new ones, they aren't
414 # if older branchheads are reachable from new ones, they aren't
415 # really branchheads. Note checking parents is insufficient:
415 # really branchheads. Note checking parents is insufficient:
416 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
416 # 1 (branch a) -> 2 (branch b) -> 3 (branch a)
417 for branch, newnodes in newbranches.iteritems():
417 for branch, newnodes in newbranches.iteritems():
418 bheads = partial.setdefault(branch, [])
418 bheads = partial.setdefault(branch, [])
419 bheads.extend(newnodes)
419 bheads.extend(newnodes)
420 if len(bheads) <= 1:
420 if len(bheads) <= 1:
421 continue
421 continue
422 # starting from tip means fewer passes over reachable
422 # starting from tip means fewer passes over reachable
423 while newnodes:
423 while newnodes:
424 latest = newnodes.pop()
424 latest = newnodes.pop()
425 if latest not in bheads:
425 if latest not in bheads:
426 continue
426 continue
427 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
427 minbhrev = self[min([self[bh].rev() for bh in bheads])].node()
428 reachable = self.changelog.reachable(latest, minbhrev)
428 reachable = self.changelog.reachable(latest, minbhrev)
429 reachable.remove(latest)
429 reachable.remove(latest)
430 bheads = [b for b in bheads if b not in reachable]
430 bheads = [b for b in bheads if b not in reachable]
431 partial[branch] = bheads
431 partial[branch] = bheads
432
432
433 def lookup(self, key):
433 def lookup(self, key):
434 if isinstance(key, int):
434 if isinstance(key, int):
435 return self.changelog.node(key)
435 return self.changelog.node(key)
436 elif key == '.':
436 elif key == '.':
437 return self.dirstate.parents()[0]
437 return self.dirstate.parents()[0]
438 elif key == 'null':
438 elif key == 'null':
439 return nullid
439 return nullid
440 elif key == 'tip':
440 elif key == 'tip':
441 return self.changelog.tip()
441 return self.changelog.tip()
442 n = self.changelog._match(key)
442 n = self.changelog._match(key)
443 if n:
443 if n:
444 return n
444 return n
445 if key in self.tags():
445 if key in self.tags():
446 return self.tags()[key]
446 return self.tags()[key]
447 if key in self.branchtags():
447 if key in self.branchtags():
448 return self.branchtags()[key]
448 return self.branchtags()[key]
449 n = self.changelog._partialmatch(key)
449 n = self.changelog._partialmatch(key)
450 if n:
450 if n:
451 return n
451 return n
452
452
453 # can't find key, check if it might have come from damaged dirstate
453 # can't find key, check if it might have come from damaged dirstate
454 if key in self.dirstate.parents():
454 if key in self.dirstate.parents():
455 raise error.Abort(_("working directory has unknown parent '%s'!")
455 raise error.Abort(_("working directory has unknown parent '%s'!")
456 % short(key))
456 % short(key))
457 try:
457 try:
458 if len(key) == 20:
458 if len(key) == 20:
459 key = hex(key)
459 key = hex(key)
460 except:
460 except:
461 pass
461 pass
462 raise error.RepoLookupError(_("unknown revision '%s'") % key)
462 raise error.RepoLookupError(_("unknown revision '%s'") % key)
463
463
464 def lookupbranch(self, key, remote=None):
464 def lookupbranch(self, key, remote=None):
465 repo = remote or self
465 repo = remote or self
466 if key in repo.branchmap():
466 if key in repo.branchmap():
467 return key
467 return key
468
468
469 repo = (remote and remote.local()) and remote or self
469 repo = (remote and remote.local()) and remote or self
470 return repo[key].branch()
470 return repo[key].branch()
471
471
472 def local(self):
472 def local(self):
473 return True
473 return True
474
474
475 def join(self, f):
475 def join(self, f):
476 return os.path.join(self.path, f)
476 return os.path.join(self.path, f)
477
477
478 def wjoin(self, f):
478 def wjoin(self, f):
479 return os.path.join(self.root, f)
479 return os.path.join(self.root, f)
480
480
481 def rjoin(self, f):
481 def rjoin(self, f):
482 return os.path.join(self.root, util.pconvert(f))
482 return os.path.join(self.root, util.pconvert(f))
483
483
484 def file(self, f):
484 def file(self, f):
485 if f[0] == '/':
485 if f[0] == '/':
486 f = f[1:]
486 f = f[1:]
487 return filelog.filelog(self.sopener, f)
487 return filelog.filelog(self.sopener, f)
488
488
489 def changectx(self, changeid):
489 def changectx(self, changeid):
490 return self[changeid]
490 return self[changeid]
491
491
492 def parents(self, changeid=None):
492 def parents(self, changeid=None):
493 '''get list of changectxs for parents of changeid'''
493 '''get list of changectxs for parents of changeid'''
494 return self[changeid].parents()
494 return self[changeid].parents()
495
495
496 def filectx(self, path, changeid=None, fileid=None):
496 def filectx(self, path, changeid=None, fileid=None):
497 """changeid can be a changeset revision, node, or tag.
497 """changeid can be a changeset revision, node, or tag.
498 fileid can be a file revision or node."""
498 fileid can be a file revision or node."""
499 return context.filectx(self, path, changeid, fileid)
499 return context.filectx(self, path, changeid, fileid)
500
500
501 def getcwd(self):
501 def getcwd(self):
502 return self.dirstate.getcwd()
502 return self.dirstate.getcwd()
503
503
504 def pathto(self, f, cwd=None):
504 def pathto(self, f, cwd=None):
505 return self.dirstate.pathto(f, cwd)
505 return self.dirstate.pathto(f, cwd)
506
506
507 def wfile(self, f, mode='r'):
507 def wfile(self, f, mode='r'):
508 return self.wopener(f, mode)
508 return self.wopener(f, mode)
509
509
510 def _link(self, f):
510 def _link(self, f):
511 return os.path.islink(self.wjoin(f))
511 return os.path.islink(self.wjoin(f))
512
512
513 def _filter(self, filter, filename, data):
513 def _filter(self, filter, filename, data):
514 if filter not in self.filterpats:
514 if filter not in self.filterpats:
515 l = []
515 l = []
516 for pat, cmd in self.ui.configitems(filter):
516 for pat, cmd in self.ui.configitems(filter):
517 if cmd == '!':
517 if cmd == '!':
518 continue
518 continue
519 mf = matchmod.match(self.root, '', [pat])
519 mf = matchmod.match(self.root, '', [pat])
520 fn = None
520 fn = None
521 params = cmd
521 params = cmd
522 for name, filterfn in self._datafilters.iteritems():
522 for name, filterfn in self._datafilters.iteritems():
523 if cmd.startswith(name):
523 if cmd.startswith(name):
524 fn = filterfn
524 fn = filterfn
525 params = cmd[len(name):].lstrip()
525 params = cmd[len(name):].lstrip()
526 break
526 break
527 if not fn:
527 if not fn:
528 fn = lambda s, c, **kwargs: util.filter(s, c)
528 fn = lambda s, c, **kwargs: util.filter(s, c)
529 # Wrap old filters not supporting keyword arguments
529 # Wrap old filters not supporting keyword arguments
530 if not inspect.getargspec(fn)[2]:
530 if not inspect.getargspec(fn)[2]:
531 oldfn = fn
531 oldfn = fn
532 fn = lambda s, c, **kwargs: oldfn(s, c)
532 fn = lambda s, c, **kwargs: oldfn(s, c)
533 l.append((mf, fn, params))
533 l.append((mf, fn, params))
534 self.filterpats[filter] = l
534 self.filterpats[filter] = l
535
535
536 for mf, fn, cmd in self.filterpats[filter]:
536 for mf, fn, cmd in self.filterpats[filter]:
537 if mf(filename):
537 if mf(filename):
538 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
538 self.ui.debug("filtering %s through %s\n" % (filename, cmd))
539 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
539 data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
540 break
540 break
541
541
542 return data
542 return data
543
543
544 def adddatafilter(self, name, filter):
544 def adddatafilter(self, name, filter):
545 self._datafilters[name] = filter
545 self._datafilters[name] = filter
546
546
547 def wread(self, filename):
547 def wread(self, filename):
548 if self._link(filename):
548 if self._link(filename):
549 data = os.readlink(self.wjoin(filename))
549 data = os.readlink(self.wjoin(filename))
550 else:
550 else:
551 data = self.wopener(filename, 'r').read()
551 data = self.wopener(filename, 'r').read()
552 return self._filter("encode", filename, data)
552 return self._filter("encode", filename, data)
553
553
554 def wwrite(self, filename, data, flags):
554 def wwrite(self, filename, data, flags):
555 data = self._filter("decode", filename, data)
555 data = self._filter("decode", filename, data)
556 try:
556 try:
557 os.unlink(self.wjoin(filename))
557 os.unlink(self.wjoin(filename))
558 except OSError:
558 except OSError:
559 pass
559 pass
560 if 'l' in flags:
560 if 'l' in flags:
561 self.wopener.symlink(data, filename)
561 self.wopener.symlink(data, filename)
562 else:
562 else:
563 self.wopener(filename, 'w').write(data)
563 self.wopener(filename, 'w').write(data)
564 if 'x' in flags:
564 if 'x' in flags:
565 util.set_flags(self.wjoin(filename), False, True)
565 util.set_flags(self.wjoin(filename), False, True)
566
566
567 def wwritedata(self, filename, data):
567 def wwritedata(self, filename, data):
568 return self._filter("decode", filename, data)
568 return self._filter("decode", filename, data)
569
569
570 def transaction(self, desc):
570 def transaction(self, desc):
571 tr = self._transref and self._transref() or None
571 tr = self._transref and self._transref() or None
572 if tr and tr.running():
572 if tr and tr.running():
573 return tr.nest()
573 return tr.nest()
574
574
575 # abort here if the journal already exists
575 # abort here if the journal already exists
576 if os.path.exists(self.sjoin("journal")):
576 if os.path.exists(self.sjoin("journal")):
577 raise error.RepoError(
577 raise error.RepoError(
578 _("abandoned transaction found - run hg recover"))
578 _("abandoned transaction found - run hg recover"))
579
579
580 # save dirstate for rollback
580 # save dirstate for rollback
581 try:
581 try:
582 ds = self.opener("dirstate").read()
582 ds = self.opener("dirstate").read()
583 except IOError:
583 except IOError:
584 ds = ""
584 ds = ""
585 self.opener("journal.dirstate", "w").write(ds)
585 self.opener("journal.dirstate", "w").write(ds)
586 self.opener("journal.branch", "w").write(self.dirstate.branch())
586 self.opener("journal.branch", "w").write(self.dirstate.branch())
587 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
587 self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc))
588
588
589 renames = [(self.sjoin("journal"), self.sjoin("undo")),
589 renames = [(self.sjoin("journal"), self.sjoin("undo")),
590 (self.join("journal.dirstate"), self.join("undo.dirstate")),
590 (self.join("journal.dirstate"), self.join("undo.dirstate")),
591 (self.join("journal.branch"), self.join("undo.branch")),
591 (self.join("journal.branch"), self.join("undo.branch")),
592 (self.join("journal.desc"), self.join("undo.desc"))]
592 (self.join("journal.desc"), self.join("undo.desc"))]
593 tr = transaction.transaction(self.ui.warn, self.sopener,
593 tr = transaction.transaction(self.ui.warn, self.sopener,
594 self.sjoin("journal"),
594 self.sjoin("journal"),
595 aftertrans(renames),
595 aftertrans(renames),
596 self.store.createmode)
596 self.store.createmode)
597 self._transref = weakref.ref(tr)
597 self._transref = weakref.ref(tr)
598 return tr
598 return tr
599
599
600 def recover(self):
600 def recover(self):
601 lock = self.lock()
601 lock = self.lock()
602 try:
602 try:
603 if os.path.exists(self.sjoin("journal")):
603 if os.path.exists(self.sjoin("journal")):
604 self.ui.status(_("rolling back interrupted transaction\n"))
604 self.ui.status(_("rolling back interrupted transaction\n"))
605 transaction.rollback(self.sopener, self.sjoin("journal"),
605 transaction.rollback(self.sopener, self.sjoin("journal"),
606 self.ui.warn)
606 self.ui.warn)
607 self.invalidate()
607 self.invalidate()
608 return True
608 return True
609 else:
609 else:
610 self.ui.warn(_("no interrupted transaction available\n"))
610 self.ui.warn(_("no interrupted transaction available\n"))
611 return False
611 return False
612 finally:
612 finally:
613 lock.release()
613 lock.release()
614
614
615 def rollback(self, dryrun=False):
615 def rollback(self, dryrun=False):
616 wlock = lock = None
616 wlock = lock = None
617 try:
617 try:
618 wlock = self.wlock()
618 wlock = self.wlock()
619 lock = self.lock()
619 lock = self.lock()
620 if os.path.exists(self.sjoin("undo")):
620 if os.path.exists(self.sjoin("undo")):
621 try:
621 try:
622 args = self.opener("undo.desc", "r").read().splitlines()
622 args = self.opener("undo.desc", "r").read().splitlines()
623 if len(args) >= 3 and self.ui.verbose:
623 if len(args) >= 3 and self.ui.verbose:
624 desc = _("rolling back to revision %s"
624 desc = _("rolling back to revision %s"
625 " (undo %s: %s)\n") % (
625 " (undo %s: %s)\n") % (
626 int(args[0]) - 1, args[1], args[2])
626 int(args[0]) - 1, args[1], args[2])
627 elif len(args) >= 2:
627 elif len(args) >= 2:
628 desc = _("rolling back to revision %s (undo %s)\n") % (
628 desc = _("rolling back to revision %s (undo %s)\n") % (
629 int(args[0]) - 1, args[1])
629 int(args[0]) - 1, args[1])
630 except IOError:
630 except IOError:
631 desc = _("rolling back unknown transaction\n")
631 desc = _("rolling back unknown transaction\n")
632 self.ui.status(desc)
632 self.ui.status(desc)
633 if dryrun:
633 if dryrun:
634 return
634 return
635 transaction.rollback(self.sopener, self.sjoin("undo"),
635 transaction.rollback(self.sopener, self.sjoin("undo"),
636 self.ui.warn)
636 self.ui.warn)
637 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
637 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
638 try:
638 try:
639 branch = self.opener("undo.branch").read()
639 branch = self.opener("undo.branch").read()
640 self.dirstate.setbranch(branch)
640 self.dirstate.setbranch(branch)
641 except IOError:
641 except IOError:
642 self.ui.warn(_("Named branch could not be reset, "
642 self.ui.warn(_("Named branch could not be reset, "
643 "current branch still is: %s\n")
643 "current branch still is: %s\n")
644 % encoding.tolocal(self.dirstate.branch()))
644 % encoding.tolocal(self.dirstate.branch()))
645 self.invalidate()
645 self.invalidate()
646 self.dirstate.invalidate()
646 self.dirstate.invalidate()
647 self.destroyed()
647 self.destroyed()
648 else:
648 else:
649 self.ui.warn(_("no rollback information available\n"))
649 self.ui.warn(_("no rollback information available\n"))
650 return 1
650 return 1
651 finally:
651 finally:
652 release(lock, wlock)
652 release(lock, wlock)
653
653
654 def invalidatecaches(self):
654 def invalidatecaches(self):
655 self._tags = None
655 self._tags = None
656 self._tagtypes = None
656 self._tagtypes = None
657 self.nodetagscache = None
657 self.nodetagscache = None
658 self._branchcache = None # in UTF-8
658 self._branchcache = None # in UTF-8
659 self._branchcachetip = None
659 self._branchcachetip = None
660
660
661 def invalidate(self):
661 def invalidate(self):
662 for a in "changelog manifest".split():
662 for a in "changelog manifest".split():
663 if a in self.__dict__:
663 if a in self.__dict__:
664 delattr(self, a)
664 delattr(self, a)
665 self.invalidatecaches()
665 self.invalidatecaches()
666
666
667 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
667 def _lock(self, lockname, wait, releasefn, acquirefn, desc):
668 try:
668 try:
669 l = lock.lock(lockname, 0, releasefn, desc=desc)
669 l = lock.lock(lockname, 0, releasefn, desc=desc)
670 except error.LockHeld, inst:
670 except error.LockHeld, inst:
671 if not wait:
671 if not wait:
672 raise
672 raise
673 self.ui.warn(_("waiting for lock on %s held by %r\n") %
673 self.ui.warn(_("waiting for lock on %s held by %r\n") %
674 (desc, inst.locker))
674 (desc, inst.locker))
675 # default to 600 seconds timeout
675 # default to 600 seconds timeout
676 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
676 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
677 releasefn, desc=desc)
677 releasefn, desc=desc)
678 if acquirefn:
678 if acquirefn:
679 acquirefn()
679 acquirefn()
680 return l
680 return l
681
681
682 def lock(self, wait=True):
682 def lock(self, wait=True):
683 '''Lock the repository store (.hg/store) and return a weak reference
683 '''Lock the repository store (.hg/store) and return a weak reference
684 to the lock. Use this before modifying the store (e.g. committing or
684 to the lock. Use this before modifying the store (e.g. committing or
685 stripping). If you are opening a transaction, get a lock as well.'''
685 stripping). If you are opening a transaction, get a lock as well.'''
686 l = self._lockref and self._lockref()
686 l = self._lockref and self._lockref()
687 if l is not None and l.held:
687 if l is not None and l.held:
688 l.lock()
688 l.lock()
689 return l
689 return l
690
690
691 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
691 l = self._lock(self.sjoin("lock"), wait, None, self.invalidate,
692 _('repository %s') % self.origroot)
692 _('repository %s') % self.origroot)
693 self._lockref = weakref.ref(l)
693 self._lockref = weakref.ref(l)
694 return l
694 return l
695
695
696 def wlock(self, wait=True):
696 def wlock(self, wait=True):
697 '''Lock the non-store parts of the repository (everything under
697 '''Lock the non-store parts of the repository (everything under
698 .hg except .hg/store) and return a weak reference to the lock.
698 .hg except .hg/store) and return a weak reference to the lock.
699 Use this before modifying files in .hg.'''
699 Use this before modifying files in .hg.'''
700 l = self._wlockref and self._wlockref()
700 l = self._wlockref and self._wlockref()
701 if l is not None and l.held:
701 if l is not None and l.held:
702 l.lock()
702 l.lock()
703 return l
703 return l
704
704
705 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
705 l = self._lock(self.join("wlock"), wait, self.dirstate.write,
706 self.dirstate.invalidate, _('working directory of %s') %
706 self.dirstate.invalidate, _('working directory of %s') %
707 self.origroot)
707 self.origroot)
708 self._wlockref = weakref.ref(l)
708 self._wlockref = weakref.ref(l)
709 return l
709 return l
710
710
711 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
711 def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
712 """
712 """
713 commit an individual file as part of a larger transaction
713 commit an individual file as part of a larger transaction
714 """
714 """
715
715
716 fname = fctx.path()
716 fname = fctx.path()
717 text = fctx.data()
717 text = fctx.data()
718 flog = self.file(fname)
718 flog = self.file(fname)
719 fparent1 = manifest1.get(fname, nullid)
719 fparent1 = manifest1.get(fname, nullid)
720 fparent2 = fparent2o = manifest2.get(fname, nullid)
720 fparent2 = fparent2o = manifest2.get(fname, nullid)
721
721
722 meta = {}
722 meta = {}
723 copy = fctx.renamed()
723 copy = fctx.renamed()
724 if copy and copy[0] != fname:
724 if copy and copy[0] != fname:
725 # Mark the new revision of this file as a copy of another
725 # Mark the new revision of this file as a copy of another
726 # file. This copy data will effectively act as a parent
726 # file. This copy data will effectively act as a parent
727 # of this new revision. If this is a merge, the first
727 # of this new revision. If this is a merge, the first
728 # parent will be the nullid (meaning "look up the copy data")
728 # parent will be the nullid (meaning "look up the copy data")
729 # and the second one will be the other parent. For example:
729 # and the second one will be the other parent. For example:
730 #
730 #
731 # 0 --- 1 --- 3 rev1 changes file foo
731 # 0 --- 1 --- 3 rev1 changes file foo
732 # \ / rev2 renames foo to bar and changes it
732 # \ / rev2 renames foo to bar and changes it
733 # \- 2 -/ rev3 should have bar with all changes and
733 # \- 2 -/ rev3 should have bar with all changes and
734 # should record that bar descends from
734 # should record that bar descends from
735 # bar in rev2 and foo in rev1
735 # bar in rev2 and foo in rev1
736 #
736 #
737 # this allows this merge to succeed:
737 # this allows this merge to succeed:
738 #
738 #
739 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
739 # 0 --- 1 --- 3 rev4 reverts the content change from rev2
740 # \ / merging rev3 and rev4 should use bar@rev2
740 # \ / merging rev3 and rev4 should use bar@rev2
741 # \- 2 --- 4 as the merge base
741 # \- 2 --- 4 as the merge base
742 #
742 #
743
743
744 cfname = copy[0]
744 cfname = copy[0]
745 crev = manifest1.get(cfname)
745 crev = manifest1.get(cfname)
746 newfparent = fparent2
746 newfparent = fparent2
747
747
748 if manifest2: # branch merge
748 if manifest2: # branch merge
749 if fparent2 == nullid or crev is None: # copied on remote side
749 if fparent2 == nullid or crev is None: # copied on remote side
750 if cfname in manifest2:
750 if cfname in manifest2:
751 crev = manifest2[cfname]
751 crev = manifest2[cfname]
752 newfparent = fparent1
752 newfparent = fparent1
753
753
754 # find source in nearest ancestor if we've lost track
754 # find source in nearest ancestor if we've lost track
755 if not crev:
755 if not crev:
756 self.ui.debug(" %s: searching for copy revision for %s\n" %
756 self.ui.debug(" %s: searching for copy revision for %s\n" %
757 (fname, cfname))
757 (fname, cfname))
758 for ancestor in self['.'].ancestors():
758 for ancestor in self['.'].ancestors():
759 if cfname in ancestor:
759 if cfname in ancestor:
760 crev = ancestor[cfname].filenode()
760 crev = ancestor[cfname].filenode()
761 break
761 break
762
762
763 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
763 self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
764 meta["copy"] = cfname
764 meta["copy"] = cfname
765 meta["copyrev"] = hex(crev)
765 meta["copyrev"] = hex(crev)
766 fparent1, fparent2 = nullid, newfparent
766 fparent1, fparent2 = nullid, newfparent
767 elif fparent2 != nullid:
767 elif fparent2 != nullid:
768 # is one parent an ancestor of the other?
768 # is one parent an ancestor of the other?
769 fparentancestor = flog.ancestor(fparent1, fparent2)
769 fparentancestor = flog.ancestor(fparent1, fparent2)
770 if fparentancestor == fparent1:
770 if fparentancestor == fparent1:
771 fparent1, fparent2 = fparent2, nullid
771 fparent1, fparent2 = fparent2, nullid
772 elif fparentancestor == fparent2:
772 elif fparentancestor == fparent2:
773 fparent2 = nullid
773 fparent2 = nullid
774
774
775 # is the file changed?
775 # is the file changed?
776 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
776 if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
777 changelist.append(fname)
777 changelist.append(fname)
778 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
778 return flog.add(text, meta, tr, linkrev, fparent1, fparent2)
779
779
780 # are just the flags changed during merge?
780 # are just the flags changed during merge?
781 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
781 if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
782 changelist.append(fname)
782 changelist.append(fname)
783
783
784 return fparent1
784 return fparent1
785
785
786 def commit(self, text="", user=None, date=None, match=None, force=False,
786 def commit(self, text="", user=None, date=None, match=None, force=False,
787 editor=False, extra={}):
787 editor=False, extra={}):
788 """Add a new revision to current repository.
788 """Add a new revision to current repository.
789
789
790 Revision information is gathered from the working directory;
790 Revision information is gathered from the working directory;
791 match can be used to filter the committed files. If editor is
791 match can be used to filter the committed files. If editor is
792 supplied, it is called to get a commit message.
792 supplied, it is called to get a commit message.
793 """
793 """
794
794
795 def fail(f, msg):
795 def fail(f, msg):
796 raise util.Abort('%s: %s' % (f, msg))
796 raise util.Abort('%s: %s' % (f, msg))
797
797
798 if not match:
798 if not match:
799 match = matchmod.always(self.root, '')
799 match = matchmod.always(self.root, '')
800
800
801 if not force:
801 if not force:
802 vdirs = []
802 vdirs = []
803 match.dir = vdirs.append
803 match.dir = vdirs.append
804 match.bad = fail
804 match.bad = fail
805
805
806 wlock = self.wlock()
806 wlock = self.wlock()
807 try:
807 try:
808 wctx = self[None]
808 wctx = self[None]
809 merge = len(wctx.parents()) > 1
809 merge = len(wctx.parents()) > 1
810
810
811 if (not force and merge and match and
811 if (not force and merge and match and
812 (match.files() or match.anypats())):
812 (match.files() or match.anypats())):
813 raise util.Abort(_('cannot partially commit a merge '
813 raise util.Abort(_('cannot partially commit a merge '
814 '(do not specify files or patterns)'))
814 '(do not specify files or patterns)'))
815
815
816 changes = self.status(match=match, clean=force)
816 changes = self.status(match=match, clean=force)
817 if force:
817 if force:
818 changes[0].extend(changes[6]) # mq may commit unchanged files
818 changes[0].extend(changes[6]) # mq may commit unchanged files
819
819
820 # check subrepos
820 # check subrepos
821 subs = []
821 subs = []
822 removedsubs = set()
822 removedsubs = set()
823 for p in wctx.parents():
823 for p in wctx.parents():
824 removedsubs.update(s for s in p.substate if match(s))
824 removedsubs.update(s for s in p.substate if match(s))
825 for s in wctx.substate:
825 for s in wctx.substate:
826 removedsubs.discard(s)
826 removedsubs.discard(s)
827 if match(s) and wctx.sub(s).dirty():
827 if match(s) and wctx.sub(s).dirty():
828 subs.append(s)
828 subs.append(s)
829 if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
829 if (subs or removedsubs) and '.hgsubstate' not in changes[0]:
830 changes[0].insert(0, '.hgsubstate')
830 changes[0].insert(0, '.hgsubstate')
831
831
832 # make sure all explicit patterns are matched
832 # make sure all explicit patterns are matched
833 if not force and match.files():
833 if not force and match.files():
834 matched = set(changes[0] + changes[1] + changes[2])
834 matched = set(changes[0] + changes[1] + changes[2])
835
835
836 for f in match.files():
836 for f in match.files():
837 if f == '.' or f in matched or f in wctx.substate:
837 if f == '.' or f in matched or f in wctx.substate:
838 continue
838 continue
839 if f in changes[3]: # missing
839 if f in changes[3]: # missing
840 fail(f, _('file not found!'))
840 fail(f, _('file not found!'))
841 if f in vdirs: # visited directory
841 if f in vdirs: # visited directory
842 d = f + '/'
842 d = f + '/'
843 for mf in matched:
843 for mf in matched:
844 if mf.startswith(d):
844 if mf.startswith(d):
845 break
845 break
846 else:
846 else:
847 fail(f, _("no match under directory!"))
847 fail(f, _("no match under directory!"))
848 elif f not in self.dirstate:
848 elif f not in self.dirstate:
849 fail(f, _("file not tracked!"))
849 fail(f, _("file not tracked!"))
850
850
851 if (not force and not extra.get("close") and not merge
851 if (not force and not extra.get("close") and not merge
852 and not (changes[0] or changes[1] or changes[2])
852 and not (changes[0] or changes[1] or changes[2])
853 and wctx.branch() == wctx.p1().branch()):
853 and wctx.branch() == wctx.p1().branch()):
854 return None
854 return None
855
855
856 ms = mergemod.mergestate(self)
856 ms = mergemod.mergestate(self)
857 for f in changes[0]:
857 for f in changes[0]:
858 if f in ms and ms[f] == 'u':
858 if f in ms and ms[f] == 'u':
859 raise util.Abort(_("unresolved merge conflicts "
859 raise util.Abort(_("unresolved merge conflicts "
860 "(see hg resolve)"))
860 "(see hg resolve)"))
861
861
862 cctx = context.workingctx(self, text, user, date, extra, changes)
862 cctx = context.workingctx(self, text, user, date, extra, changes)
863 if editor:
863 if editor:
864 cctx._text = editor(self, cctx, subs)
864 cctx._text = editor(self, cctx, subs)
865 edited = (text != cctx._text)
865 edited = (text != cctx._text)
866
866
867 # commit subs
867 # commit subs
868 if subs or removedsubs:
868 if subs or removedsubs:
869 state = wctx.substate.copy()
869 state = wctx.substate.copy()
870 for s in subs:
870 for s in subs:
871 sub = wctx.sub(s)
871 sub = wctx.sub(s)
872 self.ui.status(_('committing subrepository %s\n') %
872 self.ui.status(_('committing subrepository %s\n') %
873 subrepo.relpath(sub))
873 subrepo.relpath(sub))
874 sr = sub.commit(cctx._text, user, date)
874 sr = sub.commit(cctx._text, user, date)
875 state[s] = (state[s][0], sr)
875 state[s] = (state[s][0], sr)
876 subrepo.writestate(self, state)
876 subrepo.writestate(self, state)
877
877
878 # Save commit message in case this transaction gets rolled back
878 # Save commit message in case this transaction gets rolled back
879 # (e.g. by a pretxncommit hook). Leave the content alone on
879 # (e.g. by a pretxncommit hook). Leave the content alone on
880 # the assumption that the user will use the same editor again.
880 # the assumption that the user will use the same editor again.
881 msgfile = self.opener('last-message.txt', 'wb')
881 msgfile = self.opener('last-message.txt', 'wb')
882 msgfile.write(cctx._text)
882 msgfile.write(cctx._text)
883 msgfile.close()
883 msgfile.close()
884
884
885 p1, p2 = self.dirstate.parents()
885 p1, p2 = self.dirstate.parents()
886 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
886 hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
887 try:
887 try:
888 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
888 self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
889 ret = self.commitctx(cctx, True)
889 ret = self.commitctx(cctx, True)
890 except:
890 except:
891 if edited:
891 if edited:
892 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
892 msgfn = self.pathto(msgfile.name[len(self.root)+1:])
893 self.ui.write(
893 self.ui.write(
894 _('note: commit message saved in %s\n') % msgfn)
894 _('note: commit message saved in %s\n') % msgfn)
895 raise
895 raise
896
896
897 # update dirstate and mergestate
897 # update dirstate and mergestate
898 for f in changes[0] + changes[1]:
898 for f in changes[0] + changes[1]:
899 self.dirstate.normal(f)
899 self.dirstate.normal(f)
900 for f in changes[2]:
900 for f in changes[2]:
901 self.dirstate.forget(f)
901 self.dirstate.forget(f)
902 self.dirstate.setparents(ret)
902 self.dirstate.setparents(ret)
903 ms.reset()
903 ms.reset()
904 finally:
904 finally:
905 wlock.release()
905 wlock.release()
906
906
907 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
907 self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
908 return ret
908 return ret
909
909
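As the docstring above notes, commit() gathers everything from the working
directory and returns the new changeset node, or None when there is nothing to
commit. A minimal caller sketch (assuming an already-open localrepo instance
named repo; the message and user strings are placeholders)::

    from mercurial.node import short

    node = repo.commit(text='fix parsing bug',
                       user='Example Hacker <hacker@example.com>')
    if node is None:
        repo.ui.status('nothing changed\n')
    else:
        repo.ui.status('created changeset %s\n' % short(node))
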
910 def commitctx(self, ctx, error=False):
910 def commitctx(self, ctx, error=False):
911 """Add a new revision to current repository.
911 """Add a new revision to current repository.
912 Revision information is passed via the context argument.
912 Revision information is passed via the context argument.
913 """
913 """
914
914
915 tr = lock = None
915 tr = lock = None
916 removed = ctx.removed()
916 removed = ctx.removed()
917 p1, p2 = ctx.p1(), ctx.p2()
917 p1, p2 = ctx.p1(), ctx.p2()
918 m1 = p1.manifest().copy()
918 m1 = p1.manifest().copy()
919 m2 = p2.manifest()
919 m2 = p2.manifest()
920 user = ctx.user()
920 user = ctx.user()
921
921
922 lock = self.lock()
922 lock = self.lock()
923 try:
923 try:
924 tr = self.transaction("commit")
924 tr = self.transaction("commit")
925 trp = weakref.proxy(tr)
925 trp = weakref.proxy(tr)
926
926
927 # check in files
927 # check in files
928 new = {}
928 new = {}
929 changed = []
929 changed = []
930 linkrev = len(self)
930 linkrev = len(self)
931 for f in sorted(ctx.modified() + ctx.added()):
931 for f in sorted(ctx.modified() + ctx.added()):
932 self.ui.note(f + "\n")
932 self.ui.note(f + "\n")
933 try:
933 try:
934 fctx = ctx[f]
934 fctx = ctx[f]
935 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
935 new[f] = self._filecommit(fctx, m1, m2, linkrev, trp,
936 changed)
936 changed)
937 m1.set(f, fctx.flags())
937 m1.set(f, fctx.flags())
938 except OSError, inst:
938 except OSError, inst:
939 self.ui.warn(_("trouble committing %s!\n") % f)
939 self.ui.warn(_("trouble committing %s!\n") % f)
940 raise
940 raise
941 except IOError, inst:
941 except IOError, inst:
942 errcode = getattr(inst, 'errno', errno.ENOENT)
942 errcode = getattr(inst, 'errno', errno.ENOENT)
943 if error or errcode and errcode != errno.ENOENT:
943 if error or errcode and errcode != errno.ENOENT:
944 self.ui.warn(_("trouble committing %s!\n") % f)
944 self.ui.warn(_("trouble committing %s!\n") % f)
945 raise
945 raise
946 else:
946 else:
947 removed.append(f)
947 removed.append(f)
948
948
949 # update manifest
949 # update manifest
950 m1.update(new)
950 m1.update(new)
951 removed = [f for f in sorted(removed) if f in m1 or f in m2]
951 removed = [f for f in sorted(removed) if f in m1 or f in m2]
952 drop = [f for f in removed if f in m1]
952 drop = [f for f in removed if f in m1]
953 for f in drop:
953 for f in drop:
954 del m1[f]
954 del m1[f]
955 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
955 mn = self.manifest.add(m1, trp, linkrev, p1.manifestnode(),
956 p2.manifestnode(), (new, drop))
956 p2.manifestnode(), (new, drop))
957
957
958 # update changelog
958 # update changelog
959 self.changelog.delayupdate()
959 self.changelog.delayupdate()
960 n = self.changelog.add(mn, changed + removed, ctx.description(),
960 n = self.changelog.add(mn, changed + removed, ctx.description(),
961 trp, p1.node(), p2.node(),
961 trp, p1.node(), p2.node(),
962 user, ctx.date(), ctx.extra().copy())
962 user, ctx.date(), ctx.extra().copy())
963 p = lambda: self.changelog.writepending() and self.root or ""
963 p = lambda: self.changelog.writepending() and self.root or ""
964 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
964 xp1, xp2 = p1.hex(), p2 and p2.hex() or ''
965 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
965 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
966 parent2=xp2, pending=p)
966 parent2=xp2, pending=p)
967 self.changelog.finalize(trp)
967 self.changelog.finalize(trp)
968 tr.close()
968 tr.close()
969
969
970 if self._branchcache:
970 if self._branchcache:
971 self.branchtags()
971 self.branchtags()
972 return n
972 return n
973 finally:
973 finally:
974 del tr
974 del tr
975 lock.release()
975 lock.release()
976
976
977 def destroyed(self):
977 def destroyed(self):
978 '''Inform the repository that nodes have been destroyed.
978 '''Inform the repository that nodes have been destroyed.
979 Intended for use by strip and rollback, so there's a common
979 Intended for use by strip and rollback, so there's a common
980 place for anything that has to be done after destroying history.'''
980 place for anything that has to be done after destroying history.'''
981 # XXX it might be nice if we could take the list of destroyed
981 # XXX it might be nice if we could take the list of destroyed
982 # nodes, but I don't see an easy way for rollback() to do that
982 # nodes, but I don't see an easy way for rollback() to do that
983
983
984 # Ensure the persistent tag cache is updated. Doing it now
984 # Ensure the persistent tag cache is updated. Doing it now
985 # means that the tag cache only has to worry about destroyed
985 # means that the tag cache only has to worry about destroyed
986 # heads immediately after a strip/rollback. That in turn
986 # heads immediately after a strip/rollback. That in turn
987 # guarantees that "cachetip == currenttip" (comparing both rev
987 # guarantees that "cachetip == currenttip" (comparing both rev
988 # and node) always means no nodes have been added or destroyed.
988 # and node) always means no nodes have been added or destroyed.
989
989
990 # XXX this is suboptimal when qrefresh'ing: we strip the current
990 # XXX this is suboptimal when qrefresh'ing: we strip the current
991 # head, refresh the tag cache, then immediately add a new head.
991 # head, refresh the tag cache, then immediately add a new head.
992 # But I think doing it this way is necessary for the "instant
992 # But I think doing it this way is necessary for the "instant
993 # tag cache retrieval" case to work.
993 # tag cache retrieval" case to work.
994 self.invalidatecaches()
994 self.invalidatecaches()
995
995
996 def walk(self, match, node=None):
996 def walk(self, match, node=None):
997 '''
997 '''
998 walk recursively through the directory tree or a given
998 walk recursively through the directory tree or a given
999 changeset, finding all files matched by the match
999 changeset, finding all files matched by the match
1000 function
1000 function
1001 '''
1001 '''
1002 return self[node].walk(match)
1002 return self[node].walk(match)
1003
1003
1004 def status(self, node1='.', node2=None, match=None,
1004 def status(self, node1='.', node2=None, match=None,
1005 ignored=False, clean=False, unknown=False):
1005 ignored=False, clean=False, unknown=False):
1006 """return status of files between two nodes or node and working directory
1006 """return status of files between two nodes or node and working directory
1007
1007
1008 If node1 is None, use the first dirstate parent instead.
1008 If node1 is None, use the first dirstate parent instead.
1009 If node2 is None, compare node1 with working directory.
1009 If node2 is None, compare node1 with working directory.
1010 """
1010 """
1011
1011
1012 def mfmatches(ctx):
1012 def mfmatches(ctx):
1013 mf = ctx.manifest().copy()
1013 mf = ctx.manifest().copy()
1014 for fn in mf.keys():
1014 for fn in mf.keys():
1015 if not match(fn):
1015 if not match(fn):
1016 del mf[fn]
1016 del mf[fn]
1017 return mf
1017 return mf
1018
1018
1019 if isinstance(node1, context.changectx):
1019 if isinstance(node1, context.changectx):
1020 ctx1 = node1
1020 ctx1 = node1
1021 else:
1021 else:
1022 ctx1 = self[node1]
1022 ctx1 = self[node1]
1023 if isinstance(node2, context.changectx):
1023 if isinstance(node2, context.changectx):
1024 ctx2 = node2
1024 ctx2 = node2
1025 else:
1025 else:
1026 ctx2 = self[node2]
1026 ctx2 = self[node2]
1027
1027
1028 working = ctx2.rev() is None
1028 working = ctx2.rev() is None
1029 parentworking = working and ctx1 == self['.']
1029 parentworking = working and ctx1 == self['.']
1030 match = match or matchmod.always(self.root, self.getcwd())
1030 match = match or matchmod.always(self.root, self.getcwd())
1031 listignored, listclean, listunknown = ignored, clean, unknown
1031 listignored, listclean, listunknown = ignored, clean, unknown
1032
1032
1033 # load earliest manifest first for caching reasons
1033 # load earliest manifest first for caching reasons
1034 if not working and ctx2.rev() < ctx1.rev():
1034 if not working and ctx2.rev() < ctx1.rev():
1035 ctx2.manifest()
1035 ctx2.manifest()
1036
1036
1037 if not parentworking:
1037 if not parentworking:
1038 def bad(f, msg):
1038 def bad(f, msg):
1039 if f not in ctx1:
1039 if f not in ctx1:
1040 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1040 self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
1041 match.bad = bad
1041 match.bad = bad
1042
1042
1043 if working: # we need to scan the working dir
1043 if working: # we need to scan the working dir
1044 subrepos = ctx1.substate.keys()
1044 subrepos = ctx1.substate.keys()
1045 s = self.dirstate.status(match, subrepos, listignored,
1045 s = self.dirstate.status(match, subrepos, listignored,
1046 listclean, listunknown)
1046 listclean, listunknown)
1047 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1047 cmp, modified, added, removed, deleted, unknown, ignored, clean = s
1048
1048
1049 # check for any possibly clean files
1049 # check for any possibly clean files
1050 if parentworking and cmp:
1050 if parentworking and cmp:
1051 fixup = []
1051 fixup = []
1052 # do a full compare of any files that might have changed
1052 # do a full compare of any files that might have changed
1053 for f in sorted(cmp):
1053 for f in sorted(cmp):
1054 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1054 if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
1055 or ctx1[f].cmp(ctx2[f].data())):
1055 or ctx1[f].cmp(ctx2[f].data())):
1056 modified.append(f)
1056 modified.append(f)
1057 else:
1057 else:
1058 fixup.append(f)
1058 fixup.append(f)
1059
1059
1060 if listclean:
1060 if listclean:
1061 clean += fixup
1061 clean += fixup
1062
1062
1063 # update dirstate for files that are actually clean
1063 # update dirstate for files that are actually clean
1064 if fixup:
1064 if fixup:
1065 try:
1065 try:
1066 # updating the dirstate is optional
1066 # updating the dirstate is optional
1067 # so we don't wait on the lock
1067 # so we don't wait on the lock
1068 wlock = self.wlock(False)
1068 wlock = self.wlock(False)
1069 try:
1069 try:
1070 for f in fixup:
1070 for f in fixup:
1071 self.dirstate.normal(f)
1071 self.dirstate.normal(f)
1072 finally:
1072 finally:
1073 wlock.release()
1073 wlock.release()
1074 except error.LockError:
1074 except error.LockError:
1075 pass
1075 pass
1076
1076
1077 if not parentworking:
1077 if not parentworking:
1078 mf1 = mfmatches(ctx1)
1078 mf1 = mfmatches(ctx1)
1079 if working:
1079 if working:
1080 # we are comparing working dir against non-parent
1080 # we are comparing working dir against non-parent
1081 # generate a pseudo-manifest for the working dir
1081 # generate a pseudo-manifest for the working dir
1082 mf2 = mfmatches(self['.'])
1082 mf2 = mfmatches(self['.'])
1083 for f in cmp + modified + added:
1083 for f in cmp + modified + added:
1084 mf2[f] = None
1084 mf2[f] = None
1085 mf2.set(f, ctx2.flags(f))
1085 mf2.set(f, ctx2.flags(f))
1086 for f in removed:
1086 for f in removed:
1087 if f in mf2:
1087 if f in mf2:
1088 del mf2[f]
1088 del mf2[f]
1089 else:
1089 else:
1090 # we are comparing two revisions
1090 # we are comparing two revisions
1091 deleted, unknown, ignored = [], [], []
1091 deleted, unknown, ignored = [], [], []
1092 mf2 = mfmatches(ctx2)
1092 mf2 = mfmatches(ctx2)
1093
1093
1094 modified, added, clean = [], [], []
1094 modified, added, clean = [], [], []
1095 for fn in mf2:
1095 for fn in mf2:
1096 if fn in mf1:
1096 if fn in mf1:
1097 if (mf1.flags(fn) != mf2.flags(fn) or
1097 if (mf1.flags(fn) != mf2.flags(fn) or
1098 (mf1[fn] != mf2[fn] and
1098 (mf1[fn] != mf2[fn] and
1099 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1099 (mf2[fn] or ctx1[fn].cmp(ctx2[fn].data())))):
1100 modified.append(fn)
1100 modified.append(fn)
1101 elif listclean:
1101 elif listclean:
1102 clean.append(fn)
1102 clean.append(fn)
1103 del mf1[fn]
1103 del mf1[fn]
1104 else:
1104 else:
1105 added.append(fn)
1105 added.append(fn)
1106 removed = mf1.keys()
1106 removed = mf1.keys()
1107
1107
1108 r = modified, added, removed, deleted, unknown, ignored, clean
1108 r = modified, added, removed, deleted, unknown, ignored, clean
1109 [l.sort() for l in r]
1109 [l.sort() for l in r]
1110 return r
1110 return r
1111
1111
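status() returns a 7-tuple of sorted file lists; the ignored and clean lists
are only populated when the corresponding flags are passed. A small sketch of
unpacking the result (again assuming a localrepo instance named repo)::

    modified, added, removed, deleted, unknown, ignored, clean = \
        repo.status(node1='.', node2=None, unknown=True, clean=True)
    for flag, files in zip('MAR!?IC', (modified, added, removed, deleted,
                                       unknown, ignored, clean)):
        for f in files:
            repo.ui.write('%s %s\n' % (flag, f))
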
1112 def add(self, list):
1112 def add(self, list):
1113 wlock = self.wlock()
1113 wlock = self.wlock()
1114 try:
1114 try:
1115 rejected = []
1115 rejected = []
1116 for f in list:
1116 for f in list:
1117 p = self.wjoin(f)
1117 p = self.wjoin(f)
1118 try:
1118 try:
1119 st = os.lstat(p)
1119 st = os.lstat(p)
1120 except:
1120 except:
1121 self.ui.warn(_("%s does not exist!\n") % f)
1121 self.ui.warn(_("%s does not exist!\n") % f)
1122 rejected.append(f)
1122 rejected.append(f)
1123 continue
1123 continue
1124 if st.st_size > 10000000:
1124 if st.st_size > 10000000:
1125 self.ui.warn(_("%s: up to %d MB of RAM may be required "
1125 self.ui.warn(_("%s: up to %d MB of RAM may be required "
1126 "to manage this file\n"
1126 "to manage this file\n"
1127 "(use 'hg revert %s' to cancel the "
1127 "(use 'hg revert %s' to cancel the "
1128 "pending addition)\n")
1128 "pending addition)\n")
1129 % (f, 3 * st.st_size // 1000000, f))
1129 % (f, 3 * st.st_size // 1000000, f))
1130 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1130 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
1131 self.ui.warn(_("%s not added: only files and symlinks "
1131 self.ui.warn(_("%s not added: only files and symlinks "
1132 "supported currently\n") % f)
1132 "supported currently\n") % f)
1133 rejected.append(p)
1133 rejected.append(p)
1134 elif self.dirstate[f] in 'amn':
1134 elif self.dirstate[f] in 'amn':
1135 self.ui.warn(_("%s already tracked!\n") % f)
1135 self.ui.warn(_("%s already tracked!\n") % f)
1136 elif self.dirstate[f] == 'r':
1136 elif self.dirstate[f] == 'r':
1137 self.dirstate.normallookup(f)
1137 self.dirstate.normallookup(f)
1138 else:
1138 else:
1139 self.dirstate.add(f)
1139 self.dirstate.add(f)
1140 return rejected
1140 return rejected
1141 finally:
1141 finally:
1142 wlock.release()
1142 wlock.release()
1143
1143
1144 def forget(self, list):
1144 def forget(self, list):
1145 wlock = self.wlock()
1145 wlock = self.wlock()
1146 try:
1146 try:
1147 for f in list:
1147 for f in list:
1148 if self.dirstate[f] != 'a':
1148 if self.dirstate[f] != 'a':
1149 self.ui.warn(_("%s not added!\n") % f)
1149 self.ui.warn(_("%s not added!\n") % f)
1150 else:
1150 else:
1151 self.dirstate.forget(f)
1151 self.dirstate.forget(f)
1152 finally:
1152 finally:
1153 wlock.release()
1153 wlock.release()
1154
1154
1155 def remove(self, list, unlink=False):
1155 def remove(self, list, unlink=False):
1156 if unlink:
1156 if unlink:
1157 for f in list:
1157 for f in list:
1158 try:
1158 try:
1159 util.unlink(self.wjoin(f))
1159 util.unlink(self.wjoin(f))
1160 except OSError, inst:
1160 except OSError, inst:
1161 if inst.errno != errno.ENOENT:
1161 if inst.errno != errno.ENOENT:
1162 raise
1162 raise
1163 wlock = self.wlock()
1163 wlock = self.wlock()
1164 try:
1164 try:
1165 for f in list:
1165 for f in list:
1166 if unlink and os.path.exists(self.wjoin(f)):
1166 if unlink and os.path.exists(self.wjoin(f)):
1167 self.ui.warn(_("%s still exists!\n") % f)
1167 self.ui.warn(_("%s still exists!\n") % f)
1168 elif self.dirstate[f] == 'a':
1168 elif self.dirstate[f] == 'a':
1169 self.dirstate.forget(f)
1169 self.dirstate.forget(f)
1170 elif f not in self.dirstate:
1170 elif f not in self.dirstate:
1171 self.ui.warn(_("%s not tracked!\n") % f)
1171 self.ui.warn(_("%s not tracked!\n") % f)
1172 else:
1172 else:
1173 self.dirstate.remove(f)
1173 self.dirstate.remove(f)
1174 finally:
1174 finally:
1175 wlock.release()
1175 wlock.release()
1176
1176
1177 def undelete(self, list):
1177 def undelete(self, list):
1178 manifests = [self.manifest.read(self.changelog.read(p)[0])
1178 manifests = [self.manifest.read(self.changelog.read(p)[0])
1179 for p in self.dirstate.parents() if p != nullid]
1179 for p in self.dirstate.parents() if p != nullid]
1180 wlock = self.wlock()
1180 wlock = self.wlock()
1181 try:
1181 try:
1182 for f in list:
1182 for f in list:
1183 if self.dirstate[f] != 'r':
1183 if self.dirstate[f] != 'r':
1184 self.ui.warn(_("%s not removed!\n") % f)
1184 self.ui.warn(_("%s not removed!\n") % f)
1185 else:
1185 else:
1186 m = f in manifests[0] and manifests[0] or manifests[1]
1186 m = f in manifests[0] and manifests[0] or manifests[1]
1187 t = self.file(f).read(m[f])
1187 t = self.file(f).read(m[f])
1188 self.wwrite(f, t, m.flags(f))
1188 self.wwrite(f, t, m.flags(f))
1189 self.dirstate.normal(f)
1189 self.dirstate.normal(f)
1190 finally:
1190 finally:
1191 wlock.release()
1191 wlock.release()
1192
1192
1193 def copy(self, source, dest):
1193 def copy(self, source, dest):
1194 p = self.wjoin(dest)
1194 p = self.wjoin(dest)
1195 if not (os.path.exists(p) or os.path.islink(p)):
1195 if not (os.path.exists(p) or os.path.islink(p)):
1196 self.ui.warn(_("%s does not exist!\n") % dest)
1196 self.ui.warn(_("%s does not exist!\n") % dest)
1197 elif not (os.path.isfile(p) or os.path.islink(p)):
1197 elif not (os.path.isfile(p) or os.path.islink(p)):
1198 self.ui.warn(_("copy failed: %s is not a file or a "
1198 self.ui.warn(_("copy failed: %s is not a file or a "
1199 "symbolic link\n") % dest)
1199 "symbolic link\n") % dest)
1200 else:
1200 else:
1201 wlock = self.wlock()
1201 wlock = self.wlock()
1202 try:
1202 try:
1203 if self.dirstate[dest] in '?r':
1203 if self.dirstate[dest] in '?r':
1204 self.dirstate.add(dest)
1204 self.dirstate.add(dest)
1205 self.dirstate.copy(source, dest)
1205 self.dirstate.copy(source, dest)
1206 finally:
1206 finally:
1207 wlock.release()
1207 wlock.release()
1208
1208
1209 def heads(self, start=None):
1209 def heads(self, start=None):
1210 heads = self.changelog.heads(start)
1210 heads = self.changelog.heads(start)
1211 # sort the output in rev descending order
1211 # sort the output in rev descending order
1212 heads = [(-self.changelog.rev(h), h) for h in heads]
1212 heads = [(-self.changelog.rev(h), h) for h in heads]
1213 return [n for (r, n) in sorted(heads)]
1213 return [n for (r, n) in sorted(heads)]
1214
1214
1215 def branchheads(self, branch=None, start=None, closed=False):
1215 def branchheads(self, branch=None, start=None, closed=False):
1216 '''return a (possibly filtered) list of heads for the given branch
1216 '''return a (possibly filtered) list of heads for the given branch
1217
1217
1218 Heads are returned in topological order, from newest to oldest.
1218 Heads are returned in topological order, from newest to oldest.
1219 If branch is None, use the dirstate branch.
1219 If branch is None, use the dirstate branch.
1220 If start is not None, return only heads reachable from start.
1220 If start is not None, return only heads reachable from start.
1221 If closed is True, return heads that are marked as closed as well.
1221 If closed is True, return heads that are marked as closed as well.
1222 '''
1222 '''
1223 if branch is None:
1223 if branch is None:
1224 branch = self[None].branch()
1224 branch = self[None].branch()
1225 branches = self.branchmap()
1225 branches = self.branchmap()
1226 if branch not in branches:
1226 if branch not in branches:
1227 return []
1227 return []
1228 # the cache returns heads ordered lowest to highest
1228 # the cache returns heads ordered lowest to highest
1229 bheads = list(reversed(branches[branch]))
1229 bheads = list(reversed(branches[branch]))
1230 if start is not None:
1230 if start is not None:
1231 # filter out the heads that cannot be reached from startrev
1231 # filter out the heads that cannot be reached from startrev
1232 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1232 fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
1233 bheads = [h for h in bheads if h in fbheads]
1233 bheads = [h for h in bheads if h in fbheads]
1234 if not closed:
1234 if not closed:
1235 bheads = [h for h in bheads if
1235 bheads = [h for h in bheads if
1236 ('close' not in self.changelog.read(h)[5])]
1236 ('close' not in self.changelog.read(h)[5])]
1237 return bheads
1237 return bheads
1238
1238
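branchheads() reads the branchmap cache and reverses it, so heads come back
newest first. For instance (repo as above), listing the open heads of the
'default' branch::

    for node in repo.branchheads('default', closed=False):
        repo.ui.write('%d:%s\n' % (repo[node].rev(), repo[node]))
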
1239 def branches(self, nodes):
1239 def branches(self, nodes):
1240 if not nodes:
1240 if not nodes:
1241 nodes = [self.changelog.tip()]
1241 nodes = [self.changelog.tip()]
1242 b = []
1242 b = []
1243 for n in nodes:
1243 for n in nodes:
1244 t = n
1244 t = n
1245 while 1:
1245 while 1:
1246 p = self.changelog.parents(n)
1246 p = self.changelog.parents(n)
1247 if p[1] != nullid or p[0] == nullid:
1247 if p[1] != nullid or p[0] == nullid:
1248 b.append((t, n, p[0], p[1]))
1248 b.append((t, n, p[0], p[1]))
1249 break
1249 break
1250 n = p[0]
1250 n = p[0]
1251 return b
1251 return b
1252
1252
1253 def between(self, pairs):
1253 def between(self, pairs):
1254 r = []
1254 r = []
1255
1255
1256 for top, bottom in pairs:
1256 for top, bottom in pairs:
1257 n, l, i = top, [], 0
1257 n, l, i = top, [], 0
1258 f = 1
1258 f = 1
1259
1259
1260 while n != bottom and n != nullid:
1260 while n != bottom and n != nullid:
1261 p = self.changelog.parents(n)[0]
1261 p = self.changelog.parents(n)[0]
1262 if i == f:
1262 if i == f:
1263 l.append(n)
1263 l.append(n)
1264 f = f * 2
1264 f = f * 2
1265 n = p
1265 n = p
1266 i += 1
1266 i += 1
1267
1267
1268 r.append(l)
1268 r.append(l)
1269
1269
1270 return r
1270 return r
1271
1271
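between() samples each top-to-bottom chain at exponentially growing distances
(1, 2, 4, 8, ... first-parent steps from the top), which is what lets the
discovery code further down binary-search long runs of history with only a few
round trips. A toy illustration of the same walk, with plain integers standing
in for nodes and "first parent" meaning n - 1::

    def sample(top, bottom):
        # mirrors the i == f / f *= 2 loop in between()
        n, picked, i, f = top, [], 0, 1
        while n != bottom and n > 0:
            if i == f:
                picked.append(n)
                f *= 2
            n -= 1      # stand-in for "n = first parent of n"
            i += 1
        return picked

    print sample(100, 0)    # -> [99, 98, 96, 92, 84, 68, 36]
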
1272 def findincoming(self, remote, base=None, heads=None, force=False):
1272 def findincoming(self, remote, base=None, heads=None, force=False):
1273 """Return list of roots of the subsets of missing nodes from remote
1273 """Return list of roots of the subsets of missing nodes from remote
1274
1274
1275 If base dict is specified, assume that these nodes and their parents
1275 If base dict is specified, assume that these nodes and their parents
1276 exist on the remote side and that no child of a node of base exists
1276 exist on the remote side and that no child of a node of base exists
1277 in both remote and self.
1277 in both remote and self.
1278 Furthermore base will be updated to include the nodes that exist
1278 Furthermore base will be updated to include the nodes that exist
1279 in self and remote but none of whose children exist in both self and remote.
1279 in self and remote but none of whose children exist in both self and remote.
1280 If a list of heads is specified, return only nodes which are heads
1280 If a list of heads is specified, return only nodes which are heads
1281 or ancestors of these heads.
1281 or ancestors of these heads.
1282
1282
1283 All the ancestors of base are in self and in remote.
1283 All the ancestors of base are in self and in remote.
1284 All the descendants of the list returned are missing in self.
1284 All the descendants of the list returned are missing in self.
1285 (and so we know that the rest of the nodes are missing in remote, see
1285 (and so we know that the rest of the nodes are missing in remote, see
1286 outgoing)
1286 outgoing)
1287 """
1287 """
1288 return self.findcommonincoming(remote, base, heads, force)[1]
1288 return self.findcommonincoming(remote, base, heads, force)[1]
1289
1289
1290 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1290 def findcommonincoming(self, remote, base=None, heads=None, force=False):
1291 """Return a tuple (common, missing roots, heads) used to identify
1291 """Return a tuple (common, missing roots, heads) used to identify
1292 missing nodes from remote.
1292 missing nodes from remote.
1293
1293
1294 If base dict is specified, assume that these nodes and their parents
1294 If base dict is specified, assume that these nodes and their parents
1295 exist on the remote side and that no child of a node of base exists
1295 exist on the remote side and that no child of a node of base exists
1296 in both remote and self.
1296 in both remote and self.
1297 Furthermore base will be updated to include the nodes that exist
1297 Furthermore base will be updated to include the nodes that exist
1298 in self and remote but none of whose children exist in both self and remote.
1298 in self and remote but none of whose children exist in both self and remote.
1299 If a list of heads is specified, return only nodes which are heads
1299 If a list of heads is specified, return only nodes which are heads
1300 or ancestors of these heads.
1300 or ancestors of these heads.
1301
1301
1302 All the ancestors of base are in self and in remote.
1302 All the ancestors of base are in self and in remote.
1303 """
1303 """
1304 m = self.changelog.nodemap
1304 m = self.changelog.nodemap
1305 search = []
1305 search = []
1306 fetch = set()
1306 fetch = set()
1307 seen = set()
1307 seen = set()
1308 seenbranch = set()
1308 seenbranch = set()
1309 if base is None:
1309 if base is None:
1310 base = {}
1310 base = {}
1311
1311
1312 if not heads:
1312 if not heads:
1313 heads = remote.heads()
1313 heads = remote.heads()
1314
1314
1315 if self.changelog.tip() == nullid:
1315 if self.changelog.tip() == nullid:
1316 base[nullid] = 1
1316 base[nullid] = 1
1317 if heads != [nullid]:
1317 if heads != [nullid]:
1318 return [nullid], [nullid], list(heads)
1318 return [nullid], [nullid], list(heads)
1319 return [nullid], [], []
1319 return [nullid], [], []
1320
1320
1321 # assume we're closer to the tip than the root
1321 # assume we're closer to the tip than the root
1322 # and start by examining the heads
1322 # and start by examining the heads
1323 self.ui.status(_("searching for changes\n"))
1323 self.ui.status(_("searching for changes\n"))
1324
1324
1325 unknown = []
1325 unknown = []
1326 for h in heads:
1326 for h in heads:
1327 if h not in m:
1327 if h not in m:
1328 unknown.append(h)
1328 unknown.append(h)
1329 else:
1329 else:
1330 base[h] = 1
1330 base[h] = 1
1331
1331
1332 heads = unknown
1332 heads = unknown
1333 if not unknown:
1333 if not unknown:
1334 return base.keys(), [], []
1334 return base.keys(), [], []
1335
1335
1336 req = set(unknown)
1336 req = set(unknown)
1337 reqcnt = 0
1337 reqcnt = 0
1338
1338
1339 # search through remote branches
1339 # search through remote branches
1340 # a 'branch' here is a linear segment of history, with four parts:
1340 # a 'branch' here is a linear segment of history, with four parts:
1341 # head, root, first parent, second parent
1341 # head, root, first parent, second parent
1342 # (a branch always has two parents (or none) by definition)
1342 # (a branch always has two parents (or none) by definition)
1343 unknown = remote.branches(unknown)
1343 unknown = remote.branches(unknown)
1344 while unknown:
1344 while unknown:
1345 r = []
1345 r = []
1346 while unknown:
1346 while unknown:
1347 n = unknown.pop(0)
1347 n = unknown.pop(0)
1348 if n[0] in seen:
1348 if n[0] in seen:
1349 continue
1349 continue
1350
1350
1351 self.ui.debug("examining %s:%s\n"
1351 self.ui.debug("examining %s:%s\n"
1352 % (short(n[0]), short(n[1])))
1352 % (short(n[0]), short(n[1])))
1353 if n[0] == nullid: # found the end of the branch
1353 if n[0] == nullid: # found the end of the branch
1354 pass
1354 pass
1355 elif n in seenbranch:
1355 elif n in seenbranch:
1356 self.ui.debug("branch already found\n")
1356 self.ui.debug("branch already found\n")
1357 continue
1357 continue
1358 elif n[1] and n[1] in m: # do we know the base?
1358 elif n[1] and n[1] in m: # do we know the base?
1359 self.ui.debug("found incomplete branch %s:%s\n"
1359 self.ui.debug("found incomplete branch %s:%s\n"
1360 % (short(n[0]), short(n[1])))
1360 % (short(n[0]), short(n[1])))
1361 search.append(n[0:2]) # schedule branch range for scanning
1361 search.append(n[0:2]) # schedule branch range for scanning
1362 seenbranch.add(n)
1362 seenbranch.add(n)
1363 else:
1363 else:
1364 if n[1] not in seen and n[1] not in fetch:
1364 if n[1] not in seen and n[1] not in fetch:
1365 if n[2] in m and n[3] in m:
1365 if n[2] in m and n[3] in m:
1366 self.ui.debug("found new changeset %s\n" %
1366 self.ui.debug("found new changeset %s\n" %
1367 short(n[1]))
1367 short(n[1]))
1368 fetch.add(n[1]) # earliest unknown
1368 fetch.add(n[1]) # earliest unknown
1369 for p in n[2:4]:
1369 for p in n[2:4]:
1370 if p in m:
1370 if p in m:
1371 base[p] = 1 # latest known
1371 base[p] = 1 # latest known
1372
1372
1373 for p in n[2:4]:
1373 for p in n[2:4]:
1374 if p not in req and p not in m:
1374 if p not in req and p not in m:
1375 r.append(p)
1375 r.append(p)
1376 req.add(p)
1376 req.add(p)
1377 seen.add(n[0])
1377 seen.add(n[0])
1378
1378
1379 if r:
1379 if r:
1380 reqcnt += 1
1380 reqcnt += 1
1381 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1381 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1382 self.ui.debug("request %d: %s\n" %
1382 self.ui.debug("request %d: %s\n" %
1383 (reqcnt, " ".join(map(short, r))))
1383 (reqcnt, " ".join(map(short, r))))
1384 for p in xrange(0, len(r), 10):
1384 for p in xrange(0, len(r), 10):
1385 for b in remote.branches(r[p:p + 10]):
1385 for b in remote.branches(r[p:p + 10]):
1386 self.ui.debug("received %s:%s\n" %
1386 self.ui.debug("received %s:%s\n" %
1387 (short(b[0]), short(b[1])))
1387 (short(b[0]), short(b[1])))
1388 unknown.append(b)
1388 unknown.append(b)
1389
1389
1390 # do binary search on the branches we found
1390 # do binary search on the branches we found
1391 while search:
1391 while search:
1392 newsearch = []
1392 newsearch = []
1393 reqcnt += 1
1393 reqcnt += 1
1394 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1394 self.ui.progress(_('searching'), reqcnt, unit=_('queries'))
1395 for n, l in zip(search, remote.between(search)):
1395 for n, l in zip(search, remote.between(search)):
1396 l.append(n[1])
1396 l.append(n[1])
1397 p = n[0]
1397 p = n[0]
1398 f = 1
1398 f = 1
1399 for i in l:
1399 for i in l:
1400 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1400 self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
1401 if i in m:
1401 if i in m:
1402 if f <= 2:
1402 if f <= 2:
1403 self.ui.debug("found new branch changeset %s\n" %
1403 self.ui.debug("found new branch changeset %s\n" %
1404 short(p))
1404 short(p))
1405 fetch.add(p)
1405 fetch.add(p)
1406 base[i] = 1
1406 base[i] = 1
1407 else:
1407 else:
1408 self.ui.debug("narrowed branch search to %s:%s\n"
1408 self.ui.debug("narrowed branch search to %s:%s\n"
1409 % (short(p), short(i)))
1409 % (short(p), short(i)))
1410 newsearch.append((p, i))
1410 newsearch.append((p, i))
1411 break
1411 break
1412 p, f = i, f * 2
1412 p, f = i, f * 2
1413 search = newsearch
1413 search = newsearch
1414
1414
1415 # sanity check our fetch list
1415 # sanity check our fetch list
1416 for f in fetch:
1416 for f in fetch:
1417 if f in m:
1417 if f in m:
1418 raise error.RepoError(_("already have changeset ")
1418 raise error.RepoError(_("already have changeset ")
1419 + short(f[:4]))
1419 + short(f[:4]))
1420
1420
1421 if base.keys() == [nullid]:
1421 if base.keys() == [nullid]:
1422 if force:
1422 if force:
1423 self.ui.warn(_("warning: repository is unrelated\n"))
1423 self.ui.warn(_("warning: repository is unrelated\n"))
1424 else:
1424 else:
1425 raise util.Abort(_("repository is unrelated"))
1425 raise util.Abort(_("repository is unrelated"))
1426
1426
1427 self.ui.debug("found new changesets starting at " +
1427 self.ui.debug("found new changesets starting at " +
1428 " ".join([short(f) for f in fetch]) + "\n")
1428 " ".join([short(f) for f in fetch]) + "\n")
1429
1429
1430 self.ui.progress(_('searching'), None)
1430 self.ui.progress(_('searching'), None)
1431 self.ui.debug("%d total queries\n" % reqcnt)
1431 self.ui.debug("%d total queries\n" % reqcnt)
1432
1432
1433 return base.keys(), list(fetch), heads
1433 return base.keys(), list(fetch), heads
1434
1434
1435 def findoutgoing(self, remote, base=None, heads=None, force=False):
1435 def findoutgoing(self, remote, base=None, heads=None, force=False):
1436 """Return list of nodes that are roots of subsets not in remote
1436 """Return list of nodes that are roots of subsets not in remote
1437
1437
1438 If base dict is specified, assume that these nodes and their parents
1438 If base dict is specified, assume that these nodes and their parents
1439 exist on the remote side.
1439 exist on the remote side.
1440 If a list of heads is specified, return only nodes which are heads
1440 If a list of heads is specified, return only nodes which are heads
1441 or ancestors of these heads, and return a second element which
1441 or ancestors of these heads, and return a second element which
1442 contains all remote heads which get new children.
1442 contains all remote heads which get new children.
1443 """
1443 """
1444 if base is None:
1444 if base is None:
1445 base = {}
1445 base = {}
1446 self.findincoming(remote, base, heads, force=force)
1446 self.findincoming(remote, base, heads, force=force)
1447
1447
1448 self.ui.debug("common changesets up to "
1448 self.ui.debug("common changesets up to "
1449 + " ".join(map(short, base.keys())) + "\n")
1449 + " ".join(map(short, base.keys())) + "\n")
1450
1450
1451 remain = set(self.changelog.nodemap)
1451 remain = set(self.changelog.nodemap)
1452
1452
1453 # prune everything remote has from the tree
1453 # prune everything remote has from the tree
1454 remain.remove(nullid)
1454 remain.remove(nullid)
1455 remove = base.keys()
1455 remove = base.keys()
1456 while remove:
1456 while remove:
1457 n = remove.pop(0)
1457 n = remove.pop(0)
1458 if n in remain:
1458 if n in remain:
1459 remain.remove(n)
1459 remain.remove(n)
1460 for p in self.changelog.parents(n):
1460 for p in self.changelog.parents(n):
1461 remove.append(p)
1461 remove.append(p)
1462
1462
1463 # find every node whose parents have been pruned
1463 # find every node whose parents have been pruned
1464 subset = []
1464 subset = []
1465 # find every remote head that will get new children
1465 # find every remote head that will get new children
1466 updated_heads = set()
1466 updated_heads = set()
1467 for n in remain:
1467 for n in remain:
1468 p1, p2 = self.changelog.parents(n)
1468 p1, p2 = self.changelog.parents(n)
1469 if p1 not in remain and p2 not in remain:
1469 if p1 not in remain and p2 not in remain:
1470 subset.append(n)
1470 subset.append(n)
1471 if heads:
1471 if heads:
1472 if p1 in heads:
1472 if p1 in heads:
1473 updated_heads.add(p1)
1473 updated_heads.add(p1)
1474 if p2 in heads:
1474 if p2 in heads:
1475 updated_heads.add(p2)
1475 updated_heads.add(p2)
1476
1476
1477 # this is the set of all roots we have to push
1477 # this is the set of all roots we have to push
1478 if heads:
1478 if heads:
1479 return subset, list(updated_heads)
1479 return subset, list(updated_heads)
1480 else:
1480 else:
1481 return subset
1481 return subset
1482
1482
1483 def pull(self, remote, heads=None, force=False):
1483 def pull(self, remote, heads=None, force=False):
1484 lock = self.lock()
1484 lock = self.lock()
1485 try:
1485 try:
1486 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1486 common, fetch, rheads = self.findcommonincoming(remote, heads=heads,
1487 force=force)
1487 force=force)
1488 if not fetch:
1488 if not fetch:
1489 self.ui.status(_("no changes found\n"))
1489 self.ui.status(_("no changes found\n"))
1490 return 0
1490 return 0
1491
1491
1492 if fetch == [nullid]:
1492 if fetch == [nullid]:
1493 self.ui.status(_("requesting all changes\n"))
1493 self.ui.status(_("requesting all changes\n"))
1494 elif heads is None and remote.capable('changegroupsubset'):
1494 elif heads is None and remote.capable('changegroupsubset'):
1495 # issue1320, avoid a race if remote changed after discovery
1495 # issue1320, avoid a race if remote changed after discovery
1496 heads = rheads
1496 heads = rheads
1497
1497
1498 if heads is None:
1498 if heads is None:
1499 cg = remote.changegroup(fetch, 'pull')
1499 cg = remote.changegroup(fetch, 'pull')
1500 else:
1500 else:
1501 if not remote.capable('changegroupsubset'):
1501 if not remote.capable('changegroupsubset'):
1502 raise util.Abort(_("Partial pull cannot be done because "
1502 raise util.Abort(_("Partial pull cannot be done because "
1503 "other repository doesn't support "
1503 "other repository doesn't support "
1504 "changegroupsubset."))
1504 "changegroupsubset."))
1505 cg = remote.changegroupsubset(fetch, heads, 'pull')
1505 cg = remote.changegroupsubset(fetch, heads, 'pull')
1506 return self.addchangegroup(cg, 'pull', remote.url())
1506 return self.addchangegroup(cg, 'pull', remote.url())
1507 finally:
1507 finally:
1508 lock.release()
1508 lock.release()
1509
1509
1510 def push(self, remote, force=False, revs=None):
1510 def push(self, remote, force=False, revs=None, newbranch=False):
1511 '''Push outgoing changesets (limited by revs) from the current
1511 '''Push outgoing changesets (limited by revs) from the current
1512 repository to remote. Return an integer:
1512 repository to remote. Return an integer:
1513 - 0 means HTTP error *or* nothing to push
1513 - 0 means HTTP error *or* nothing to push
1514 - 1 means we pushed and remote head count is unchanged *or*
1514 - 1 means we pushed and remote head count is unchanged *or*
1515 we have outgoing changesets but refused to push
1515 we have outgoing changesets but refused to push
1516 - other values as described by addchangegroup()
1516 - other values as described by addchangegroup()
1517 '''
1517 '''
1518 # there are two ways to push to remote repo:
1518 # there are two ways to push to remote repo:
1519 #
1519 #
1520 # addchangegroup assumes local user can lock remote
1520 # addchangegroup assumes local user can lock remote
1521 # repo (local filesystem, old ssh servers).
1521 # repo (local filesystem, old ssh servers).
1522 #
1522 #
1523 # unbundle assumes local user cannot lock remote repo (new ssh
1523 # unbundle assumes local user cannot lock remote repo (new ssh
1524 # servers, http servers).
1524 # servers, http servers).
1525
1525
1526 if remote.capable('unbundle'):
1526 if remote.capable('unbundle'):
1527 return self.push_unbundle(remote, force, revs)
1527 return self.push_unbundle(remote, force, revs, newbranch)
1528 return self.push_addchangegroup(remote, force, revs)
1528 return self.push_addchangegroup(remote, force, revs, newbranch)
1529
1529
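push() now threads the newbranch flag (set by the new --new-branch option this
changeset introduces) through both transport paths down to prepush(). A hedged
sketch of a caller interpreting the return codes documented above (the peer URL
is a placeholder; repo as before)::

    from mercurial import hg

    other = hg.repository(repo.ui, 'http://hg.example.com/repo')
    r = repo.push(other, force=False, revs=None, newbranch=True)
    if r == 0:
        repo.ui.warn('nothing was pushed (or the remote reported an error)\n')
    elif r == 1:
        repo.ui.status('remote head count unchanged, or push was refused\n')
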
1530 def prepush(self, remote, force, revs):
1530 def prepush(self, remote, force, revs, newbranch):
1531 '''Analyze the local and remote repositories and determine which
1531 '''Analyze the local and remote repositories and determine which
1532 changesets need to be pushed to the remote. Return value depends
1532 changesets need to be pushed to the remote. Return value depends
1533 on circumstances:
1533 on circumstances:
1534
1534
1535 If we are not going to push anything, return a tuple (None,
1535 If we are not going to push anything, return a tuple (None,
1536 outgoing) where outgoing is 0 if there are no outgoing
1536 outgoing) where outgoing is 0 if there are no outgoing
1537 changesets and 1 if there are, but we refuse to push them
1537 changesets and 1 if there are, but we refuse to push them
1538 (e.g. would create new remote heads).
1538 (e.g. would create new remote heads).
1539
1539
1540 Otherwise, return a tuple (changegroup, remoteheads), where
1540 Otherwise, return a tuple (changegroup, remoteheads), where
1541 changegroup is a readable file-like object whose read() returns
1541 changegroup is a readable file-like object whose read() returns
1542 successive changegroup chunks ready to be sent over the wire and
1542 successive changegroup chunks ready to be sent over the wire and
1543 remoteheads is the list of remote heads.'''
1543 remoteheads is the list of remote heads.'''
1544 common = {}
1544 common = {}
1545 remote_heads = remote.heads()
1545 remote_heads = remote.heads()
1546 inc = self.findincoming(remote, common, remote_heads, force=force)
1546 inc = self.findincoming(remote, common, remote_heads, force=force)
1547
1547
1548 cl = self.changelog
1548 cl = self.changelog
1549 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1549 update, updated_heads = self.findoutgoing(remote, common, remote_heads)
1550 outg, bases, heads = cl.nodesbetween(update, revs)
1550 outg, bases, heads = cl.nodesbetween(update, revs)
1551
1551
1552 if not bases:
1552 if not bases:
1553 self.ui.status(_("no changes found\n"))
1553 self.ui.status(_("no changes found\n"))
1554 return None, 1
1554 return None, 1
1555
1555
1556 if not force and remote_heads != [nullid]:
1556 if not force and remote_heads != [nullid]:
1557
1557
1558 def fail_multiple_heads(unsynced, branch=None):
1558 def fail_multiple_heads(unsynced, branch=None):
1559 if branch:
1559 if branch:
1560 msg = _("abort: push creates new remote heads"
1560 msg = _("abort: push creates new remote heads"
1561 " on branch '%s'!\n") % branch
1561 " on branch '%s'!\n") % branch
1562 else:
1562 else:
1563 msg = _("abort: push creates new remote heads!\n")
1563 msg = _("abort: push creates new remote heads!\n")
1564 self.ui.warn(msg)
1564 self.ui.warn(msg)
1565 if unsynced:
1565 if unsynced:
1566 self.ui.status(_("(you should pull and merge or"
1566 self.ui.status(_("(you should pull and merge or"
1567 " use push -f to force)\n"))
1567 " use push -f to force)\n"))
1568 else:
1568 else:
1569 self.ui.status(_("(did you forget to merge?"
1569 self.ui.status(_("(did you forget to merge?"
1570 " use push -f to force)\n"))
1570 " use push -f to force)\n"))
1571 return None, 0
1571 return None, 0
1572
1572
1573 if remote.capable('branchmap'):
1573 if remote.capable('branchmap'):
1574 # Check for each named branch if we're creating new remote heads.
1574 # Check for each named branch if we're creating new remote heads.
1575 # To be a remote head after push, node must be either:
1575 # To be a remote head after push, node must be either:
1576 # - unknown locally
1576 # - unknown locally
1577 # - a local outgoing head descended from update
1577 # - a local outgoing head descended from update
1578 # - a remote head that's known locally and not
1578 # - a remote head that's known locally and not
1579 # ancestral to an outgoing head
1579 # ancestral to an outgoing head
1580 #
1580 #
1581 # New named branches cannot be created without --force.
1581 # New named branches cannot be created without --force.
1582
1582
1583 # 1. Create set of branches involved in the push.
1583 # 1. Create set of branches involved in the push.
1584 branches = set(self[n].branch() for n in outg)
1584 branches = set(self[n].branch() for n in outg)
1585
1585
1586 # 2. Check for new branches on the remote.
1586 # 2. Check for new branches on the remote.
1587 remotemap = remote.branchmap()
1587 remotemap = remote.branchmap()
1588 newbranches = branches - set(remotemap)
1588 newbranches = branches - set(remotemap)
1589 if newbranches: # new branch requires --force
1589 if newbranches and not newbranch: # new branch requires --new-branch
1590 branchnames = ', '.join("%s" % b for b in newbranches)
1590 branchnames = ', '.join("%s" % b for b in newbranches)
1591 self.ui.warn(_("abort: push creates "
1591 self.ui.warn(_("abort: push creates "
1592 "new remote branches: %s!\n")
1592 "new remote branches: %s!\n")
1593 % branchnames)
1593 % branchnames)
1594 self.ui.status(_("(use 'hg push -f' to force)\n"))
1594 self.ui.status(_("(use 'hg push --new-branch' to create new "
1595 "remote branches)\n"))
1595 return None, 0
1596 return None, 0
1597 branches.difference_update(newbranches)
1596
1598
1597 # 3. Construct the initial oldmap and newmap dicts.
1599 # 3. Construct the initial oldmap and newmap dicts.
1598 # They contain information about the remote heads before and
1600 # They contain information about the remote heads before and
1599 # after the push, respectively.
1601 # after the push, respectively.
1600 # Heads not found locally are not included in either dict,
1602 # Heads not found locally are not included in either dict,
1601 # since they won't be affected by the push.
1603 # since they won't be affected by the push.
1602 # unsynced contains all branches with incoming changesets.
1604 # unsynced contains all branches with incoming changesets.
1603 oldmap = {}
1605 oldmap = {}
1604 newmap = {}
1606 newmap = {}
1605 unsynced = set()
1607 unsynced = set()
1606 for branch in branches:
1608 for branch in branches:
1607 remoteheads = remotemap[branch]
1609 remoteheads = remotemap[branch]
1608 prunedheads = [h for h in remoteheads if h in cl.nodemap]
1610 prunedheads = [h for h in remoteheads if h in cl.nodemap]
1609 oldmap[branch] = prunedheads
1611 oldmap[branch] = prunedheads
1610 newmap[branch] = list(prunedheads)
1612 newmap[branch] = list(prunedheads)
1611 if len(remoteheads) > len(prunedheads):
1613 if len(remoteheads) > len(prunedheads):
1612 unsynced.add(branch)
1614 unsynced.add(branch)
1613
1615
1614 # 4. Update newmap with outgoing changes.
1616 # 4. Update newmap with outgoing changes.
1615 # This will possibly add new heads and remove existing ones.
1617 # This will possibly add new heads and remove existing ones.
1616 ctxgen = (self[n] for n in outg)
1618 ctxgen = (self[n] for n in outg)
1617 self._updatebranchcache(newmap, ctxgen)
1619 self._updatebranchcache(newmap, ctxgen)
1618
1620
1619 # 5. Check for new heads.
1621 # 5. Check for new heads.
1620 # If there are more heads after the push than before, a suitable
1622 # If there are more heads after the push than before, a suitable
1621 # warning, depending on unsynced status, is displayed.
1623 # warning, depending on unsynced status, is displayed.
1622 for branch in branches:
1624 for branch in branches:
1623 if len(newmap[branch]) > len(oldmap[branch]):
1625 if len(newmap[branch]) > len(oldmap[branch]):
1624 return fail_multiple_heads(branch in unsynced, branch)
1626 return fail_multiple_heads(branch in unsynced, branch)
1625
1627
1626 # 6. Check for unsynced changes on involved branches.
1628 # 6. Check for unsynced changes on involved branches.
1627 if unsynced:
1629 if unsynced:
1628 self.ui.warn(_("note: unsynced remote changes!\n"))
1630 self.ui.warn(_("note: unsynced remote changes!\n"))
1629
1631
1630 else:
1632 else:
1631 # Old servers: Check for new topological heads.
1633 # Old servers: Check for new topological heads.
1632 # Code based on _updatebranchcache.
1634 # Code based on _updatebranchcache.
1633 newheads = set(h for h in remote_heads if h in cl.nodemap)
1635 newheads = set(h for h in remote_heads if h in cl.nodemap)
1634 oldheadcnt = len(newheads)
1636 oldheadcnt = len(newheads)
1635 newheads.update(outg)
1637 newheads.update(outg)
1636 if len(newheads) > 1:
1638 if len(newheads) > 1:
1637 for latest in reversed(outg):
1639 for latest in reversed(outg):
1638 if latest not in newheads:
1640 if latest not in newheads:
1639 continue
1641 continue
1640 minhrev = min(cl.rev(h) for h in newheads)
1642 minhrev = min(cl.rev(h) for h in newheads)
1641 reachable = cl.reachable(latest, cl.node(minhrev))
1643 reachable = cl.reachable(latest, cl.node(minhrev))
1642 reachable.remove(latest)
1644 reachable.remove(latest)
1643 newheads.difference_update(reachable)
1645 newheads.difference_update(reachable)
1644 if len(newheads) > oldheadcnt:
1646 if len(newheads) > oldheadcnt:
1645 return fail_multiple_heads(inc)
1647 return fail_multiple_heads(inc)
1646 if inc:
1648 if inc:
1647 self.ui.warn(_("note: unsynced remote changes!\n"))
1649 self.ui.warn(_("note: unsynced remote changes!\n"))
1648
1650
1649 if revs is None:
1651 if revs is None:
1650 # use the fast path, no race possible on push
1652 # use the fast path, no race possible on push
1651 nodes = self.changelog.findmissing(common.keys())
1653 nodes = self.changelog.findmissing(common.keys())
1652 cg = self._changegroup(nodes, 'push')
1654 cg = self._changegroup(nodes, 'push')
1653 else:
1655 else:
1654 cg = self.changegroupsubset(update, revs, 'push')
1656 cg = self.changegroupsubset(update, revs, 'push')
1655 return cg, remote_heads
1657 return cg, remote_heads
1656
1658
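The branchmap branch of prepush() above is where --new-branch takes effect:
the set of branches touched by the outgoing changesets is compared against the
remote's branchmap, and any branch name the remote does not know about now
aborts the push unless newbranch is set (the whole check is already skipped
for forced pushes). A stripped-down sketch of just that set comparison, with
plain strings standing in for the real changectx and branchmap data (the real
code warns and returns instead of raising)::

    from mercurial import util
    from mercurial.i18n import _

    def check_new_branches(outgoing_branches, remote_branchmap, newbranch):
        # mirrors steps 1-2 of the branchmap check in prepush()
        newbranches = set(outgoing_branches) - set(remote_branchmap)
        if newbranches and not newbranch:
            raise util.Abort(_('push creates new remote branches: %s')
                             % ', '.join(sorted(newbranches)))
        return newbranches

    # check_new_branches(['default', 'stable'], ['default'], newbranch=False)
    # aborts; passing newbranch=True (hg push --new-branch) lets it proceed.
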
1657 def push_addchangegroup(self, remote, force, revs):
1659 def push_addchangegroup(self, remote, force, revs, newbranch):
1658 '''Push a changegroup by locking the remote and sending the
1660 '''Push a changegroup by locking the remote and sending the
1659 addchangegroup command to it. Used for local and old SSH repos.
1661 addchangegroup command to it. Used for local and old SSH repos.
1660 Return an integer: see push().
1662 Return an integer: see push().
1661 '''
1663 '''
1662 lock = remote.lock()
1664 lock = remote.lock()
1663 try:
1665 try:
1664 ret = self.prepush(remote, force, revs)
1666 ret = self.prepush(remote, force, revs, newbranch)
1665 if ret[0] is not None:
1667 if ret[0] is not None:
1666 cg, remote_heads = ret
1668 cg, remote_heads = ret
1667 # here, we return an integer indicating remote head count change
1669 # here, we return an integer indicating remote head count change
1668 return remote.addchangegroup(cg, 'push', self.url())
1670 return remote.addchangegroup(cg, 'push', self.url())
1669 # and here we return 0 for "nothing to push" or 1 for
1671 # and here we return 0 for "nothing to push" or 1 for
1670 # "something to push but I refuse"
1672 # "something to push but I refuse"
1671 return ret[1]
1673 return ret[1]
1672 finally:
1674 finally:
1673 lock.release()
1675 lock.release()
1674
1676
1675 def push_unbundle(self, remote, force, revs):
1677 def push_unbundle(self, remote, force, revs, newbranch):
1676 '''Push a changegroup by unbundling it on the remote. Used for new
1678 '''Push a changegroup by unbundling it on the remote. Used for new
1677 SSH and HTTP repos. Return an integer: see push().'''
1679 SSH and HTTP repos. Return an integer: see push().'''
1678 # local repo finds heads on server, finds out what revs it
1680 # local repo finds heads on server, finds out what revs it
1679 # must push. once revs transferred, if server finds it has
1681 # must push. once revs transferred, if server finds it has
1680 # different heads (someone else won commit/push race), server
1682 # different heads (someone else won commit/push race), server
1681 # aborts.
1683 # aborts.
1682
1684
1683 ret = self.prepush(remote, force, revs)
1685 ret = self.prepush(remote, force, revs, newbranch)
1684 if ret[0] is not None:
1686 if ret[0] is not None:
1685 cg, remote_heads = ret
1687 cg, remote_heads = ret
1686 if force:
1688 if force:
1687 remote_heads = ['force']
1689 remote_heads = ['force']
1688 # ssh: return remote's addchangegroup()
1690 # ssh: return remote's addchangegroup()
1689 # http: return remote's addchangegroup() or 0 for error
1691 # http: return remote's addchangegroup() or 0 for error
1690 return remote.unbundle(cg, remote_heads, 'push')
1692 return remote.unbundle(cg, remote_heads, 'push')
1691 # as in push_addchangegroup()
1693 # as in push_addchangegroup()
1692 return ret[1]
1694 return ret[1]
1693
1695
1694 def changegroupinfo(self, nodes, source):
1696 def changegroupinfo(self, nodes, source):
1695 if self.ui.verbose or source == 'bundle':
1697 if self.ui.verbose or source == 'bundle':
1696 self.ui.status(_("%d changesets found\n") % len(nodes))
1698 self.ui.status(_("%d changesets found\n") % len(nodes))
1697 if self.ui.debugflag:
1699 if self.ui.debugflag:
1698 self.ui.debug("list of changesets:\n")
1700 self.ui.debug("list of changesets:\n")
1699 for node in nodes:
1701 for node in nodes:
1700 self.ui.debug("%s\n" % hex(node))
1702 self.ui.debug("%s\n" % hex(node))
1701
1703
1702 def changegroupsubset(self, bases, heads, source, extranodes=None):
1704 def changegroupsubset(self, bases, heads, source, extranodes=None):
1703 """Compute a changegroup consisting of all the nodes that are
1705 """Compute a changegroup consisting of all the nodes that are
1704 descendants of any of the bases and ancestors of any of the heads.
1706 descendants of any of the bases and ancestors of any of the heads.
1705 Return a chunkbuffer object whose read() method will return
1707 Return a chunkbuffer object whose read() method will return
1706 successive changegroup chunks.
1708 successive changegroup chunks.
1707
1709
1708 It is fairly complex as determining which filenodes and which
1710 It is fairly complex as determining which filenodes and which
1709 manifest nodes need to be included for the changeset to be complete
1711 manifest nodes need to be included for the changeset to be complete
1710 is non-trivial.
1712 is non-trivial.
1711
1713
1712 Another wrinkle is doing the reverse, figuring out which changeset in
1714 Another wrinkle is doing the reverse, figuring out which changeset in
1713 the changegroup a particular filenode or manifestnode belongs to.
1715 the changegroup a particular filenode or manifestnode belongs to.
1714
1716
1715 The caller can specify some nodes that must be included in the
1717 The caller can specify some nodes that must be included in the
1716 changegroup using the extranodes argument. It should be a dict
1718 changegroup using the extranodes argument. It should be a dict
1717 where the keys are the filenames (or 1 for the manifest), and the
1719 where the keys are the filenames (or 1 for the manifest), and the
1718 values are lists of (node, linknode) tuples, where node is a wanted
1720 values are lists of (node, linknode) tuples, where node is a wanted
1719 node and linknode is the changelog node that should be transmitted as
1721 node and linknode is the changelog node that should be transmitted as
1720 the linkrev.
1722 the linkrev.
1721 """
1723 """
1722
1724
1723 # Set up some initial variables
1725 # Set up some initial variables
1724 # Make it easy to refer to self.changelog
1726 # Make it easy to refer to self.changelog
1725 cl = self.changelog
1727 cl = self.changelog
1726 # msng is short for missing - compute the list of changesets in this
1728 # msng is short for missing - compute the list of changesets in this
1727 # changegroup.
1729 # changegroup.
1728 if not bases:
1730 if not bases:
1729 bases = [nullid]
1731 bases = [nullid]
1730 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1732 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1731
1733
1732 if extranodes is None:
1734 if extranodes is None:
1733 # can we go through the fast path ?
1735 # can we go through the fast path ?
1734 heads.sort()
1736 heads.sort()
1735 allheads = self.heads()
1737 allheads = self.heads()
1736 allheads.sort()
1738 allheads.sort()
1737 if heads == allheads:
1739 if heads == allheads:
1738 return self._changegroup(msng_cl_lst, source)
1740 return self._changegroup(msng_cl_lst, source)
1739
1741
1740 # slow path
1742 # slow path
1741 self.hook('preoutgoing', throw=True, source=source)
1743 self.hook('preoutgoing', throw=True, source=source)
1742
1744
1743 self.changegroupinfo(msng_cl_lst, source)
1745 self.changegroupinfo(msng_cl_lst, source)
1744 # Some bases may turn out to be superfluous, and some heads may be
1746 # Some bases may turn out to be superfluous, and some heads may be
1745 # too. nodesbetween will return the minimal set of bases and heads
1747 # too. nodesbetween will return the minimal set of bases and heads
1746 # necessary to re-create the changegroup.
1748 # necessary to re-create the changegroup.
1747
1749
1748 # Known heads are the list of heads that it is assumed the recipient
1750 # Known heads are the list of heads that it is assumed the recipient
1749 # of this changegroup will know about.
1751 # of this changegroup will know about.
1750 knownheads = set()
1752 knownheads = set()
1751 # We assume that all parents of bases are known heads.
1753 # We assume that all parents of bases are known heads.
1752 for n in bases:
1754 for n in bases:
1753 knownheads.update(cl.parents(n))
1755 knownheads.update(cl.parents(n))
1754 knownheads.discard(nullid)
1756 knownheads.discard(nullid)
1755 knownheads = list(knownheads)
1757 knownheads = list(knownheads)
1756 if knownheads:
1758 if knownheads:
1757 # Now that we know what heads are known, we can compute which
1759 # Now that we know what heads are known, we can compute which
1758 # changesets are known. The recipient must know about all
1760 # changesets are known. The recipient must know about all
1759 # changesets required to reach the known heads from the null
1761 # changesets required to reach the known heads from the null
1760 # changeset.
1762 # changeset.
1761 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1763 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1762 junk = None
1764 junk = None
1763 # Transform the list into a set.
1765 # Transform the list into a set.
1764 has_cl_set = set(has_cl_set)
1766 has_cl_set = set(has_cl_set)
1765 else:
1767 else:
1766 # If there were no known heads, the recipient cannot be assumed to
1768 # If there were no known heads, the recipient cannot be assumed to
1767 # know about any changesets.
1769 # know about any changesets.
1768 has_cl_set = set()
1770 has_cl_set = set()
1769
1771
1770 # Make it easy to refer to self.manifest
1772 # Make it easy to refer to self.manifest
1771 mnfst = self.manifest
1773 mnfst = self.manifest
1772 # We don't know which manifests are missing yet
1774 # We don't know which manifests are missing yet
1773 msng_mnfst_set = {}
1775 msng_mnfst_set = {}
1774 # Nor do we know which filenodes are missing.
1776 # Nor do we know which filenodes are missing.
1775 msng_filenode_set = {}
1777 msng_filenode_set = {}
1776
1778
1777 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1779 junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex
1778 junk = None
1780 junk = None
1779
1781
1780 # A changeset always belongs to itself, so the changenode lookup
1782 # A changeset always belongs to itself, so the changenode lookup
1781 # function for a changenode is identity.
1783 # function for a changenode is identity.
1782 def identity(x):
1784 def identity(x):
1783 return x
1785 return x
1784
1786
1785 # If we determine that a particular file or manifest node must be a
1787 # If we determine that a particular file or manifest node must be a
1786 # node that the recipient of the changegroup will already have, we can
1788 # node that the recipient of the changegroup will already have, we can
1787 # also assume the recipient will have all the parents. This function
1789 # also assume the recipient will have all the parents. This function
1788 # prunes them from the set of missing nodes.
1790 # prunes them from the set of missing nodes.
1789 def prune_parents(revlog, hasset, msngset):
1791 def prune_parents(revlog, hasset, msngset):
1790 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1792 for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]):
1791 msngset.pop(revlog.node(r), None)
1793 msngset.pop(revlog.node(r), None)
1792
1794
1793 # Use the information collected in collect_manifests_and_files to say
1795 # Use the information collected in collect_manifests_and_files to say
1794 # which changenode any manifestnode belongs to.
1796 # which changenode any manifestnode belongs to.
1795 def lookup_manifest_link(mnfstnode):
1797 def lookup_manifest_link(mnfstnode):
1796 return msng_mnfst_set[mnfstnode]
1798 return msng_mnfst_set[mnfstnode]
1797
1799
1798 # A function generating function that sets up the initial environment for
1800 # A function generating function that sets up the initial environment for
1799 # the inner function.
1801 # the inner function.
1800 def filenode_collector(changedfiles):
1802 def filenode_collector(changedfiles):
1801 # This gathers information from each manifestnode included in the
1803 # This gathers information from each manifestnode included in the
1802 # changegroup about which filenodes the manifest node references
1804 # changegroup about which filenodes the manifest node references
1803 # so we can include those in the changegroup too.
1805 # so we can include those in the changegroup too.
1804 #
1806 #
1805 # It also remembers which changenode each filenode belongs to. It
1807 # It also remembers which changenode each filenode belongs to. It
1806 # does this by assuming that a filenode belongs to the changenode
1808 # does this by assuming that a filenode belongs to the changenode
1807 # the first manifest that references it belongs to.
1809 # the first manifest that references it belongs to.
1808 def collect_msng_filenodes(mnfstnode):
1810 def collect_msng_filenodes(mnfstnode):
1809 r = mnfst.rev(mnfstnode)
1811 r = mnfst.rev(mnfstnode)
1810 if r - 1 in mnfst.parentrevs(r):
1812 if r - 1 in mnfst.parentrevs(r):
1811 # If the previous rev is one of the parents,
1813 # If the previous rev is one of the parents,
1812 # we only need to see a diff.
1814 # we only need to see a diff.
1813 deltamf = mnfst.readdelta(mnfstnode)
1815 deltamf = mnfst.readdelta(mnfstnode)
1814 # For each line in the delta
1816 # For each line in the delta
1815 for f, fnode in deltamf.iteritems():
1817 for f, fnode in deltamf.iteritems():
1816 f = changedfiles.get(f, None)
1818 f = changedfiles.get(f, None)
1817 # And if the file is in the list of files we care
1819 # And if the file is in the list of files we care
1818 # about.
1820 # about.
1819 if f is not None:
1821 if f is not None:
1820 # Get the changenode this manifest belongs to
1822 # Get the changenode this manifest belongs to
1821 clnode = msng_mnfst_set[mnfstnode]
1823 clnode = msng_mnfst_set[mnfstnode]
1822 # Create the set of filenodes for the file if
1824 # Create the set of filenodes for the file if
1823 # there isn't one already.
1825 # there isn't one already.
1824 ndset = msng_filenode_set.setdefault(f, {})
1826 ndset = msng_filenode_set.setdefault(f, {})
1825 # And set the filenode's changelog node to the
1827 # And set the filenode's changelog node to the
1826 # manifest's if it hasn't been set already.
1828 # manifest's if it hasn't been set already.
1827 ndset.setdefault(fnode, clnode)
1829 ndset.setdefault(fnode, clnode)
1828 else:
1830 else:
1829 # Otherwise we need a full manifest.
1831 # Otherwise we need a full manifest.
1830 m = mnfst.read(mnfstnode)
1832 m = mnfst.read(mnfstnode)
1831 # For every file we care about.
1833 # For every file we care about.
1832 for f in changedfiles:
1834 for f in changedfiles:
1833 fnode = m.get(f, None)
1835 fnode = m.get(f, None)
1834 # If it's in the manifest
1836 # If it's in the manifest
1835 if fnode is not None:
1837 if fnode is not None:
1836 # See comments above.
1838 # See comments above.
1837 clnode = msng_mnfst_set[mnfstnode]
1839 clnode = msng_mnfst_set[mnfstnode]
1838 ndset = msng_filenode_set.setdefault(f, {})
1840 ndset = msng_filenode_set.setdefault(f, {})
1839 ndset.setdefault(fnode, clnode)
1841 ndset.setdefault(fnode, clnode)
1840 return collect_msng_filenodes
1842 return collect_msng_filenodes
1841
1843
1842 # We have a list of filenodes we think we need for a file, let's remove
1844 # We have a list of filenodes we think we need for a file, let's remove
1843 # all those we know the recipient must have.
1845 # all those we know the recipient must have.
1844 def prune_filenodes(f, filerevlog):
1846 def prune_filenodes(f, filerevlog):
1845 msngset = msng_filenode_set[f]
1847 msngset = msng_filenode_set[f]
1846 hasset = set()
1848 hasset = set()
1847 # If a 'missing' filenode thinks it belongs to a changenode we
1849 # If a 'missing' filenode thinks it belongs to a changenode we
1848 # assume the recipient must have, then the recipient must have
1850 # assume the recipient must have, then the recipient must have
1849 # that filenode.
1851 # that filenode.
1850 for n in msngset:
1852 for n in msngset:
1851 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1853 clnode = cl.node(filerevlog.linkrev(filerevlog.rev(n)))
1852 if clnode in has_cl_set:
1854 if clnode in has_cl_set:
1853 hasset.add(n)
1855 hasset.add(n)
1854 prune_parents(filerevlog, hasset, msngset)
1856 prune_parents(filerevlog, hasset, msngset)
1855
1857
1856 # A function generator function that sets up a context for the
1858 # A function generator function that sets up a context for the
1857 # inner function.
1859 # inner function.
1858 def lookup_filenode_link_func(fname):
1860 def lookup_filenode_link_func(fname):
1859 msngset = msng_filenode_set[fname]
1861 msngset = msng_filenode_set[fname]
1860 # Lookup the changenode the filenode belongs to.
1862 # Lookup the changenode the filenode belongs to.
1861 def lookup_filenode_link(fnode):
1863 def lookup_filenode_link(fnode):
1862 return msngset[fnode]
1864 return msngset[fnode]
1863 return lookup_filenode_link
1865 return lookup_filenode_link
1864
1866
1865 # Add the nodes that were explicitly requested.
1867 # Add the nodes that were explicitly requested.
1866 def add_extra_nodes(name, nodes):
1868 def add_extra_nodes(name, nodes):
1867 if not extranodes or name not in extranodes:
1869 if not extranodes or name not in extranodes:
1868 return
1870 return
1869
1871
1870 for node, linknode in extranodes[name]:
1872 for node, linknode in extranodes[name]:
1871 if node not in nodes:
1873 if node not in nodes:
1872 nodes[node] = linknode
1874 nodes[node] = linknode
1873
1875
1874 # Now that we have all these utility functions to help out and
1876 # Now that we have all these utility functions to help out and
1875 # logically divide up the task, generate the group.
1877 # logically divide up the task, generate the group.
1876 def gengroup():
1878 def gengroup():
1877 # The set of changed files starts empty.
1879 # The set of changed files starts empty.
1878 changedfiles = {}
1880 changedfiles = {}
1879 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1881 collect = changegroup.collector(cl, msng_mnfst_set, changedfiles)
1880
1882
1881 # Create a changenode group generator that will call our functions
1883 # Create a changenode group generator that will call our functions
1882 # back to lookup the owning changenode and collect information.
1884 # back to lookup the owning changenode and collect information.
1883 group = cl.group(msng_cl_lst, identity, collect)
1885 group = cl.group(msng_cl_lst, identity, collect)
1884 cnt = 0
1886 cnt = 0
1885 for chnk in group:
1887 for chnk in group:
1886 yield chnk
1888 yield chnk
1887 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1889 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
1888 cnt += 1
1890 cnt += 1
1889 self.ui.progress(_('bundling changes'), None)
1891 self.ui.progress(_('bundling changes'), None)
1890
1892
1891
1893
1892 # Figure out which manifest nodes (of the ones we think might be
1894 # Figure out which manifest nodes (of the ones we think might be
1893 # part of the changegroup) the recipient must know about and
1895 # part of the changegroup) the recipient must know about and
1894 # remove them from the changegroup.
1896 # remove them from the changegroup.
1895 has_mnfst_set = set()
1897 has_mnfst_set = set()
1896 for n in msng_mnfst_set:
1898 for n in msng_mnfst_set:
1897 # If a 'missing' manifest thinks it belongs to a changenode
1899 # If a 'missing' manifest thinks it belongs to a changenode
1898 # the recipient is assumed to have, obviously the recipient
1900 # the recipient is assumed to have, obviously the recipient
1899 # must have that manifest.
1901 # must have that manifest.
1900 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1902 linknode = cl.node(mnfst.linkrev(mnfst.rev(n)))
1901 if linknode in has_cl_set:
1903 if linknode in has_cl_set:
1902 has_mnfst_set.add(n)
1904 has_mnfst_set.add(n)
1903 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1905 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1904 add_extra_nodes(1, msng_mnfst_set)
1906 add_extra_nodes(1, msng_mnfst_set)
1905 msng_mnfst_lst = msng_mnfst_set.keys()
1907 msng_mnfst_lst = msng_mnfst_set.keys()
1906 # Sort the manifestnodes by revision number.
1908 # Sort the manifestnodes by revision number.
1907 msng_mnfst_lst.sort(key=mnfst.rev)
1909 msng_mnfst_lst.sort(key=mnfst.rev)
1908 # Create a generator for the manifestnodes that calls our lookup
1910 # Create a generator for the manifestnodes that calls our lookup
1909 # and data collection functions back.
1911 # and data collection functions back.
1910 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1912 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1911 filenode_collector(changedfiles))
1913 filenode_collector(changedfiles))
1912 cnt = 0
1914 cnt = 0
1913 for chnk in group:
1915 for chnk in group:
1914 yield chnk
1916 yield chnk
1915 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1917 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
1916 cnt += 1
1918 cnt += 1
1917 self.ui.progress(_('bundling manifests'), None)
1919 self.ui.progress(_('bundling manifests'), None)
1918
1920
1919 # These are no longer needed, dereference and toss the memory for
1921 # These are no longer needed, dereference and toss the memory for
1920 # them.
1922 # them.
1921 msng_mnfst_lst = None
1923 msng_mnfst_lst = None
1922 msng_mnfst_set.clear()
1924 msng_mnfst_set.clear()
1923
1925
1924 if extranodes:
1926 if extranodes:
1925 for fname in extranodes:
1927 for fname in extranodes:
1926 if isinstance(fname, int):
1928 if isinstance(fname, int):
1927 continue
1929 continue
1928 msng_filenode_set.setdefault(fname, {})
1930 msng_filenode_set.setdefault(fname, {})
1929 changedfiles[fname] = 1
1931 changedfiles[fname] = 1
1930 # Go through all our files in order sorted by name.
1932 # Go through all our files in order sorted by name.
1931 cnt = 0
1933 cnt = 0
1932 for fname in sorted(changedfiles):
1934 for fname in sorted(changedfiles):
1933 filerevlog = self.file(fname)
1935 filerevlog = self.file(fname)
1934 if not len(filerevlog):
1936 if not len(filerevlog):
1935 raise util.Abort(_("empty or missing revlog for %s") % fname)
1937 raise util.Abort(_("empty or missing revlog for %s") % fname)
1936 # Toss out the filenodes that the recipient isn't really
1938 # Toss out the filenodes that the recipient isn't really
1937 # missing.
1939 # missing.
1938 if fname in msng_filenode_set:
1940 if fname in msng_filenode_set:
1939 prune_filenodes(fname, filerevlog)
1941 prune_filenodes(fname, filerevlog)
1940 add_extra_nodes(fname, msng_filenode_set[fname])
1942 add_extra_nodes(fname, msng_filenode_set[fname])
1941 msng_filenode_lst = msng_filenode_set[fname].keys()
1943 msng_filenode_lst = msng_filenode_set[fname].keys()
1942 else:
1944 else:
1943 msng_filenode_lst = []
1945 msng_filenode_lst = []
1944 # If any filenodes are left, generate the group for them,
1946 # If any filenodes are left, generate the group for them,
1945 # otherwise don't bother.
1947 # otherwise don't bother.
1946 if len(msng_filenode_lst) > 0:
1948 if len(msng_filenode_lst) > 0:
1947 yield changegroup.chunkheader(len(fname))
1949 yield changegroup.chunkheader(len(fname))
1948 yield fname
1950 yield fname
1949 # Sort the filenodes by their revision #
1951 # Sort the filenodes by their revision #
1950 msng_filenode_lst.sort(key=filerevlog.rev)
1952 msng_filenode_lst.sort(key=filerevlog.rev)
1951 # Create a group generator and only pass in a changenode
1953 # Create a group generator and only pass in a changenode
1952 # lookup function as we need to collect no information
1954 # lookup function as we need to collect no information
1953 # from filenodes.
1955 # from filenodes.
1954 group = filerevlog.group(msng_filenode_lst,
1956 group = filerevlog.group(msng_filenode_lst,
1955 lookup_filenode_link_func(fname))
1957 lookup_filenode_link_func(fname))
1956 for chnk in group:
1958 for chnk in group:
1957 self.ui.progress(
1959 self.ui.progress(
1958 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1960 _('bundling files'), cnt, item=fname, unit=_('chunks'))
1959 cnt += 1
1961 cnt += 1
1960 yield chnk
1962 yield chnk
1961 if fname in msng_filenode_set:
1963 if fname in msng_filenode_set:
1962 # Don't need this anymore, toss it to free memory.
1964 # Don't need this anymore, toss it to free memory.
1963 del msng_filenode_set[fname]
1965 del msng_filenode_set[fname]
1964 # Signal that no more groups are left.
1966 # Signal that no more groups are left.
1965 yield changegroup.closechunk()
1967 yield changegroup.closechunk()
1966 self.ui.progress(_('bundling files'), None)
1968 self.ui.progress(_('bundling files'), None)
1967
1969
1968 if msng_cl_lst:
1970 if msng_cl_lst:
1969 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1971 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1970
1972
1971 return util.chunkbuffer(gengroup())
1973 return util.chunkbuffer(gengroup())
1972
1974
1973 def changegroup(self, basenodes, source):
1975 def changegroup(self, basenodes, source):
1974 # to avoid a race we use changegroupsubset() (issue1320)
1976 # to avoid a race we use changegroupsubset() (issue1320)
1975 return self.changegroupsubset(basenodes, self.heads(), source)
1977 return self.changegroupsubset(basenodes, self.heads(), source)
1976
1978
1977 def _changegroup(self, nodes, source):
1979 def _changegroup(self, nodes, source):
1978 """Compute the changegroup of all nodes that we have that a recipient
1980 """Compute the changegroup of all nodes that we have that a recipient
1979 doesn't. Return a chunkbuffer object whose read() method will return
1981 doesn't. Return a chunkbuffer object whose read() method will return
1980 successive changegroup chunks.
1982 successive changegroup chunks.
1981
1983
1982 This is much easier than the previous function as we can assume that
1984 This is much easier than the previous function as we can assume that
1983 the recipient has any changenode we aren't sending them.
1985 the recipient has any changenode we aren't sending them.
1984
1986
1985 nodes is the set of nodes to send"""
1987 nodes is the set of nodes to send"""
1986
1988
1987 self.hook('preoutgoing', throw=True, source=source)
1989 self.hook('preoutgoing', throw=True, source=source)
1988
1990
1989 cl = self.changelog
1991 cl = self.changelog
1990 revset = set([cl.rev(n) for n in nodes])
1992 revset = set([cl.rev(n) for n in nodes])
1991 self.changegroupinfo(nodes, source)
1993 self.changegroupinfo(nodes, source)
1992
1994
1993 def identity(x):
1995 def identity(x):
1994 return x
1996 return x
1995
1997
1996 def gennodelst(log):
1998 def gennodelst(log):
1997 for r in log:
1999 for r in log:
1998 if log.linkrev(r) in revset:
2000 if log.linkrev(r) in revset:
1999 yield log.node(r)
2001 yield log.node(r)
2000
2002
2001 def lookuprevlink_func(revlog):
2003 def lookuprevlink_func(revlog):
2002 def lookuprevlink(n):
2004 def lookuprevlink(n):
2003 return cl.node(revlog.linkrev(revlog.rev(n)))
2005 return cl.node(revlog.linkrev(revlog.rev(n)))
2004 return lookuprevlink
2006 return lookuprevlink
2005
2007
2006 def gengroup():
2008 def gengroup():
2007 '''yield a sequence of changegroup chunks (strings)'''
2009 '''yield a sequence of changegroup chunks (strings)'''
2008 # construct a list of all changed files
2010 # construct a list of all changed files
2009 changedfiles = {}
2011 changedfiles = {}
2010 mmfs = {}
2012 mmfs = {}
2011 collect = changegroup.collector(cl, mmfs, changedfiles)
2013 collect = changegroup.collector(cl, mmfs, changedfiles)
2012
2014
2013 cnt = 0
2015 cnt = 0
2014 for chnk in cl.group(nodes, identity, collect):
2016 for chnk in cl.group(nodes, identity, collect):
2015 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
2017 self.ui.progress(_('bundling changes'), cnt, unit=_('chunks'))
2016 cnt += 1
2018 cnt += 1
2017 yield chnk
2019 yield chnk
2018 self.ui.progress(_('bundling changes'), None)
2020 self.ui.progress(_('bundling changes'), None)
2019
2021
2020 mnfst = self.manifest
2022 mnfst = self.manifest
2021 nodeiter = gennodelst(mnfst)
2023 nodeiter = gennodelst(mnfst)
2022 cnt = 0
2024 cnt = 0
2023 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
2025 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
2024 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
2026 self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks'))
2025 cnt += 1
2027 cnt += 1
2026 yield chnk
2028 yield chnk
2027 self.ui.progress(_('bundling manifests'), None)
2029 self.ui.progress(_('bundling manifests'), None)
2028
2030
2029 cnt = 0
2031 cnt = 0
2030 for fname in sorted(changedfiles):
2032 for fname in sorted(changedfiles):
2031 filerevlog = self.file(fname)
2033 filerevlog = self.file(fname)
2032 if not len(filerevlog):
2034 if not len(filerevlog):
2033 raise util.Abort(_("empty or missing revlog for %s") % fname)
2035 raise util.Abort(_("empty or missing revlog for %s") % fname)
2034 nodeiter = gennodelst(filerevlog)
2036 nodeiter = gennodelst(filerevlog)
2035 nodeiter = list(nodeiter)
2037 nodeiter = list(nodeiter)
2036 if nodeiter:
2038 if nodeiter:
2037 yield changegroup.chunkheader(len(fname))
2039 yield changegroup.chunkheader(len(fname))
2038 yield fname
2040 yield fname
2039 lookup = lookuprevlink_func(filerevlog)
2041 lookup = lookuprevlink_func(filerevlog)
2040 for chnk in filerevlog.group(nodeiter, lookup):
2042 for chnk in filerevlog.group(nodeiter, lookup):
2041 self.ui.progress(
2043 self.ui.progress(
2042 _('bundling files'), cnt, item=fname, unit=_('chunks'))
2044 _('bundling files'), cnt, item=fname, unit=_('chunks'))
2043 cnt += 1
2045 cnt += 1
2044 yield chnk
2046 yield chnk
2045 self.ui.progress(_('bundling files'), None)
2047 self.ui.progress(_('bundling files'), None)
2046
2048
2047 yield changegroup.closechunk()
2049 yield changegroup.closechunk()
2048
2050
2049 if nodes:
2051 if nodes:
2050 self.hook('outgoing', node=hex(nodes[0]), source=source)
2052 self.hook('outgoing', node=hex(nodes[0]), source=source)
2051
2053
2052 return util.chunkbuffer(gengroup())
2054 return util.chunkbuffer(gengroup())
2053
2055
2054 def addchangegroup(self, source, srctype, url, emptyok=False):
2056 def addchangegroup(self, source, srctype, url, emptyok=False):
2055 """Add the changegroup returned by source.read() to this repo.
2057 """Add the changegroup returned by source.read() to this repo.
2056 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2058 srctype is a string like 'push', 'pull', or 'unbundle'. url is
2057 the URL of the repo where this changegroup is coming from.
2059 the URL of the repo where this changegroup is coming from.
2058
2060
2059 Return an integer summarizing the change to this repo:
2061 Return an integer summarizing the change to this repo:
2060 - nothing changed or no source: 0
2062 - nothing changed or no source: 0
2061 - more heads than before: 1+added heads (2..n)
2063 - more heads than before: 1+added heads (2..n)
2062 - fewer heads than before: -1-removed heads (-2..-n)
2064 - fewer heads than before: -1-removed heads (-2..-n)
2063 - number of heads stays the same: 1
2065 - number of heads stays the same: 1
2064 """
2066 """
2065 def csmap(x):
2067 def csmap(x):
2066 self.ui.debug("add changeset %s\n" % short(x))
2068 self.ui.debug("add changeset %s\n" % short(x))
2067 return len(cl)
2069 return len(cl)
2068
2070
2069 def revmap(x):
2071 def revmap(x):
2070 return cl.rev(x)
2072 return cl.rev(x)
2071
2073
2072 if not source:
2074 if not source:
2073 return 0
2075 return 0
2074
2076
2075 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2077 self.hook('prechangegroup', throw=True, source=srctype, url=url)
2076
2078
2077 changesets = files = revisions = 0
2079 changesets = files = revisions = 0
2078 efiles = set()
2080 efiles = set()
2079
2081
2080 # write changelog data to temp files so concurrent readers will not see
2082 # write changelog data to temp files so concurrent readers will not see
2081 # inconsistent view
2083 # inconsistent view
2082 cl = self.changelog
2084 cl = self.changelog
2083 cl.delayupdate()
2085 cl.delayupdate()
2084 oldheads = len(cl.heads())
2086 oldheads = len(cl.heads())
2085
2087
2086 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
2088 tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)]))
2087 try:
2089 try:
2088 trp = weakref.proxy(tr)
2090 trp = weakref.proxy(tr)
2089 # pull off the changeset group
2091 # pull off the changeset group
2090 self.ui.status(_("adding changesets\n"))
2092 self.ui.status(_("adding changesets\n"))
2091 clstart = len(cl)
2093 clstart = len(cl)
2092 class prog(object):
2094 class prog(object):
2093 step = _('changesets')
2095 step = _('changesets')
2094 count = 1
2096 count = 1
2095 ui = self.ui
2097 ui = self.ui
2096 total = None
2098 total = None
2097 def __call__(self):
2099 def __call__(self):
2098 self.ui.progress(self.step, self.count, unit=_('chunks'),
2100 self.ui.progress(self.step, self.count, unit=_('chunks'),
2099 total=self.total)
2101 total=self.total)
2100 self.count += 1
2102 self.count += 1
2101 pr = prog()
2103 pr = prog()
2102 chunkiter = changegroup.chunkiter(source, progress=pr)
2104 chunkiter = changegroup.chunkiter(source, progress=pr)
2103 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2105 if cl.addgroup(chunkiter, csmap, trp) is None and not emptyok:
2104 raise util.Abort(_("received changelog group is empty"))
2106 raise util.Abort(_("received changelog group is empty"))
2105 clend = len(cl)
2107 clend = len(cl)
2106 changesets = clend - clstart
2108 changesets = clend - clstart
2107 for c in xrange(clstart, clend):
2109 for c in xrange(clstart, clend):
2108 efiles.update(self[c].files())
2110 efiles.update(self[c].files())
2109 efiles = len(efiles)
2111 efiles = len(efiles)
2110 self.ui.progress(_('changesets'), None)
2112 self.ui.progress(_('changesets'), None)
2111
2113
2112 # pull off the manifest group
2114 # pull off the manifest group
2113 self.ui.status(_("adding manifests\n"))
2115 self.ui.status(_("adding manifests\n"))
2114 pr.step = _('manifests')
2116 pr.step = _('manifests')
2115 pr.count = 1
2117 pr.count = 1
2116 pr.total = changesets # manifests <= changesets
2118 pr.total = changesets # manifests <= changesets
2117 chunkiter = changegroup.chunkiter(source, progress=pr)
2119 chunkiter = changegroup.chunkiter(source, progress=pr)
2118 # no need to check for empty manifest group here:
2120 # no need to check for empty manifest group here:
2119 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2121 # if the result of the merge of 1 and 2 is the same in 3 and 4,
2120 # no new manifest will be created and the manifest group will
2122 # no new manifest will be created and the manifest group will
2121 # be empty during the pull
2123 # be empty during the pull
2122 self.manifest.addgroup(chunkiter, revmap, trp)
2124 self.manifest.addgroup(chunkiter, revmap, trp)
2123 self.ui.progress(_('manifests'), None)
2125 self.ui.progress(_('manifests'), None)
2124
2126
2125 needfiles = {}
2127 needfiles = {}
2126 if self.ui.configbool('server', 'validate', default=False):
2128 if self.ui.configbool('server', 'validate', default=False):
2127 # validate incoming csets have their manifests
2129 # validate incoming csets have their manifests
2128 for cset in xrange(clstart, clend):
2130 for cset in xrange(clstart, clend):
2129 mfest = self.changelog.read(self.changelog.node(cset))[0]
2131 mfest = self.changelog.read(self.changelog.node(cset))[0]
2130 mfest = self.manifest.readdelta(mfest)
2132 mfest = self.manifest.readdelta(mfest)
2131 # store file nodes we must see
2133 # store file nodes we must see
2132 for f, n in mfest.iteritems():
2134 for f, n in mfest.iteritems():
2133 needfiles.setdefault(f, set()).add(n)
2135 needfiles.setdefault(f, set()).add(n)
2134
2136
2135 # process the files
2137 # process the files
2136 self.ui.status(_("adding file changes\n"))
2138 self.ui.status(_("adding file changes\n"))
2137 pr.step = 'files'
2139 pr.step = 'files'
2138 pr.count = 1
2140 pr.count = 1
2139 pr.total = efiles
2141 pr.total = efiles
2140 while 1:
2142 while 1:
2141 f = changegroup.getchunk(source)
2143 f = changegroup.getchunk(source)
2142 if not f:
2144 if not f:
2143 break
2145 break
2144 self.ui.debug("adding %s revisions\n" % f)
2146 self.ui.debug("adding %s revisions\n" % f)
2145 pr()
2147 pr()
2146 fl = self.file(f)
2148 fl = self.file(f)
2147 o = len(fl)
2149 o = len(fl)
2148 chunkiter = changegroup.chunkiter(source)
2150 chunkiter = changegroup.chunkiter(source)
2149 if fl.addgroup(chunkiter, revmap, trp) is None:
2151 if fl.addgroup(chunkiter, revmap, trp) is None:
2150 raise util.Abort(_("received file revlog group is empty"))
2152 raise util.Abort(_("received file revlog group is empty"))
2151 revisions += len(fl) - o
2153 revisions += len(fl) - o
2152 files += 1
2154 files += 1
2153 if f in needfiles:
2155 if f in needfiles:
2154 needs = needfiles[f]
2156 needs = needfiles[f]
2155 for new in xrange(o, len(fl)):
2157 for new in xrange(o, len(fl)):
2156 n = fl.node(new)
2158 n = fl.node(new)
2157 if n in needs:
2159 if n in needs:
2158 needs.remove(n)
2160 needs.remove(n)
2159 if not needs:
2161 if not needs:
2160 del needfiles[f]
2162 del needfiles[f]
2161 self.ui.progress(_('files'), None)
2163 self.ui.progress(_('files'), None)
2162
2164
2163 for f, needs in needfiles.iteritems():
2165 for f, needs in needfiles.iteritems():
2164 fl = self.file(f)
2166 fl = self.file(f)
2165 for n in needs:
2167 for n in needs:
2166 try:
2168 try:
2167 fl.rev(n)
2169 fl.rev(n)
2168 except error.LookupError:
2170 except error.LookupError:
2169 raise util.Abort(
2171 raise util.Abort(
2170 _('missing file data for %s:%s - run hg verify') %
2172 _('missing file data for %s:%s - run hg verify') %
2171 (f, hex(n)))
2173 (f, hex(n)))
2172
2174
2173 newheads = len(cl.heads())
2175 newheads = len(cl.heads())
2174 heads = ""
2176 heads = ""
2175 if oldheads and newheads != oldheads:
2177 if oldheads and newheads != oldheads:
2176 heads = _(" (%+d heads)") % (newheads - oldheads)
2178 heads = _(" (%+d heads)") % (newheads - oldheads)
2177
2179
2178 self.ui.status(_("added %d changesets"
2180 self.ui.status(_("added %d changesets"
2179 " with %d changes to %d files%s\n")
2181 " with %d changes to %d files%s\n")
2180 % (changesets, revisions, files, heads))
2182 % (changesets, revisions, files, heads))
2181
2183
2182 if changesets > 0:
2184 if changesets > 0:
2183 p = lambda: cl.writepending() and self.root or ""
2185 p = lambda: cl.writepending() and self.root or ""
2184 self.hook('pretxnchangegroup', throw=True,
2186 self.hook('pretxnchangegroup', throw=True,
2185 node=hex(cl.node(clstart)), source=srctype,
2187 node=hex(cl.node(clstart)), source=srctype,
2186 url=url, pending=p)
2188 url=url, pending=p)
2187
2189
2188 # make changelog see real files again
2190 # make changelog see real files again
2189 cl.finalize(trp)
2191 cl.finalize(trp)
2190
2192
2191 tr.close()
2193 tr.close()
2192 finally:
2194 finally:
2193 del tr
2195 del tr
2194
2196
2195 if changesets > 0:
2197 if changesets > 0:
2196 # forcefully update the on-disk branch cache
2198 # forcefully update the on-disk branch cache
2197 self.ui.debug("updating the branch cache\n")
2199 self.ui.debug("updating the branch cache\n")
2198 self.branchtags()
2200 self.branchtags()
2199 self.hook("changegroup", node=hex(cl.node(clstart)),
2201 self.hook("changegroup", node=hex(cl.node(clstart)),
2200 source=srctype, url=url)
2202 source=srctype, url=url)
2201
2203
2202 for i in xrange(clstart, clend):
2204 for i in xrange(clstart, clend):
2203 self.hook("incoming", node=hex(cl.node(i)),
2205 self.hook("incoming", node=hex(cl.node(i)),
2204 source=srctype, url=url)
2206 source=srctype, url=url)
2205
2207
2206 # never return 0 here:
2208 # never return 0 here:
2207 if newheads < oldheads:
2209 if newheads < oldheads:
2208 return newheads - oldheads - 1
2210 return newheads - oldheads - 1
2209 else:
2211 else:
2210 return newheads - oldheads + 1
2212 return newheads - oldheads + 1
2211
2213
2212
2214
2213 def stream_in(self, remote):
2215 def stream_in(self, remote):
2214 fp = remote.stream_out()
2216 fp = remote.stream_out()
2215 l = fp.readline()
2217 l = fp.readline()
2216 try:
2218 try:
2217 resp = int(l)
2219 resp = int(l)
2218 except ValueError:
2220 except ValueError:
2219 raise error.ResponseError(
2221 raise error.ResponseError(
2220 _('Unexpected response from remote server:'), l)
2222 _('Unexpected response from remote server:'), l)
2221 if resp == 1:
2223 if resp == 1:
2222 raise util.Abort(_('operation forbidden by server'))
2224 raise util.Abort(_('operation forbidden by server'))
2223 elif resp == 2:
2225 elif resp == 2:
2224 raise util.Abort(_('locking the remote repository failed'))
2226 raise util.Abort(_('locking the remote repository failed'))
2225 elif resp != 0:
2227 elif resp != 0:
2226 raise util.Abort(_('the server sent an unknown error code'))
2228 raise util.Abort(_('the server sent an unknown error code'))
2227 self.ui.status(_('streaming all changes\n'))
2229 self.ui.status(_('streaming all changes\n'))
2228 l = fp.readline()
2230 l = fp.readline()
2229 try:
2231 try:
2230 total_files, total_bytes = map(int, l.split(' ', 1))
2232 total_files, total_bytes = map(int, l.split(' ', 1))
2231 except (ValueError, TypeError):
2233 except (ValueError, TypeError):
2232 raise error.ResponseError(
2234 raise error.ResponseError(
2233 _('Unexpected response from remote server:'), l)
2235 _('Unexpected response from remote server:'), l)
2234 self.ui.status(_('%d files to transfer, %s of data\n') %
2236 self.ui.status(_('%d files to transfer, %s of data\n') %
2235 (total_files, util.bytecount(total_bytes)))
2237 (total_files, util.bytecount(total_bytes)))
2236 start = time.time()
2238 start = time.time()
2237 for i in xrange(total_files):
2239 for i in xrange(total_files):
2238 # XXX doesn't support '\n' or '\r' in filenames
2240 # XXX doesn't support '\n' or '\r' in filenames
2239 l = fp.readline()
2241 l = fp.readline()
2240 try:
2242 try:
2241 name, size = l.split('\0', 1)
2243 name, size = l.split('\0', 1)
2242 size = int(size)
2244 size = int(size)
2243 except (ValueError, TypeError):
2245 except (ValueError, TypeError):
2244 raise error.ResponseError(
2246 raise error.ResponseError(
2245 _('Unexpected response from remote server:'), l)
2247 _('Unexpected response from remote server:'), l)
2246 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2248 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
2247 # for backwards compat, name was partially encoded
2249 # for backwards compat, name was partially encoded
2248 ofp = self.sopener(store.decodedir(name), 'w')
2250 ofp = self.sopener(store.decodedir(name), 'w')
2249 for chunk in util.filechunkiter(fp, limit=size):
2251 for chunk in util.filechunkiter(fp, limit=size):
2250 ofp.write(chunk)
2252 ofp.write(chunk)
2251 ofp.close()
2253 ofp.close()
2252 elapsed = time.time() - start
2254 elapsed = time.time() - start
2253 if elapsed <= 0:
2255 if elapsed <= 0:
2254 elapsed = 0.001
2256 elapsed = 0.001
2255 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2257 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
2256 (util.bytecount(total_bytes), elapsed,
2258 (util.bytecount(total_bytes), elapsed,
2257 util.bytecount(total_bytes / elapsed)))
2259 util.bytecount(total_bytes / elapsed)))
2258 self.invalidate()
2260 self.invalidate()
2259 return len(self.heads()) + 1
2261 return len(self.heads()) + 1
2260
2262
2261 def clone(self, remote, heads=[], stream=False):
2263 def clone(self, remote, heads=[], stream=False):
2262 '''clone remote repository.
2264 '''clone remote repository.
2263
2265
2264 keyword arguments:
2266 keyword arguments:
2265 heads: list of revs to clone (forces use of pull)
2267 heads: list of revs to clone (forces use of pull)
2266 stream: use streaming clone if possible'''
2268 stream: use streaming clone if possible'''
2267
2269
2268 # now, all clients that can request uncompressed clones can
2270 # now, all clients that can request uncompressed clones can
2269 # read repo formats supported by all servers that can serve
2271 # read repo formats supported by all servers that can serve
2270 # them.
2272 # them.
2271
2273
2272 # if revlog format changes, client will have to check version
2274 # if revlog format changes, client will have to check version
2273 # and format flags on "stream" capability, and use
2275 # and format flags on "stream" capability, and use
2274 # uncompressed only if compatible.
2276 # uncompressed only if compatible.
2275
2277
2276 if stream and not heads and remote.capable('stream'):
2278 if stream and not heads and remote.capable('stream'):
2277 return self.stream_in(remote)
2279 return self.stream_in(remote)
2278 return self.pull(remote, heads)
2280 return self.pull(remote, heads)
2279
2281
2280 # used to avoid circular references so destructors work
2282 # used to avoid circular references so destructors work
2281 def aftertrans(files):
2283 def aftertrans(files):
2282 renamefiles = [tuple(t) for t in files]
2284 renamefiles = [tuple(t) for t in files]
2283 def a():
2285 def a():
2284 for src, dest in renamefiles:
2286 for src, dest in renamefiles:
2285 util.rename(src, dest)
2287 util.rename(src, dest)
2286 return a
2288 return a
2287
2289
2288 def instance(ui, path, create):
2290 def instance(ui, path, create):
2289 return localrepository(ui, util.drop_scheme('file', path), create)
2291 return localrepository(ui, util.drop_scheme('file', path), create)
2290
2292
2291 def islocal(path):
2293 def islocal(path):
2292 return True
2294 return True
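
The comments in push_addchangegroup() and push_unbundle() above, together with the addchangegroup() docstring, define the integer protocol shared by all push paths: 0 means nothing to push (or nothing changed on the remote), 1 means the push was refused or left the remote head count unchanged, values above 1 encode added heads, and negative values encode removed heads. As a quick illustration, here is a minimal standalone sketch (not part of this changeset; the helper name is made up) of how a caller might turn that integer into a message:

    def describe_push_result(ret):
        """Interpret the integer returned by push_addchangegroup()/push_unbundle().

        Encoding per the docstrings above: addchangegroup() returns 0 when
        nothing changed, 1 + added heads when heads were added, -1 - removed
        heads when heads were removed, and 1 when the head count is
        unchanged; the prepush() short-circuit surfaces as 0 or 1.
        """
        if ret == 0:
            return "nothing to push (or remote unchanged)"
        if ret == 1:
            return "pushed, remote head count unchanged (or push refused)"
        if ret > 1:
            return "pushed, %d new remote head(s)" % (ret - 1)
        return "pushed, %d remote head(s) removed" % (-ret - 1)

    # Example: describe_push_result(3) -> "pushed, 2 new remote head(s)"
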
@@ -1,231 +1,231 b''
1 % Show all commands except debug commands
1 % Show all commands except debug commands
2 add
2 add
3 addremove
3 addremove
4 annotate
4 annotate
5 archive
5 archive
6 backout
6 backout
7 bisect
7 bisect
8 branch
8 branch
9 branches
9 branches
10 bundle
10 bundle
11 cat
11 cat
12 clone
12 clone
13 commit
13 commit
14 copy
14 copy
15 diff
15 diff
16 export
16 export
17 forget
17 forget
18 grep
18 grep
19 heads
19 heads
20 help
20 help
21 identify
21 identify
22 import
22 import
23 incoming
23 incoming
24 init
24 init
25 locate
25 locate
26 log
26 log
27 manifest
27 manifest
28 merge
28 merge
29 outgoing
29 outgoing
30 parents
30 parents
31 paths
31 paths
32 pull
32 pull
33 push
33 push
34 recover
34 recover
35 remove
35 remove
36 rename
36 rename
37 resolve
37 resolve
38 revert
38 revert
39 rollback
39 rollback
40 root
40 root
41 serve
41 serve
42 showconfig
42 showconfig
43 status
43 status
44 summary
44 summary
45 tag
45 tag
46 tags
46 tags
47 tip
47 tip
48 unbundle
48 unbundle
49 update
49 update
50 verify
50 verify
51 version
51 version
52
52
53 % Show all commands that start with "a"
53 % Show all commands that start with "a"
54 add
54 add
55 addremove
55 addremove
56 annotate
56 annotate
57 archive
57 archive
58
58
59 % Do not show debug commands if there are other candidates
59 % Do not show debug commands if there are other candidates
60 diff
60 diff
61
61
62 % Show debug commands if there are no other candidates
62 % Show debug commands if there are no other candidates
63 debugancestor
63 debugancestor
64 debugcheckstate
64 debugcheckstate
65 debugcommands
65 debugcommands
66 debugcomplete
66 debugcomplete
67 debugconfig
67 debugconfig
68 debugdata
68 debugdata
69 debugdate
69 debugdate
70 debugfsinfo
70 debugfsinfo
71 debugindex
71 debugindex
72 debugindexdot
72 debugindexdot
73 debuginstall
73 debuginstall
74 debugrebuildstate
74 debugrebuildstate
75 debugrename
75 debugrename
76 debugsetparents
76 debugsetparents
77 debugstate
77 debugstate
78 debugsub
78 debugsub
79 debugwalk
79 debugwalk
80
80
81 % Do not show the alias of a debug command if there are other candidates
81 % Do not show the alias of a debug command if there are other candidates
82 % (this should hide rawcommit)
82 % (this should hide rawcommit)
83 recover
83 recover
84 remove
84 remove
85 rename
85 rename
86 resolve
86 resolve
87 revert
87 revert
88 rollback
88 rollback
89 root
89 root
90
90
91 % Show the alias of a debug command if there are no other candidates
91 % Show the alias of a debug command if there are no other candidates
92
92
93
93
94 % Show the global options
94 % Show the global options
95 --config
95 --config
96 --cwd
96 --cwd
97 --debug
97 --debug
98 --debugger
98 --debugger
99 --encoding
99 --encoding
100 --encodingmode
100 --encodingmode
101 --help
101 --help
102 --noninteractive
102 --noninteractive
103 --profile
103 --profile
104 --quiet
104 --quiet
105 --repository
105 --repository
106 --time
106 --time
107 --traceback
107 --traceback
108 --verbose
108 --verbose
109 --version
109 --version
110 -R
110 -R
111 -h
111 -h
112 -q
112 -q
113 -v
113 -v
114 -y
114 -y
115
115
116 % Show the options for the "serve" command
116 % Show the options for the "serve" command
117 --accesslog
117 --accesslog
118 --address
118 --address
119 --certificate
119 --certificate
120 --config
120 --config
121 --cwd
121 --cwd
122 --daemon
122 --daemon
123 --daemon-pipefds
123 --daemon-pipefds
124 --debug
124 --debug
125 --debugger
125 --debugger
126 --encoding
126 --encoding
127 --encodingmode
127 --encodingmode
128 --errorlog
128 --errorlog
129 --help
129 --help
130 --ipv6
130 --ipv6
131 --name
131 --name
132 --noninteractive
132 --noninteractive
133 --pid-file
133 --pid-file
134 --port
134 --port
135 --prefix
135 --prefix
136 --profile
136 --profile
137 --quiet
137 --quiet
138 --repository
138 --repository
139 --stdio
139 --stdio
140 --style
140 --style
141 --templates
141 --templates
142 --time
142 --time
143 --traceback
143 --traceback
144 --verbose
144 --verbose
145 --version
145 --version
146 --web-conf
146 --web-conf
147 -6
147 -6
148 -A
148 -A
149 -E
149 -E
150 -R
150 -R
151 -a
151 -a
152 -d
152 -d
153 -h
153 -h
154 -n
154 -n
155 -p
155 -p
156 -q
156 -q
157 -t
157 -t
158 -v
158 -v
159 -y
159 -y
160
160
161 % Show an error if we use --options with an ambiguous abbreviation
161 % Show an error if we use --options with an ambiguous abbreviation
162 hg: command 's' is ambiguous:
162 hg: command 's' is ambiguous:
163 serve showconfig status summary
163 serve showconfig status summary
164
164
165 % Show all commands + options
165 % Show all commands + options
166 add: include, exclude, dry-run
166 add: include, exclude, dry-run
167 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, include, exclude
167 annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, include, exclude
168 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd
168 clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd
169 commit: addremove, close-branch, include, exclude, message, logfile, date, user
169 commit: addremove, close-branch, include, exclude, message, logfile, date, user
170 diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude
170 diff: rev, change, text, git, nodates, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, include, exclude
171 export: output, switch-parent, rev, text, git, nodates
171 export: output, switch-parent, rev, text, git, nodates
172 forget: include, exclude
172 forget: include, exclude
173 init: ssh, remotecmd
173 init: ssh, remotecmd
174 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude
174 log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, style, template, include, exclude
175 merge: force, rev, preview
175 merge: force, rev, preview
176 pull: update, force, rev, branch, ssh, remotecmd
176 pull: update, force, rev, branch, ssh, remotecmd
177 push: force, rev, branch, ssh, remotecmd
177 push: force, rev, branch, new-branch, ssh, remotecmd
178 remove: after, force, include, exclude
178 remove: after, force, include, exclude
179 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
179 serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, templates, style, ipv6, certificate
180 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude
180 status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude
181 summary: remote
181 summary: remote
182 update: clean, check, date, rev
182 update: clean, check, date, rev
183 addremove: similarity, include, exclude, dry-run
183 addremove: similarity, include, exclude, dry-run
184 archive: no-decode, prefix, rev, type, include, exclude
184 archive: no-decode, prefix, rev, type, include, exclude
185 backout: merge, parent, rev, include, exclude, message, logfile, date, user
185 backout: merge, parent, rev, include, exclude, message, logfile, date, user
186 bisect: reset, good, bad, skip, command, noupdate
186 bisect: reset, good, bad, skip, command, noupdate
187 branch: force, clean
187 branch: force, clean
188 branches: active, closed
188 branches: active, closed
189 bundle: force, rev, branch, base, all, type, ssh, remotecmd
189 bundle: force, rev, branch, base, all, type, ssh, remotecmd
190 cat: output, rev, decode, include, exclude
190 cat: output, rev, decode, include, exclude
191 copy: after, force, include, exclude, dry-run
191 copy: after, force, include, exclude, dry-run
192 debugancestor:
192 debugancestor:
193 debugcheckstate:
193 debugcheckstate:
194 debugcommands:
194 debugcommands:
195 debugcomplete: options
195 debugcomplete: options
196 debugdata:
196 debugdata:
197 debugdate: extended
197 debugdate: extended
198 debugfsinfo:
198 debugfsinfo:
199 debugindex:
199 debugindex:
200 debugindexdot:
200 debugindexdot:
201 debuginstall:
201 debuginstall:
202 debugrebuildstate: rev
202 debugrebuildstate: rev
203 debugrename: rev
203 debugrename: rev
204 debugsetparents:
204 debugsetparents:
205 debugstate: nodates
205 debugstate: nodates
206 debugsub: rev
206 debugsub: rev
207 debugwalk: include, exclude
207 debugwalk: include, exclude
208 grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
208 grep: print0, all, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude
209 heads: rev, topo, active, closed, style, template
209 heads: rev, topo, active, closed, style, template
210 help:
210 help:
211 identify: rev, num, id, branch, tags
211 identify: rev, num, id, branch, tags
212 import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity
212 import: strip, base, force, no-commit, exact, import-branch, message, logfile, date, user, similarity
213 incoming: force, newest-first, bundle, rev, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd
213 incoming: force, newest-first, bundle, rev, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd
214 locate: rev, print0, fullpath, include, exclude
214 locate: rev, print0, fullpath, include, exclude
215 manifest: rev
215 manifest: rev
216 outgoing: force, rev, newest-first, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd
216 outgoing: force, rev, newest-first, branch, patch, git, limit, no-merges, stat, style, template, ssh, remotecmd
217 parents: rev, style, template
217 parents: rev, style, template
218 paths:
218 paths:
219 recover:
219 recover:
220 rename: after, force, include, exclude, dry-run
220 rename: after, force, include, exclude, dry-run
221 resolve: all, list, mark, unmark, no-status, include, exclude
221 resolve: all, list, mark, unmark, no-status, include, exclude
222 revert: all, date, rev, no-backup, include, exclude, dry-run
222 revert: all, date, rev, no-backup, include, exclude, dry-run
223 rollback: dry-run
223 rollback: dry-run
224 root:
224 root:
225 showconfig: untrusted
225 showconfig: untrusted
226 tag: force, local, rev, remove, edit, message, date, user
226 tag: force, local, rev, remove, edit, message, date, user
227 tags:
227 tags:
228 tip: patch, git, style, template
228 tip: patch, git, style, template
229 unbundle: update
229 unbundle: update
230 verify:
230 verify:
231 version:
231 version:
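
The expected debugcomplete output above now lists new-branch among the options for push. Each line of that listing has the form "command: opt1, opt2, ...". As an aside, a minimal, hypothetical helper (not part of this changeset) for pulling the advertised long options of a command out of such output could look like:

    def options_for(command, debugcomplete_output):
        # Scan lines such as "push: force, rev, branch, new-branch, ssh, remotecmd"
        # and return the listed options for the given command, or None if absent.
        for line in debugcomplete_output.splitlines():
            if line.startswith(command + ':'):
                rest = line.split(':', 1)[1].strip()
                return [opt.strip() for opt in rest.split(',')] if rest else []
        return None

    # Example: 'new-branch' in options_for('push', output) -> True
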
@@ -1,305 +1,317 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 echo "[extensions]" >> $HGRCPATH
3 echo "[extensions]" >> $HGRCPATH
4 echo "graphlog=" >> $HGRCPATH
4 echo "graphlog=" >> $HGRCPATH
5
5
6 mkdir a
6 mkdir a
7 cd a
7 cd a
8 hg init
8 hg init
9 echo foo > t1
9 echo foo > t1
10 hg add t1
10 hg add t1
11 hg commit -m "1" -d "1000000 0"
11 hg commit -m "1" -d "1000000 0"
12
12
13 cd ..
13 cd ..
14 hg clone a b
14 hg clone a b
15
15
16 cd a
16 cd a
17 echo foo > t2
17 echo foo > t2
18 hg add t2
18 hg add t2
19 hg commit -m "2" -d "1000000 0"
19 hg commit -m "2" -d "1000000 0"
20
20
21 cd ../b
21 cd ../b
22 echo foo > t3
22 echo foo > t3
23 hg add t3
23 hg add t3
24 hg commit -m "3" -d "1000000 0"
24 hg commit -m "3" -d "1000000 0"
25
25
26 hg push ../a
26 hg push ../a
27 hg pull ../a
27 hg pull ../a
28 hg push ../a
28 hg push ../a
29 hg merge
29 hg merge
30 hg commit -m "4" -d "1000000 0"
30 hg commit -m "4" -d "1000000 0"
31 hg push ../a
31 hg push ../a
32 cd ..
32 cd ..
33
33
34 hg init c
34 hg init c
35 cd c
35 cd c
36 for i in 0 1 2; do
36 for i in 0 1 2; do
37 echo $i >> foo
37 echo $i >> foo
38 hg ci -Am $i -d "1000000 0"
38 hg ci -Am $i -d "1000000 0"
39 done
39 done
40 cd ..
40 cd ..
41
41
42 hg clone c d
42 hg clone c d
43 cd d
43 cd d
44 for i in 0 1; do
44 for i in 0 1; do
45 hg co -C $i
45 hg co -C $i
46 echo d-$i >> foo
46 echo d-$i >> foo
47 hg ci -m d-$i -d "1000000 0"
47 hg ci -m d-$i -d "1000000 0"
48 done
48 done
49
49
50 HGMERGE=true hg merge 3
50 HGMERGE=true hg merge 3
51 hg ci -m c-d -d "1000000 0"
51 hg ci -m c-d -d "1000000 0"
52
52
53 hg push ../c; echo $?
53 hg push ../c; echo $?
54 hg push -r 2 ../c; echo $?
54 hg push -r 2 ../c; echo $?
55 hg push -r 3 ../c; echo $?
55 hg push -r 3 ../c; echo $?
56 hg push -r 3 -r 4 ../c; echo $?
56 hg push -r 3 -r 4 ../c; echo $?
57 hg push -f -r 3 -r 4 ../c; echo $?
57 hg push -f -r 3 -r 4 ../c; echo $?
58 hg push -r 5 ../c; echo $?
58 hg push -r 5 ../c; echo $?
59 hg in ../c
59 hg in ../c
60
60
61 echo % issue 450
61 echo % issue 450
62 hg init ../e
62 hg init ../e
63 hg push -r 0 ../e ; echo $?
63 hg push -r 0 ../e ; echo $?
64 hg push -r 1 ../e ; echo $?
64 hg push -r 1 ../e ; echo $?
65
65
66 cd ..
66 cd ..
67
67
68 echo % issue 736
68 echo % issue 736
69 hg init f
69 hg init f
70 cd f
70 cd f
71 hg -q branch a
71 hg -q branch a
72 echo 0 > foo
72 echo 0 > foo
73 hg -q ci -d "1000000 0" -Am 0
73 hg -q ci -d "1000000 0" -Am 0
74 echo 1 > foo
74 echo 1 > foo
75 hg -q ci -d "1000000 0" -m 1
75 hg -q ci -d "1000000 0" -m 1
76 hg -q up 0
76 hg -q up 0
77 echo 2 > foo
77 echo 2 > foo
78 hg -q ci -d "1000000 0" -m 2
78 hg -q ci -d "1000000 0" -m 2
79 hg -q up 0
79 hg -q up 0
80 hg -q branch b
80 hg -q branch b
81 echo 3 > foo
81 echo 3 > foo
82 hg -q ci -d "1000000 0" -m 3
82 hg -q ci -d "1000000 0" -m 3
83 cd ..
83 cd ..
84
84
85 hg -q clone f g
85 hg -q clone f g
86 cd g
86 cd g
87
87
88 echo % push on existing branch and new branch
88 echo % push on existing branch and new branch
89 hg -q up 1
89 hg -q up 1
90 echo 4 > foo
90 echo 4 > foo
91 hg -q ci -d "1000000 0" -m 4
91 hg -q ci -d "1000000 0" -m 4
92 hg -q up 0
92 hg -q up 0
93 echo 5 > foo
93 echo 5 > foo
94 hg -q branch c
94 hg -q branch c
95 hg -q ci -d "1000000 0" -m 5
95 hg -q ci -d "1000000 0" -m 5
96 hg push ../f; echo $?
96 hg push ../f; echo $?
97 hg push -r 4 -r 5 ../f; echo $?
97 hg push -r 4 -r 5 ../f; echo $?
98
98
99 echo % multiple new branches
99 echo % multiple new branches
100 hg -q branch d
100 hg -q branch d
101 echo 6 > foo
101 echo 6 > foo
102 hg -q ci -d "1000000 0" -m 6
102 hg -q ci -d "1000000 0" -m 6
103 hg push ../f; echo $?
103 hg push ../f; echo $?
104 hg push -r 4 -r 6 ../f; echo $?
104 hg push -r 4 -r 6 ../f; echo $?
105 cd ../g
105 cd ../g
106
106
107 echo % fail on multiple head push
107 echo % fail on multiple head push
108 hg -q up 1
108 hg -q up 1
109 echo 7 > foo
109 echo 7 > foo
110 hg -q ci -d "1000000 0" -m 7
110 hg -q ci -d "1000000 0" -m 7
111 hg push -r 4 -r 7 ../f; echo $?
111 hg push -r 4 -r 7 ../f; echo $?
112
112
113 echo % push replacement head on existing branches
113 echo % push replacement head on existing branches
114 hg -q up 3
114 hg -q up 3
115 echo 8 > foo
115 echo 8 > foo
116 hg -q ci -d "1000000 0" -m 8
116 hg -q ci -d "1000000 0" -m 8
117 hg push -r 7 -r 8 ../f; echo $?
117 hg push -r 7 -r 8 ../f; echo $?
118
118
119 echo % merge of branch a to other branch b followed by unrelated push on branch a
119 echo % merge of branch a to other branch b followed by unrelated push on branch a
120 hg -q up 7
120 hg -q up 7
121 HGMERGE=true hg -q merge 8
121 HGMERGE=true hg -q merge 8
122 hg -q ci -d "1000000 0" -m 9
122 hg -q ci -d "1000000 0" -m 9
123 hg -q up 8
123 hg -q up 8
124 echo 10 > foo
124 echo 10 > foo
125 hg -q ci -d "1000000 0" -m 10
125 hg -q ci -d "1000000 0" -m 10
126 hg push -r 9 ../f; echo $?
126 hg push -r 9 ../f; echo $?
127 hg push -r 10 ../f; echo $?
127 hg push -r 10 ../f; echo $?
128
128
129 echo % cheating the counting algorithm
129 echo % cheating the counting algorithm
130 hg -q up 9
130 hg -q up 9
131 HGMERGE=true hg -q merge 2
131 HGMERGE=true hg -q merge 2
132 hg -q ci -d "1000000 0" -m 11
132 hg -q ci -d "1000000 0" -m 11
133 hg -q up 1
133 hg -q up 1
134 echo 12 > foo
134 echo 12 > foo
135 hg -q ci -d "1000000 0" -m 12
135 hg -q ci -d "1000000 0" -m 12
136 hg push -r 11 -r 12 ../f; echo $?
136 hg push -r 11 -r 12 ../f; echo $?
137
137
138 echo % failed push of new named branch
139 echo 12 > foo
140 hg -q ci -d "1000000 0" -m 12a
141 hg -q up 11
142 echo 13 > foo
143 hg -q branch e
144 hg -q ci -d "1000000 0" -m 13d
145 hg push -r 12 -r 13 ../f; echo $?
146
147 echo % using --new-branch to push new named branch
148 hg push --new-branch -r 12 -r 13 ../f; echo $?
149
138 echo % checking prepush logic does not allow silently pushing multiple new heads
150 echo % checking prepush logic does not allow silently pushing multiple new heads
139 cd ..
151 cd ..
140 hg init h
152 hg init h
141 echo init > h/init
153 echo init > h/init
142 hg -R h ci -Am init
154 hg -R h ci -Am init
143 echo a > h/a
155 echo a > h/a
144 hg -R h ci -Am a
156 hg -R h ci -Am a
145 hg clone h i
157 hg clone h i
146 hg -R h up 0
158 hg -R h up 0
147 echo b > h/b
159 echo b > h/b
148 hg -R h ci -Am b
160 hg -R h ci -Am b
149 hg -R i up 0
161 hg -R i up 0
150 echo c > i/c
162 echo c > i/c
151 hg -R i ci -Am c
163 hg -R i ci -Am c
152 hg -R i push h
164 hg -R i push h
153 echo
165 echo
154
166
155 echo % check prepush logic with merged branches
167 echo % check prepush logic with merged branches
156 hg init j
168 hg init j
157 hg -R j branch a
169 hg -R j branch a
158 echo init > j/foo
170 echo init > j/foo
159 hg -R j ci -Am init
171 hg -R j ci -Am init
160 hg clone j k
172 hg clone j k
161 echo a1 > j/foo
173 echo a1 > j/foo
162 hg -R j ci -m a1
174 hg -R j ci -m a1
163 hg -R k branch b
175 hg -R k branch b
164 echo b > k/foo
176 echo b > k/foo
165 hg -R k ci -m b
177 hg -R k ci -m b
166 hg -R k up 0
178 hg -R k up 0
167 hg -R k merge b
179 hg -R k merge b
168 hg -R k ci -m merge
180 hg -R k ci -m merge
169 hg -R k push -r a j
181 hg -R k push -r a j
170 echo
182 echo
171
183
172 echo % prepush -r should not allow you to sneak in new heads
184 echo % prepush -r should not allow you to sneak in new heads
173 hg init l
185 hg init l
174 cd l
186 cd l
175 echo a >> foo
187 echo a >> foo
176 hg -q add foo
188 hg -q add foo
177 hg -q branch a
189 hg -q branch a
178 hg -q ci -d '0 0' -ma
190 hg -q ci -d '0 0' -ma
179 hg -q up null
191 hg -q up null
180 echo a >> foo
192 echo a >> foo
181 hg -q add foo
193 hg -q add foo
182 hg -q branch b
194 hg -q branch b
183 hg -q ci -d '0 0' -mb
195 hg -q ci -d '0 0' -mb
184 cd ..
196 cd ..
185 hg -q clone l m -u a
197 hg -q clone l m -u a
186 cd m
198 cd m
187 hg -q merge b
199 hg -q merge b
188 hg -q ci -d '0 0' -mmb
200 hg -q ci -d '0 0' -mmb
189 hg -q up 0
201 hg -q up 0
190 echo a >> foo
202 echo a >> foo
191 hg -q ci -ma2
203 hg -q ci -ma2
192 hg -q up 2
204 hg -q up 2
193 echo a >> foo
205 echo a >> foo
194 hg -q branch -f b
206 hg -q branch -f b
195 hg -q ci -d '0 0' -mb2
207 hg -q ci -d '0 0' -mb2
196 hg -q merge 3
208 hg -q merge 3
197 hg -q ci -d '0 0' -mma
209 hg -q ci -d '0 0' -mma
198 hg push ../l -b b
210 hg push ../l -b b
199 cd ..
211 cd ..
200
212
201 echo % check prepush with new branch head on former topo non-head
213 echo % check prepush with new branch head on former topo non-head
202 hg init n
214 hg init n
203 cd n
215 cd n
204 hg branch A
216 hg branch A
205 echo a >a
217 echo a >a
206 hg ci -Ama
218 hg ci -Ama
207 hg branch B
219 hg branch B
208 echo b >b
220 echo b >b
209 hg ci -Amb
221 hg ci -Amb
210 # b is now branch head of B, and a topological head
222 # b is now branch head of B, and a topological head
211 # a is now branch head of A, but not a topological head
223 # a is now branch head of A, but not a topological head
212 hg clone . inner
224 hg clone . inner
213 cd inner
225 cd inner
214 hg up B
226 hg up B
215 echo b1 >b1
227 echo b1 >b1
216 hg ci -Amb1
228 hg ci -Amb1
217 # in the clone b1 is now the head of B
229 # in the clone b1 is now the head of B
218 cd ..
230 cd ..
219 hg up 0
231 hg up 0
220 echo a2 >a2
232 echo a2 >a2
221 hg ci -Ama2
233 hg ci -Ama2
222 # a2 is now the new branch head of A, and a new topological head
234 # a2 is now the new branch head of A, and a new topological head
223 # it replaces a former inner branch head, so it should at most warn about A, not B
235 # it replaces a former inner branch head, so it should at most warn about A, not B
224 echo %% glog of local
236 echo %% glog of local
225 hg glog --template "{rev}: {branches} {desc}\n"
237 hg glog --template "{rev}: {branches} {desc}\n"
226 echo %% glog of remote
238 echo %% glog of remote
227 hg glog -R inner --template "{rev}: {branches} {desc}\n"
239 hg glog -R inner --template "{rev}: {branches} {desc}\n"
228 echo %% outgoing
240 echo %% outgoing
229 hg out inner --template "{rev}: {branches} {desc}\n"
241 hg out inner --template "{rev}: {branches} {desc}\n"
230 hg push inner
242 hg push inner
231 cd ..
243 cd ..
232
244
233 echo % check prepush with new branch head on former topo head
245 echo % check prepush with new branch head on former topo head
234 hg init o
246 hg init o
235 cd o
247 cd o
236 hg branch A
248 hg branch A
237 echo a >a
249 echo a >a
238 hg ci -Ama
250 hg ci -Ama
239 hg branch B
251 hg branch B
240 echo b >b
252 echo b >b
241 hg ci -Amb
253 hg ci -Amb
242 # b is now branch head of B, and a topological head
254 # b is now branch head of B, and a topological head
243 hg up 0
255 hg up 0
244 echo a1 >a1
256 echo a1 >a1
245 hg ci -Ama1
257 hg ci -Ama1
246 # a1 is now branch head of A, and a topological head
258 # a1 is now branch head of A, and a topological head
247 hg clone . inner
259 hg clone . inner
248 cd inner
260 cd inner
249 hg up B
261 hg up B
250 echo b1 >b1
262 echo b1 >b1
251 hg ci -Amb1
263 hg ci -Amb1
252 # in the clone b1 is now the head of B
264 # in the clone b1 is now the head of B
253 cd ..
265 cd ..
254 echo a2 >a2
266 echo a2 >a2
255 hg ci -Ama2
267 hg ci -Ama2
256 # a2 is now the new branch head of A, and a topological head
268 # a2 is now the new branch head of A, and a topological head
257 # it replaces a former topological and branch head, so this should not warn
269 # it replaces a former topological and branch head, so this should not warn
258 echo %% glog of local
270 echo %% glog of local
259 hg glog --template "{rev}: {branches} {desc}\n"
271 hg glog --template "{rev}: {branches} {desc}\n"
260 echo %% glog of remote
272 echo %% glog of remote
261 hg glog -R inner --template "{rev}: {branches} {desc}\n"
273 hg glog -R inner --template "{rev}: {branches} {desc}\n"
262 echo %% outgoing
274 echo %% outgoing
263 hg out inner --template "{rev}: {branches} {desc}\n"
275 hg out inner --template "{rev}: {branches} {desc}\n"
264 hg push inner
276 hg push inner
265 cd ..
277 cd ..
266
278
267 echo % check prepush with new branch head and new child of former branch head
279 echo % check prepush with new branch head and new child of former branch head
268 echo % but child is on different branch
280 echo % but child is on different branch
269 hg init p
281 hg init p
270 cd p
282 cd p
271 hg branch A
283 hg branch A
272 echo a0 >a
284 echo a0 >a
273 hg ci -Ama0
285 hg ci -Ama0
274 echo a1 >a
286 echo a1 >a
275 hg ci -ma1
287 hg ci -ma1
276 hg up null
288 hg up null
277 hg branch B
289 hg branch B
278 echo b0 >b
290 echo b0 >b
279 hg ci -Amb0
291 hg ci -Amb0
280 echo b1 >b
292 echo b1 >b
281 hg ci -mb1
293 hg ci -mb1
282
294
283 hg clone . inner
295 hg clone . inner
284
296
285 hg up A
297 hg up A
286 hg branch -f B
298 hg branch -f B
287 echo a3 >a
299 echo a3 >a
288 hg ci -ma3
300 hg ci -ma3
289 hg up 3
301 hg up 3
290 hg branch -f A
302 hg branch -f A
291 echo b3 >b
303 echo b3 >b
292 hg ci -mb3
304 hg ci -mb3
293
305
294 echo %% glog of local
306 echo %% glog of local
295 hg glog --template "{rev}: {branches} {desc}\n"
307 hg glog --template "{rev}: {branches} {desc}\n"
296 echo %% glog of remote
308 echo %% glog of remote
297 hg glog -R inner --template "{rev}: {branches} {desc}\n"
309 hg glog -R inner --template "{rev}: {branches} {desc}\n"
298 echo %% outgoing
310 echo %% outgoing
299 hg out inner --template "{rev}: {branches} {desc}\n"
311 hg out inner --template "{rev}: {branches} {desc}\n"
300 hg push inner
312 hg push inner
301 hg push inner -r4 -r5
313 hg push inner -r4 -r5
302 hg in inner
314 hg in inner
303 cd ..
315 cd ..
304
316
305 exit 0
317 exit 0
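The hunk added above (the '% failed push of new named branch' / '% using --new-branch to push new named branch' section) is the part of the script that exercises the new option: the plain push is expected to abort because it would create branch 'e' in ../f, and the rerun with --new-branch is expected to succeed. As a standalone illustration of the same behaviour, here is a minimal sketch, assuming an hg build that includes this change; the repository, branch and file names are illustrative and not taken from the test:

  hg init central
  cd central
  echo a > a
  hg ci -Am base                   # 'central' now has one changeset on the default branch
  cd ..
  hg clone central work
  cd work
  echo b > b
  hg ci -Am more                   # further work on the existing branch
  hg push ../central               # accepted: no new branch and no new head
  hg branch feature                # start a new named branch locally
  echo c > c
  hg ci -Am feature-work
  hg push ../central               # refused: it would create branch 'feature' on the destination
  hg push --new-branch ../central  # accepted: the new branch is created explicitly
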
@@ -1,307 +1,321 b''
1 updating to branch default
1 updating to branch default
2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
3 pushing to ../a
3 pushing to ../a
4 searching for changes
4 searching for changes
5 abort: push creates new remote heads on branch 'default'!
5 abort: push creates new remote heads on branch 'default'!
6 (you should pull and merge or use push -f to force)
6 (you should pull and merge or use push -f to force)
7 pulling from ../a
7 pulling from ../a
8 searching for changes
8 searching for changes
9 adding changesets
9 adding changesets
10 adding manifests
10 adding manifests
11 adding file changes
11 adding file changes
12 added 1 changesets with 1 changes to 1 files (+1 heads)
12 added 1 changesets with 1 changes to 1 files (+1 heads)
13 (run 'hg heads' to see heads, 'hg merge' to merge)
13 (run 'hg heads' to see heads, 'hg merge' to merge)
14 pushing to ../a
14 pushing to ../a
15 searching for changes
15 searching for changes
16 abort: push creates new remote heads on branch 'default'!
16 abort: push creates new remote heads on branch 'default'!
17 (did you forget to merge? use push -f to force)
17 (did you forget to merge? use push -f to force)
18 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
18 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
19 (branch merge, don't forget to commit)
19 (branch merge, don't forget to commit)
20 pushing to ../a
20 pushing to ../a
21 searching for changes
21 searching for changes
22 adding changesets
22 adding changesets
23 adding manifests
23 adding manifests
24 adding file changes
24 adding file changes
25 added 2 changesets with 1 changes to 1 files
25 added 2 changesets with 1 changes to 1 files
26 adding foo
26 adding foo
27 updating to branch default
27 updating to branch default
28 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
28 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
29 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
30 created new head
30 created new head
31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
31 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
32 created new head
32 created new head
33 merging foo
33 merging foo
34 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
34 0 files updated, 1 files merged, 0 files removed, 0 files unresolved
35 (branch merge, don't forget to commit)
35 (branch merge, don't forget to commit)
36 pushing to ../c
36 pushing to ../c
37 searching for changes
37 searching for changes
38 abort: push creates new remote heads on branch 'default'!
38 abort: push creates new remote heads on branch 'default'!
39 (did you forget to merge? use push -f to force)
39 (did you forget to merge? use push -f to force)
40 1
40 1
41 pushing to ../c
41 pushing to ../c
42 searching for changes
42 searching for changes
43 no changes found
43 no changes found
44 0
44 0
45 pushing to ../c
45 pushing to ../c
46 searching for changes
46 searching for changes
47 abort: push creates new remote heads on branch 'default'!
47 abort: push creates new remote heads on branch 'default'!
48 (did you forget to merge? use push -f to force)
48 (did you forget to merge? use push -f to force)
49 1
49 1
50 pushing to ../c
50 pushing to ../c
51 searching for changes
51 searching for changes
52 abort: push creates new remote heads on branch 'default'!
52 abort: push creates new remote heads on branch 'default'!
53 (did you forget to merge? use push -f to force)
53 (did you forget to merge? use push -f to force)
54 1
54 1
55 pushing to ../c
55 pushing to ../c
56 searching for changes
56 searching for changes
57 adding changesets
57 adding changesets
58 adding manifests
58 adding manifests
59 adding file changes
59 adding file changes
60 added 2 changesets with 2 changes to 1 files (+2 heads)
60 added 2 changesets with 2 changes to 1 files (+2 heads)
61 0
61 0
62 pushing to ../c
62 pushing to ../c
63 searching for changes
63 searching for changes
64 adding changesets
64 adding changesets
65 adding manifests
65 adding manifests
66 adding file changes
66 adding file changes
67 added 1 changesets with 1 changes to 1 files (-1 heads)
67 added 1 changesets with 1 changes to 1 files (-1 heads)
68 0
68 0
69 comparing with ../c
69 comparing with ../c
70 searching for changes
70 searching for changes
71 no changes found
71 no changes found
72 % issue 450
72 % issue 450
73 pushing to ../e
73 pushing to ../e
74 searching for changes
74 searching for changes
75 adding changesets
75 adding changesets
76 adding manifests
76 adding manifests
77 adding file changes
77 adding file changes
78 added 1 changesets with 1 changes to 1 files
78 added 1 changesets with 1 changes to 1 files
79 0
79 0
80 pushing to ../e
80 pushing to ../e
81 searching for changes
81 searching for changes
82 adding changesets
82 adding changesets
83 adding manifests
83 adding manifests
84 adding file changes
84 adding file changes
85 added 1 changesets with 1 changes to 1 files
85 added 1 changesets with 1 changes to 1 files
86 0
86 0
87 % issue 736
87 % issue 736
88 % push on existing branch and new branch
88 % push on existing branch and new branch
89 pushing to ../f
89 pushing to ../f
90 searching for changes
90 searching for changes
91 abort: push creates new remote branches: c!
91 abort: push creates new remote branches: c!
92 (use 'hg push -f' to force)
92 (use 'hg push --new-branch' to create new remote branches)
93 1
93 1
94 pushing to ../f
94 pushing to ../f
95 searching for changes
95 searching for changes
96 abort: push creates new remote branches: c!
96 abort: push creates new remote branches: c!
97 (use 'hg push -f' to force)
97 (use 'hg push --new-branch' to create new remote branches)
98 1
98 1
99 % multiple new branches
99 % multiple new branches
100 pushing to ../f
100 pushing to ../f
101 searching for changes
101 searching for changes
102 abort: push creates new remote branches: c, d!
102 abort: push creates new remote branches: c, d!
103 (use 'hg push -f' to force)
103 (use 'hg push --new-branch' to create new remote branches)
104 1
104 1
105 pushing to ../f
105 pushing to ../f
106 searching for changes
106 searching for changes
107 abort: push creates new remote branches: c, d!
107 abort: push creates new remote branches: c, d!
108 (use 'hg push -f' to force)
108 (use 'hg push --new-branch' to create new remote branches)
109 1
109 1
110 % fail on multiple head push
110 % fail on multiple head push
111 pushing to ../f
111 pushing to ../f
112 searching for changes
112 searching for changes
113 abort: push creates new remote heads on branch 'a'!
113 abort: push creates new remote heads on branch 'a'!
114 (did you forget to merge? use push -f to force)
114 (did you forget to merge? use push -f to force)
115 1
115 1
116 % push replacement head on existing branches
116 % push replacement head on existing branches
117 pushing to ../f
117 pushing to ../f
118 searching for changes
118 searching for changes
119 adding changesets
119 adding changesets
120 adding manifests
120 adding manifests
121 adding file changes
121 adding file changes
122 added 2 changesets with 2 changes to 1 files
122 added 2 changesets with 2 changes to 1 files
123 0
123 0
124 % merge of branch a to other branch b followed by unrelated push on branch a
124 % merge of branch a to other branch b followed by unrelated push on branch a
125 pushing to ../f
125 pushing to ../f
126 searching for changes
126 searching for changes
127 adding changesets
127 adding changesets
128 adding manifests
128 adding manifests
129 adding file changes
129 adding file changes
130 added 1 changesets with 1 changes to 1 files (-1 heads)
130 added 1 changesets with 1 changes to 1 files (-1 heads)
131 0
131 0
132 pushing to ../f
132 pushing to ../f
133 searching for changes
133 searching for changes
134 adding changesets
134 adding changesets
135 adding manifests
135 adding manifests
136 adding file changes
136 adding file changes
137 added 1 changesets with 1 changes to 1 files (+1 heads)
137 added 1 changesets with 1 changes to 1 files (+1 heads)
138 0
138 0
139 % cheating the counting algorithm
139 % cheating the counting algorithm
140 pushing to ../f
140 pushing to ../f
141 searching for changes
141 searching for changes
142 adding changesets
142 adding changesets
143 adding manifests
143 adding manifests
144 adding file changes
144 adding file changes
145 added 2 changesets with 2 changes to 1 files
145 added 2 changesets with 2 changes to 1 files
146 0
146 0
147 % failed push of new named branch
148 pushing to ../f
149 searching for changes
150 abort: push creates new remote branches: e!
151 (use 'hg push --new-branch' to create new remote branches)
152 1
153 % using --new-branch to push new named branch
154 pushing to ../f
155 searching for changes
156 adding changesets
157 adding manifests
158 adding file changes
159 added 1 changesets with 1 changes to 1 files
160 0
147 % checking prepush logic does not allow silently pushing multiple new heads
161 % checking prepush logic does not allow silently pushing multiple new heads
148 adding init
162 adding init
149 adding a
163 adding a
150 updating to branch default
164 updating to branch default
151 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
165 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
152 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
166 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
153 adding b
167 adding b
154 created new head
168 created new head
155 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
169 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
156 adding c
170 adding c
157 created new head
171 created new head
158 pushing to h
172 pushing to h
159 searching for changes
173 searching for changes
160 abort: push creates new remote heads on branch 'default'!
174 abort: push creates new remote heads on branch 'default'!
161 (you should pull and merge or use push -f to force)
175 (you should pull and merge or use push -f to force)
162
176
163 % check prepush logic with merged branches
177 % check prepush logic with merged branches
164 marked working directory as branch a
178 marked working directory as branch a
165 adding foo
179 adding foo
166 updating to branch a
180 updating to branch a
167 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
181 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
168 marked working directory as branch b
182 marked working directory as branch b
169 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
183 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
170 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
184 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
171 (branch merge, don't forget to commit)
185 (branch merge, don't forget to commit)
172 pushing to j
186 pushing to j
173 searching for changes
187 searching for changes
174 abort: push creates new remote branches: b!
188 abort: push creates new remote branches: b!
175 (use 'hg push -f' to force)
189 (use 'hg push --new-branch' to create new remote branches)
176
190
177 % prepush -r should not allow you to sneak in new heads
191 % prepush -r should not allow you to sneak in new heads
178 pushing to ../l
192 pushing to ../l
179 searching for changes
193 searching for changes
180 abort: push creates new remote heads on branch 'a'!
194 abort: push creates new remote heads on branch 'a'!
181 (did you forget to merge? use push -f to force)
195 (did you forget to merge? use push -f to force)
182 % check prepush with new branch head on former topo non-head
196 % check prepush with new branch head on former topo non-head
183 marked working directory as branch A
197 marked working directory as branch A
184 adding a
198 adding a
185 marked working directory as branch B
199 marked working directory as branch B
186 adding b
200 adding b
187 updating to branch B
201 updating to branch B
188 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
202 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
189 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
203 0 files updated, 0 files merged, 0 files removed, 0 files unresolved
190 adding b1
204 adding b1
191 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
205 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
192 adding a2
206 adding a2
193 %% glog of local
207 %% glog of local
194 @ 2: A a2
208 @ 2: A a2
195 |
209 |
196 | o 1: B b
210 | o 1: B b
197 |/
211 |/
198 o 0: A a
212 o 0: A a
199
213
200 %% glog of remote
214 %% glog of remote
201 @ 2: B b1
215 @ 2: B b1
202 |
216 |
203 o 1: B b
217 o 1: B b
204 |
218 |
205 o 0: A a
219 o 0: A a
206
220
207 %% outgoing
221 %% outgoing
208 comparing with inner
222 comparing with inner
209 searching for changes
223 searching for changes
210 2: A a2
224 2: A a2
211 pushing to inner
225 pushing to inner
212 searching for changes
226 searching for changes
213 adding changesets
227 adding changesets
214 adding manifests
228 adding manifests
215 adding file changes
229 adding file changes
216 added 1 changesets with 1 changes to 1 files (+1 heads)
230 added 1 changesets with 1 changes to 1 files (+1 heads)
217 % check prepush with new branch head on former topo head
231 % check prepush with new branch head on former topo head
218 marked working directory as branch A
232 marked working directory as branch A
219 adding a
233 adding a
220 marked working directory as branch B
234 marked working directory as branch B
221 adding b
235 adding b
222 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
236 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
223 adding a1
237 adding a1
224 updating to branch A
238 updating to branch A
225 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
239 2 files updated, 0 files merged, 0 files removed, 0 files unresolved
226 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
240 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
227 adding b1
241 adding b1
228 adding a2
242 adding a2
229 %% glog of local
243 %% glog of local
230 @ 3: A a2
244 @ 3: A a2
231 |
245 |
232 o 2: A a1
246 o 2: A a1
233 |
247 |
234 | o 1: B b
248 | o 1: B b
235 |/
249 |/
236 o 0: A a
250 o 0: A a
237
251
238 %% glog of remote
252 %% glog of remote
239 @ 3: B b1
253 @ 3: B b1
240 |
254 |
241 | o 2: A a1
255 | o 2: A a1
242 | |
256 | |
243 o | 1: B b
257 o | 1: B b
244 |/
258 |/
245 o 0: A a
259 o 0: A a
246
260
247 %% outgoing
261 %% outgoing
248 comparing with inner
262 comparing with inner
249 searching for changes
263 searching for changes
250 3: A a2
264 3: A a2
251 pushing to inner
265 pushing to inner
252 searching for changes
266 searching for changes
253 adding changesets
267 adding changesets
254 adding manifests
268 adding manifests
255 adding file changes
269 adding file changes
256 added 1 changesets with 1 changes to 1 files
270 added 1 changesets with 1 changes to 1 files
257 % check prepush with new branch head and new child of former branch head
271 % check prepush with new branch head and new child of former branch head
258 % but child is on different branch
272 % but child is on different branch
259 marked working directory as branch A
273 marked working directory as branch A
260 adding a
274 adding a
261 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
275 0 files updated, 0 files merged, 1 files removed, 0 files unresolved
262 marked working directory as branch B
276 marked working directory as branch B
263 adding b
277 adding b
264 updating to branch B
278 updating to branch B
265 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
279 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
266 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
280 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
267 marked working directory as branch B
281 marked working directory as branch B
268 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
282 1 files updated, 0 files merged, 1 files removed, 0 files unresolved
269 marked working directory as branch A
283 marked working directory as branch A
270 %% glog of local
284 %% glog of local
271 @ 5: A b3
285 @ 5: A b3
272 |
286 |
273 | o 4: B a3
287 | o 4: B a3
274 | |
288 | |
275 o | 3: B b1
289 o | 3: B b1
276 | |
290 | |
277 o | 2: B b0
291 o | 2: B b0
278 /
292 /
279 o 1: A a1
293 o 1: A a1
280 |
294 |
281 o 0: A a0
295 o 0: A a0
282
296
283 %% glog of remote
297 %% glog of remote
284 @ 3: B b1
298 @ 3: B b1
285 |
299 |
286 o 2: B b0
300 o 2: B b0
287
301
288 o 1: A a1
302 o 1: A a1
289 |
303 |
290 o 0: A a0
304 o 0: A a0
291
305
292 %% outgoing
306 %% outgoing
293 comparing with inner
307 comparing with inner
294 searching for changes
308 searching for changes
295 4: B a3
309 4: B a3
296 5: A b3
310 5: A b3
297 pushing to inner
311 pushing to inner
298 searching for changes
312 searching for changes
299 abort: push creates new remote heads on branch 'A'!
313 abort: push creates new remote heads on branch 'A'!
300 (did you forget to merge? use push -f to force)
314 (did you forget to merge? use push -f to force)
301 pushing to inner
315 pushing to inner
302 searching for changes
316 searching for changes
303 abort: push creates new remote heads on branch 'A'!
317 abort: push creates new remote heads on branch 'A'!
304 (did you forget to merge? use push -f to force)
318 (did you forget to merge? use push -f to force)
305 comparing with inner
319 comparing with inner
306 searching for changes
320 searching for changes
307 no changes found
321 no changes found
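The expected output above also keeps the older refusal for the other case, a new head on an already-existing branch, whose hint still points at pulling and merging (or forcing) rather than at --new-branch. A minimal sketch of that case, under the same assumptions as the sketch after the script (illustrative names, an hg build with this change):

  hg init remote2
  cd remote2
  echo a > a
  hg ci -Am r0
  echo b > b
  hg ci -Am r1                     # 'remote2' now has two changesets on its default branch
  cd ..
  hg clone -r 0 remote2 local2     # clone only the first changeset
  cd local2
  echo c > c
  hg ci -Am local-change           # local head diverges from remote2's r1
  hg push ../remote2               # refused: it would add a second head on branch 'default'
                                   # (the hint suggests pulling and merging, or forcing with -f)
  hg pull ../remote2               # fetch r1, giving two local heads ...
  hg merge                         # ... merge them (the changes touch disjoint files) ...
  hg ci -m merge
  hg push ../remote2               # ... and the push of the merged head is accepted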