Merge with crew.
Bryan O'Sullivan - r4494:649dd249 merge default
@@ -1,2300 +1,2300 @@
# queue.py - patch queues for mercurial
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

'''patch management and development

This extension lets you work with a stack of patches in a Mercurial
repository. It manages two stacks of patches - all known patches, and
applied patches (subset of known patches).

Known patches are represented as patch files in the .hg/patches
directory. Applied patches are both patch files and changesets.

Common tasks (use "hg help command" for more details):

prepare repository to work with patches   qinit
create new patch                          qnew
import existing patch                     qimport

print patch series                        qseries
print applied patches                     qapplied
print name of top applied patch           qtop

add known patch to applied stack          qpush
remove patch from applied stack           qpop
refresh contents of top applied patch     qrefresh
'''
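
# Illustrative example (not part of the original file): a typical session with
# the commands listed above might look like the following shell transcript;
# the patch name is hypothetical.
#
#   $ hg qinit                 # prepare the repository (creates .hg/patches)
#   $ hg qnew fix-typo.patch   # start a new patch on top of the applied stack
#   $ hg qrefresh              # record working-directory changes into it
#   $ hg qseries               # list all known patches
#   $ hg qpop                  # unapply the top patch
#   $ hg qpush                 # apply it again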

from mercurial.i18n import _
from mercurial import commands, cmdutil, hg, patch, revlog, util, changegroup
import os, sys, re, errno

commands.norepo += " qclone qversion"

# Patch names look like unix-file names.
# They must be joinable with the queue directory and result in the patch path.
normname = util.normpath

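# Illustrative note (an assumption, not stated in the original file): in this
# era of Mercurial, util.normpath replaces OS-specific path separators with
# '/', so a Windows-style name such as 'fixes\\typo.patch' would become
# 'fixes/typo.patch' before being joined with the queue directory.
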
class statusentry:
    def __init__(self, rev, name=None):
        if not name:
            fields = rev.split(':', 1)
            if len(fields) == 2:
                self.rev, self.name = fields
            else:
                self.rev, self.name = None, None
        else:
            self.rev, self.name = rev, name

    def __str__(self):
        return self.rev + ':' + self.name

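# Illustrative sketch (not part of the original file): each statusentry models
# one line of the .hg/patches/status file, which records an applied patch as
# "<changeset hex>:<patch name>". For example (hypothetical hash),
#
#   statusentry('0123456789abcdef0123456789abcdef01234567:fix-typo.patch')
#
# splits on the first ':' into rev='0123...4567' and name='fix-typo.patch'.
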
class queue:
    def __init__(self, ui, path, patchdir=None):
        self.basepath = path
        self.path = patchdir or os.path.join(path, "patches")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.applied = []
        self.full_series = []
        self.applied_dirty = 0
        self.series_dirty = 0
        self.series_path = "series"
        self.status_path = "status"
        self.guards_path = "guards"
        self.active_guards = None
        self.guards_dirty = False
        self._diffopts = None

        if os.path.exists(self.join(self.series_path)):
            self.full_series = self.opener(self.series_path).read().splitlines()
        self.parse_series()

        if os.path.exists(self.join(self.status_path)):
            lines = self.opener(self.status_path).read().splitlines()
            self.applied = [statusentry(l) for l in lines]

    def diffopts(self):
        if self._diffopts is None:
            self._diffopts = patch.diffopts(self.ui)
        return self._diffopts

    def join(self, *p):
        return os.path.join(self.path, *p)

    def find_series(self, patch):
        pre = re.compile("(\s*)([^#]+)")
        index = 0
        for l in self.full_series:
            m = pre.match(l)
            if m:
                s = m.group(2)
                s = s.rstrip()
                if s == patch:
                    return index
            index += 1
        return None

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')

    def parse_series(self):
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.series_path)))
                self.series.append(patch)
                self.series_guards.append(self.guard_re.findall(comment))

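    # Illustrative sketch (not part of the original file): parse_series reads
    # the series file, where a leading '#' starts a comment line and guard
    # annotations follow a patch name. A hypothetical series file such as
    #
    #   # patches for the next release
    #   fix-typo.patch
    #   experimental.patch #+devel
    #   windows-only.patch #-posix
    #
    # yields series = ['fix-typo.patch', 'experimental.patch',
    # 'windows-only.patch'] and series_guards = [[], ['+devel'], ['-posix']].
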
    def check_guard(self, guard):
        bad_chars = '# \t\r\n\f'
        first = guard[0]
        for c in '-+':
            if first == c:
                return (_('guard %r starts with invalid character: %r') %
                        (guard, c))
        for c in bad_chars:
            if c in guard:
                return _('invalid character in guard %r: %r') % (guard, c)

    def set_active(self, guards):
        for guard in guards:
            bad = self.check_guard(guard)
            if bad:
                raise util.Abort(bad)
        guards = dict.fromkeys(guards).keys()
        guards.sort()
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.active_guards = guards
        self.guards_dirty = True

    def active(self):
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                if err.errno != errno.ENOENT: raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards

    def set_guards(self, idx, guards):
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True

    def pushable(self, idx):
        if isinstance(idx, str):
            idx = self.series.index(idx)
        patchguards = self.series_guards[idx]
        if not patchguards:
            return True, None
        default = False
        guards = self.active()
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
        if exactneg:
            return False, exactneg[0]
        pos = [g for g in patchguards if g[0] == '+']
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, exactpos[0]
            return False, pos
        return True, ''

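    # Illustrative sketch (not part of the original file): continuing the
    # hypothetical series above, if 'devel' is the only active guard (e.g. set
    # via 'hg qselect devel'), pushable() returns (True, None) for
    # fix-typo.patch (no guards), (True, '+devel') for experimental.patch
    # (matching positive guard) and (True, '') for windows-only.patch (its
    # negative guard is not active). With 'posix' selected instead,
    # windows-only.patch yields (False, '-posix') and experimental.patch
    # yields (False, ['+devel']), since none of its positive guards match.
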
    def explain_pushable(self, idx, all_patches=False):
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])

    def save_dirty(self):
        def write_list(items, path):
            fp = self.opener(path, 'w')
            for i in items:
                print >> fp, i
            fp.close()
        if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
        if self.series_dirty: write_list(self.full_series, self.series_path)
        if self.guards_dirty: write_list(self.active_guards, self.guards_path)

    def readheaders(self, patch):
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            while lines:
                l = lines[-1]
                if re.match('\s*$', l):
                    del lines[-1]
                else:
                    break

        pf = self.join(patch)
        message = []
        comments = []
        user = None
        date = None
        format = None
        subject = None
        diffstart = 0

        for line in file(pf):
            line = line.rstrip()
            if line.startswith('diff --git'):
                diffstart = 2
                break
            if diffstart:
                if line.startswith('+++ '):
                    diffstart = 2
                break
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)
        return (message, comments, user, date, diffstart > 1)

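    # Illustrative sketch (not part of the original file): readheaders
    # recognizes patches produced by "hg export", whose header looks roughly
    # like the following (hashes, names and dates are hypothetical):
    #
    #   # HG changeset patch
    #   # User Jane Doe <jane@example.com>
    #   # Date 1178000000 25200
    #   fix a typo in the docs
    #
    #   diff -r 0123456789ab -r ba9876543210 doc/readme.txt
    #   ...
    #
    # It also accepts mail-style "From:"/"Subject:" headers, and otherwise
    # treats leading free text before the diff as the patch message.
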
    def removeundo(self, repo):
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn('error removing undo: %s\n' % str(inst))

    def printdiff(self, repo, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)

        patch.diff(repo, node1, node2, fns, match=matchfn,
                   fp=fp, changes=changes, opts=self.diffopts())

    def mergeone(self, repo, mergeq, head, patch, rev, wlock):
        # first try just applying the patch
        (err, n) = self.apply(repo, [ patch ], update_status=False,
                              strict=True, merge=rev, wlock=wlock)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn("patch didn't work out, merging %s\n" % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head, wlock=wlock)
        self.strip(repo, n, update=False, backup='strip', wlock=wlock)

        ctx = repo.changectx(rev)
        ret = hg.merge(repo, rev, wlock=wlock)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        n = repo.commit(None, ctx.description(), ctx.user(),
                        force=1, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        try:
            message, comments, user, date, patchfound = mergeq.readheaders(patch)
        except:
            raise util.Abort(_("unable to read %s") % patch)

        patchf = self.opener(patch, "w")
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)
        self.printdiff(repo, head, n, fp=patchf)
        patchf.close()
        self.removeundo(repo)
        return (0, n)

    def qparents(self, repo, rev=None):
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == revlog.nullid:
                return p1
            if len(self.applied) == 0:
                return None
            return revlog.bin(self.applied[-1].rev)
        pp = repo.changelog.parents(rev)
        if pp[1] != revlog.nullid:
            arevs = [ x.rev for x in self.applied ]
            p0 = revlog.hex(pp[0])
            p1 = revlog.hex(pp[1])
            if p0 in arevs:
                return pp[0]
            if p1 in arevs:
                return pp[1]
        return pp[0]

    def mergepatch(self, repo, mergeq, series, wlock):
        if len(self.applied) == 0:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
                            wlock=wlock)
            self.removeundo(repo)
            self.applied.append(statusentry(revlog.hex(n), pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn("patch %s does not exist\n" % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn("patch %s is not applied\n" % patch)
                return (1, None)
            rev = revlog.bin(info[1])
            (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
            if head:
                self.applied.append(statusentry(revlog.hex(head), patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        self.save_dirty()
        return (0, head)

    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: file name of patch'''
        files = {}
        try:
            fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
                               files=files)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn("patch failed, unable to continue (try -v)\n")
            return (False, files, False)

        return (True, files, fuzz)

    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, wlock=None,
              all_files={}):
        tr = repo.transaction()
        try:
            ret = self._apply(tr, repo, series, list, update_status,
                              strict, patchdir, merge, wlock,
                              all_files=all_files)
            tr.close()
            self.save_dirty()
            return ret
        except:
            try:
                tr.abort()
            finally:
                repo.reload()
                repo.wreload()
            raise

    def _apply(self, tr, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, wlock=None,
               all_files={}):
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.warn("applying %s\n" % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                message, comments, user, date, patchfound = self.readheaders(patchname)
            except:
                self.ui.warn("Unable to read %s\n" % patchname)
                err = 1
                break

            if not message:
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            (patcherr, files, fuzz) = self.patch(repo, pf)
            all_files.update(files)
            patcherr = not patcherr

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.exists(repo.dirstate.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                repo.dirstate.update(repo.dirstate.filterfiles(removed), 'r')
                repo.dirstate.update(repo.dirstate.filterfiles(merged), 'm')
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)
            files = patch.updatedir(self.ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, force=1, lock=lock,
                            wlock=wlock)

            if n == None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(revlog.hex(n), patchname))

            if patcherr:
                if not patchfound:
                    self.ui.warn("patch %s is empty\n" % patchname)
                    err = 0
                else:
                    self.ui.warn("patch failed, rejects left in working dir\n")
                    err = 1
                break

            if fuzz and strict:
                self.ui.warn("fuzz found when applying patch, stopping\n")
                err = 1
                break
        self.removeundo(repo)
        return (err, n)

    def delete(self, repo, patches, opts):
        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        appliedbase = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts['rev'])
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            for rev in revs:
                if appliedbase >= len(self.applied):
                    raise util.Abort(_("revision %d is not managed") % rev)

                base = revlog.bin(self.applied[appliedbase].rev)
                node = repo.changelog.node(rev)
                if node != base:
                    raise util.Abort(_("cannot delete revision %d above "
                                       "applied patches") % rev)
                realpatches.append(self.applied[appliedbase].name)
                appliedbase += 1

        if not opts.get('keep'):
            r = self.qrepo()
            if r:
                r.remove(realpatches, True)
            else:
                for p in realpatches:
                    os.unlink(self.join(p))

        if appliedbase:
            del self.applied[:appliedbase]
            self.applied_dirty = 1
        indices = [self.find_series(p) for p in realpatches]
        indices.sort()
        for i in indices[-1::-1]:
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1

    def check_toppatch(self, repo):
        if len(self.applied) > 0:
            top = revlog.bin(self.applied[-1].rev)
            pp = repo.dirstate.parents()
            if top not in pp:
                raise util.Abort(_("queue top not at same revision as working directory"))
            return top
        return None
    def check_localchanges(self, repo, force=False, refresh=True):
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            if not force:
                if refresh:
                    raise util.Abort(_("local changes found, refresh first"))
                else:
                    raise util.Abort(_("local changes found"))
        return m, a, r, d
    def new(self, repo, patch, msg=None, force=None):
        if os.path.exists(self.join(patch)):
            raise util.Abort(_('patch "%s" already exists') % patch)
        m, a, r, d = self.check_localchanges(repo, force)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        wlock = repo.wlock()
        insert = self.full_series_end()
        if msg:
            n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
                            wlock=wlock)
        else:
            n = repo.commit(commitfiles,
                            "New patch: %s" % patch, force=True, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        self.full_series[insert:insert] = [patch]
        self.applied.append(statusentry(revlog.hex(n), patch))
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        p = self.opener(patch, "w")
        if msg:
            msg = msg + "\n"
            p.write(msg)
        p.close()
        wlock = None
        r = self.qrepo()
        if r: r.add([patch])
        if commitfiles:
            self.refresh(repo, short=True)
        self.removeundo(repo)

    def strip(self, repo, rev, update=True, backup="all", wlock=None):
        def limitheads(chlog, stop):
            """return the list of all nodes that have no children"""
            p = {}
            h = []
            stoprev = 0
            if stop in chlog.nodemap:
                stoprev = chlog.rev(stop)

            for r in xrange(chlog.count() - 1, -1, -1):
                n = chlog.node(r)
                if n not in p:
                    h.append(n)
                if n == stop:
                    break
                if r < stoprev:
                    break
                for pn in chlog.parents(n):
                    p[pn] = 1
            return h

        def bundle(cg):
            backupdir = repo.join("strip-backup")
            if not os.path.isdir(backupdir):
                os.mkdir(backupdir)
            name = os.path.join(backupdir, "%s" % revlog.short(rev))
            name = savename(name)
            self.ui.warn("saving bundle to %s\n" % name)
            return changegroup.writebundle(cg, name, "HG10BZ")

        def stripall(revnum):
            mm = repo.changectx(rev).manifest()
            seen = {}

            for x in xrange(revnum, repo.changelog.count()):
                for f in repo.changectx(x).files():
                    if f in seen:
                        continue
                    seen[f] = 1
                    if f in mm:
                        filerev = mm[f]
                    else:
                        filerev = 0
                    seen[f] = filerev
            # we go in two steps here so the strip loop happens in a
            # sensible order. When stripping many files, this helps keep
            # our disk access patterns under control.
            seen_list = seen.keys()
            seen_list.sort()
            for f in seen_list:
                ff = repo.file(f)
                filerev = seen[f]
                if filerev != 0:
                    if filerev in ff.nodemap:
                        filerev = ff.rev(filerev)
                    else:
                        filerev = 0
                ff.strip(filerev, revnum)

        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        chlog = repo.changelog
        # TODO delete the undo files, and handle undo of merge sets
        pp = chlog.parents(rev)
        revnum = chlog.rev(rev)

        if update:
            self.check_localchanges(repo, refresh=False)
            urev = self.qparents(repo, rev)
            hg.clean(repo, urev, wlock=wlock)
            repo.dirstate.write()

        # save is a list of all the branches we are truncating away
        # that we actually want to keep. changegroup will be used
        # to preserve them and add them back after the truncate
        saveheads = []
        savebases = {}

        heads = limitheads(chlog, rev)
        seen = {}

        # search through all the heads, finding those where the revision
        # we want to strip away is an ancestor. Also look for merges
        # that might be turned into new heads by the strip.
        while heads:
            h = heads.pop()
            n = h
            while True:
                seen[n] = 1
                pp = chlog.parents(n)
                if pp[1] != revlog.nullid:
                    for p in pp:
                        if chlog.rev(p) > revnum and p not in seen:
                            heads.append(p)
                if pp[0] == revlog.nullid:
                    break
                if chlog.rev(pp[0]) < revnum:
                    break
                n = pp[0]
                if n == rev:
                    break
            r = chlog.reachable(h, rev)
            if rev not in r:
                saveheads.append(h)
                for x in r:
                    if chlog.rev(x) > revnum:
                        savebases[x] = 1

        # create a changegroup for all the branches we need to keep
        if backup == "all":
            backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
            bundle(backupch)
        if saveheads:
            backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
            chgrpfile = bundle(backupch)

        stripall(revnum)

        change = chlog.read(rev)
        chlog.strip(revnum, revnum)
        repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
        self.removeundo(repo)
        if saveheads:
            self.ui.status("adding branch\n")
            commands.unbundle(self.ui, repo, "file:%s" % chgrpfile,
                              update=False)
            if backup != "strip":
                os.unlink(chgrpfile)

    def isapplied(self, patch):
        """returns (index, rev, patch)"""
        for i in xrange(len(self.applied)):
            a = self.applied[i]
            if a.name == patch:
                return (i, a.rev, a.name)
        return None

    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        patch = patch and str(patch)

        def partial_name(s):
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None
        if patch == None:
            return None

        # we don't want to return a partial match until we make
        # sure the file name passed in does not exist (checked below)
        res = partial_name(patch)
        if res and res == patch:
            return res

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                if sno < len(self.series):
                    return self.series[sno]
            if not strict:
                # return any partial match made above
                if res:
                    return res
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partial_name(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partial_name(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus+1:] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)

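    # Illustrative sketch (not part of the original file): with a hypothetical
    # series ['alpha.patch', 'beta.patch', 'gamma.patch'] and the first two
    # applied, lookup('1') returns 'beta.patch' (offset into the series file),
    # lookup('gam') returns 'gamma.patch' (unique substring), and
    # lookup('beta.patch+1') / lookup('beta.patch-1') return 'gamma.patch' /
    # 'alpha.patch' (relative offsets). The symbolic names 'qtip' and 'qbase'
    # resolve to the top applied patch and the first patch in the series.
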
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None, wlock=None):
        if not wlock:
            wlock = repo.wlock()
        patch = self.lookup(patch)
        # Suppose our series file is: A B C and the current 'top' patch is B.
        # qpush C should be performed (moving forward)
        # qpush B is a NOP (no change)
        # qpush A is an error (can't go backwards with qpush)
        if patch:
            info = self.isapplied(patch)
            if info:
                if info[0] < len(self.applied) - 1:
                    raise util.Abort(_("cannot push to a previous patch: %s") %
                                     patch)
                if info[0] < len(self.series) - 1:
                    self.ui.warn(_('qpush: %s is already at the top\n') % patch)
                else:
                    self.ui.warn(_('all patches are currently applied\n'))
                return

        # Following the above example, starting at 'top' of B:
        # qpush should be performed (pushes C), but a subsequent qpush without
        # an argument is an error (nothing to apply). This allows a loop
        # of "...while hg qpush..." to work as it detects an error when done
        if self.series_end() == len(self.series):
            self.ui.warn(_('patch series already fully applied\n'))
            return 1
        if not force:
            self.check_localchanges(repo)

        self.applied_dirty = 1;
        start = self.series_end()
        if start > 0:
            self.check_toppatch(repo)
        if not patch:
            patch = self.series[start]
            end = start + 1
        else:
            end = self.series.index(patch, start) + 1
        s = self.series[start:end]
        all_files = {}
        try:
            if mergeq:
                ret = self.mergepatch(repo, mergeq, s, wlock)
            else:
                ret = self.apply(repo, s, list, wlock=wlock,
                                 all_files=all_files)
        except:
            self.ui.warn(_('cleaning up working directory...'))
            node = repo.dirstate.parents()[0]
            hg.revert(repo, node, None, wlock)
            unknown = repo.status(wlock=wlock)[4]
            # only remove unknown files that we know we touched or
            # created while patching
            for f in unknown:
                if f in all_files:
                    util.unlink(repo.wjoin(f))
            self.ui.warn(_('done\n'))
            raise
        top = self.applied[-1].name
        if ret[0]:
            self.ui.write("Errors during apply, please fix and refresh %s\n" %
                          top)
        else:
            self.ui.write("Now at: %s\n" % top)
        return ret[0]

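    # Illustrative note (not part of the original file): the return value of
    # push() is what makes the loop mentioned in the comments above work, e.g.
    #
    #   $ while hg qpush; do make test || break; done
    #
    # qpush reports an error once the series is fully applied, which ends the
    # loop.
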
910 def pop(self, repo, patch=None, force=False, update=True, all=False,
910 def pop(self, repo, patch=None, force=False, update=True, all=False,
911 wlock=None):
911 wlock=None):
912 def getfile(f, rev):
912 def getfile(f, rev):
913 t = repo.file(f).read(rev)
913 t = repo.file(f).read(rev)
914 repo.wfile(f, "w").write(t)
914 repo.wfile(f, "w").write(t)
915
915
916 if not wlock:
916 if not wlock:
917 wlock = repo.wlock()
917 wlock = repo.wlock()
918 if patch:
918 if patch:
919 # index, rev, patch
919 # index, rev, patch
920 info = self.isapplied(patch)
920 info = self.isapplied(patch)
921 if not info:
921 if not info:
922 patch = self.lookup(patch)
922 patch = self.lookup(patch)
923 info = self.isapplied(patch)
923 info = self.isapplied(patch)
924 if not info:
924 if not info:
925 raise util.Abort(_("patch %s is not applied") % patch)
925 raise util.Abort(_("patch %s is not applied") % patch)
926
926
927 if len(self.applied) == 0:
927 if len(self.applied) == 0:
928 # Allow qpop -a to work repeatedly,
928 # Allow qpop -a to work repeatedly,
929 # but not qpop without an argument
929 # but not qpop without an argument
930 self.ui.warn(_("no patches applied\n"))
930 self.ui.warn(_("no patches applied\n"))
931 return not all
931 return not all
932
932
933 if not update:
933 if not update:
934 parents = repo.dirstate.parents()
934 parents = repo.dirstate.parents()
935 rr = [ revlog.bin(x.rev) for x in self.applied ]
935 rr = [ revlog.bin(x.rev) for x in self.applied ]
936 for p in parents:
936 for p in parents:
937 if p in rr:
937 if p in rr:
938 self.ui.warn("qpop: forcing dirstate update\n")
938 self.ui.warn("qpop: forcing dirstate update\n")
939 update = True
939 update = True
940
940
941 if not force and update:
941 if not force and update:
942 self.check_localchanges(repo)
942 self.check_localchanges(repo)
943
943
944 self.applied_dirty = 1;
944 self.applied_dirty = 1;
945 end = len(self.applied)
945 end = len(self.applied)
946 if not patch:
946 if not patch:
947 if all:
947 if all:
948 popi = 0
948 popi = 0
949 else:
949 else:
950 popi = len(self.applied) - 1
950 popi = len(self.applied) - 1
951 else:
951 else:
952 popi = info[0] + 1
952 popi = info[0] + 1
953 if popi >= end:
953 if popi >= end:
954 self.ui.warn("qpop: %s is already at the top\n" % patch)
954 self.ui.warn("qpop: %s is already at the top\n" % patch)
955 return
955 return
956 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
956 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
957
957
958 start = info[0]
958 start = info[0]
959 rev = revlog.bin(info[1])
959 rev = revlog.bin(info[1])
960
960
961 # we know there are no local changes, so we can make a simplified
961 # we know there are no local changes, so we can make a simplified
962 # form of hg.update.
962 # form of hg.update.
963 if update:
963 if update:
964 top = self.check_toppatch(repo)
964 top = self.check_toppatch(repo)
965 qp = self.qparents(repo, rev)
965 qp = self.qparents(repo, rev)
966 changes = repo.changelog.read(qp)
966 changes = repo.changelog.read(qp)
967 mmap = repo.manifest.read(changes[0])
967 mmap = repo.manifest.read(changes[0])
968 m, a, r, d, u = repo.status(qp, top)[:5]
968 m, a, r, d, u = repo.status(qp, top)[:5]
969 if d:
969 if d:
970 raise util.Abort("deletions found between repo revs")
970 raise util.Abort("deletions found between repo revs")
971 for f in m:
971 for f in m:
972 getfile(f, mmap[f])
972 getfile(f, mmap[f])
973 for f in r:
973 for f in r:
974 getfile(f, mmap[f])
974 getfile(f, mmap[f])
975 util.set_exec(repo.wjoin(f), mmap.execf(f))
975 util.set_exec(repo.wjoin(f), mmap.execf(f))
976 repo.dirstate.update(m + r, 'n')
976 repo.dirstate.update(m + r, 'n')
977 for f in a:
977 for f in a:
978 try:
978 try:
979 os.unlink(repo.wjoin(f))
979 os.unlink(repo.wjoin(f))
980 except OSError, e:
980 except OSError, e:
981 if e.errno != errno.ENOENT:
981 if e.errno != errno.ENOENT:
982 raise
982 raise
983 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
983 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
984 except: pass
984 except: pass
985 if a:
985 if a:
986 repo.dirstate.forget(a)
986 repo.dirstate.forget(a)
987 repo.dirstate.setparents(qp, revlog.nullid)
987 repo.dirstate.setparents(qp, revlog.nullid)
988 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
988 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
989 del self.applied[start:end]
989 del self.applied[start:end]
990 if len(self.applied):
990 if len(self.applied):
991 self.ui.write("Now at: %s\n" % self.applied[-1].name)
991 self.ui.write("Now at: %s\n" % self.applied[-1].name)
992 else:
992 else:
993 self.ui.write("Patch queue now empty\n")
993 self.ui.write("Patch queue now empty\n")
994
994
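# --- Editorial sketch, not part of queue.py ----------------------------------
# A minimal model of how pop() above chooses which applied patches to drop:
# everything from index popi to the end of the stack is stripped, where popi
# is the index just after the named patch (so the named patch stays applied),
# 0 for "pop all", and len-1 for a plain pop.  The function and names below
# are hypothetical; only the slicing rule mirrors the method.
def pop_range(applied, target_index=None, pop_all=False):
    end = len(applied)
    if target_index is None:
        if pop_all:
            popi = 0
        else:
            popi = end - 1
    else:
        popi = target_index + 1
    if popi >= end:
        return applied, []        # named patch is already at the top
    return applied[:popi], applied[popi:]

# pop_range(['p1', 'p2', 'p3']) == (['p1', 'p2'], ['p3'])
# pop_range(['p1', 'p2', 'p3'], target_index=0) == (['p1'], ['p2', 'p3'])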
995 def diff(self, repo, pats, opts):
995 def diff(self, repo, pats, opts):
996 top = self.check_toppatch(repo)
996 top = self.check_toppatch(repo)
997 if not top:
997 if not top:
998 self.ui.write("No patches applied\n")
998 self.ui.write("No patches applied\n")
999 return
999 return
1000 qp = self.qparents(repo, top)
1000 qp = self.qparents(repo, top)
1001 if opts.get('git'):
1001 if opts.get('git'):
1002 self.diffopts().git = True
1002 self.diffopts().git = True
1003 self.printdiff(repo, qp, files=pats, opts=opts)
1003 self.printdiff(repo, qp, files=pats, opts=opts)
1004
1004
1005 def refresh(self, repo, pats=None, **opts):
1005 def refresh(self, repo, pats=None, **opts):
1006 if len(self.applied) == 0:
1006 if len(self.applied) == 0:
1007 self.ui.write("No patches applied\n")
1007 self.ui.write("No patches applied\n")
1008 return 1
1008 return 1
1009 wlock = repo.wlock()
1009 wlock = repo.wlock()
1010 self.check_toppatch(repo)
1010 self.check_toppatch(repo)
1011 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1011 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1012 top = revlog.bin(top)
1012 top = revlog.bin(top)
1013 cparents = repo.changelog.parents(top)
1013 cparents = repo.changelog.parents(top)
1014 patchparent = self.qparents(repo, top)
1014 patchparent = self.qparents(repo, top)
1015 message, comments, user, date, patchfound = self.readheaders(patchfn)
1015 message, comments, user, date, patchfound = self.readheaders(patchfn)
1016
1016
1017 patchf = self.opener(patchfn, "w")
1017 patchf = self.opener(patchfn, "w")
1018 msg = opts.get('msg', '').rstrip()
1018 msg = opts.get('msg', '').rstrip()
1019 if msg:
1019 if msg:
1020 if comments:
1020 if comments:
1021 # Remove existing message.
1021 # Remove existing message.
1022 ci = 0
1022 ci = 0
1023 subj = None
1023 subj = None
1024 for mi in xrange(len(message)):
1024 for mi in xrange(len(message)):
1025 if comments[ci].lower().startswith('subject: '):
1025 if comments[ci].lower().startswith('subject: '):
1026 subj = comments[ci][9:]
1026 subj = comments[ci][9:]
1027 while message[mi] != comments[ci] and message[mi] != subj:
1027 while message[mi] != comments[ci] and message[mi] != subj:
1028 ci += 1
1028 ci += 1
1029 del comments[ci]
1029 del comments[ci]
1030 comments.append(msg)
1030 comments.append(msg)
1031 if comments:
1031 if comments:
1032 comments = "\n".join(comments) + '\n\n'
1032 comments = "\n".join(comments) + '\n\n'
1033 patchf.write(comments)
1033 patchf.write(comments)
1034
1034
1035 if opts.get('git'):
1035 if opts.get('git'):
1036 self.diffopts().git = True
1036 self.diffopts().git = True
1037 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1037 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1038 tip = repo.changelog.tip()
1038 tip = repo.changelog.tip()
1039 if top == tip:
1039 if top == tip:
1040 # if the top of our patch queue is also the tip, there is an
1040 # if the top of our patch queue is also the tip, there is an
1041 # optimization here. We update the dirstate in place and strip
1041 # optimization here. We update the dirstate in place and strip
1042 # off the tip commit. Then just commit the current directory
1042 # off the tip commit. Then just commit the current directory
1043 # tree. We can also send repo.commit the list of files
1043 # tree. We can also send repo.commit the list of files
1044 # changed to speed up the diff
1044 # changed to speed up the diff
1045 #
1045 #
1046 # in short mode, we only diff the files included in the
1046 # in short mode, we only diff the files included in the
1047 # patch already
1047 # patch already
1048 #
1048 #
1049 # this should really read:
1049 # this should really read:
1050 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1050 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
1051 # but we do it backwards to take advantage of manifest/chlog
1051 # but we do it backwards to take advantage of manifest/chlog
1052 # caching against the next repo.status call
1052 # caching against the next repo.status call
1053 #
1053 #
1054 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1054 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
1055 changes = repo.changelog.read(tip)
1055 changes = repo.changelog.read(tip)
1056 man = repo.manifest.read(changes[0])
1056 man = repo.manifest.read(changes[0])
1057 aaa = aa[:]
1057 aaa = aa[:]
1058 if opts.get('short'):
1058 if opts.get('short'):
1059 filelist = mm + aa + dd
1059 filelist = mm + aa + dd
1060 else:
1060 else:
1061 filelist = None
1061 filelist = None
1062 m, a, r, d, u = repo.status(files=filelist)[:5]
1062 m, a, r, d, u = repo.status(files=filelist)[:5]
1063
1063
1064 # we might end up with files that were added between tip and
1064 # we might end up with files that were added between tip and
1065 # the dirstate parent, but then changed in the local dirstate.
1065 # the dirstate parent, but then changed in the local dirstate.
1066 # in this case, we want them to only show up in the added section
1066 # in this case, we want them to only show up in the added section
1067 for x in m:
1067 for x in m:
1068 if x not in aa:
1068 if x not in aa:
1069 mm.append(x)
1069 mm.append(x)
1070 # we might end up with files added by the local dirstate that
1070 # we might end up with files added by the local dirstate that
1071 # were deleted by the patch. In this case, they should only
1071 # were deleted by the patch. In this case, they should only
1072 # show up in the changed section.
1072 # show up in the changed section.
1073 for x in a:
1073 for x in a:
1074 if x in dd:
1074 if x in dd:
1075 del dd[dd.index(x)]
1075 del dd[dd.index(x)]
1076 mm.append(x)
1076 mm.append(x)
1077 else:
1077 else:
1078 aa.append(x)
1078 aa.append(x)
1079 # make sure any files deleted in the local dirstate
1079 # make sure any files deleted in the local dirstate
1080 # are not in the add or change column of the patch
1080 # are not in the add or change column of the patch
1081 forget = []
1081 forget = []
1082 for x in d + r:
1082 for x in d + r:
1083 if x in aa:
1083 if x in aa:
1084 del aa[aa.index(x)]
1084 del aa[aa.index(x)]
1085 forget.append(x)
1085 forget.append(x)
1086 continue
1086 continue
1087 elif x in mm:
1087 elif x in mm:
1088 del mm[mm.index(x)]
1088 del mm[mm.index(x)]
1089 dd.append(x)
1089 dd.append(x)
1090
1090
1091 m = util.unique(mm)
1091 m = util.unique(mm)
1092 r = util.unique(dd)
1092 r = util.unique(dd)
1093 a = util.unique(aa)
1093 a = util.unique(aa)
1094 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1094 c = [filter(matchfn, l) for l in (m, a, r, [], u)]
1095 filelist = util.unique(c[0] + c[1] + c[2])
1095 filelist = util.unique(c[0] + c[1] + c[2])
1096 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1096 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1097 fp=patchf, changes=c, opts=self.diffopts())
1097 fp=patchf, changes=c, opts=self.diffopts())
1098 patchf.close()
1098 patchf.close()
1099
1099
1100 repo.dirstate.setparents(*cparents)
1100 repo.dirstate.setparents(*cparents)
1101 copies = {}
1101 copies = {}
1102 for dst in a:
1102 for dst in a:
1103 src = repo.dirstate.copied(dst)
1103 src = repo.dirstate.copied(dst)
1104 if src is None:
1104 if src is None:
1105 continue
1105 continue
1106 copies.setdefault(src, []).append(dst)
1106 copies.setdefault(src, []).append(dst)
1107 repo.dirstate.update(a, 'a')
1107 repo.dirstate.update(a, 'a')
1108 # remember the copies between patchparent and tip
1108 # remember the copies between patchparent and tip
1109 # this may be slow, so don't do it if we're not tracking copies
1109 # this may be slow, so don't do it if we're not tracking copies
1110 if self.diffopts().git:
1110 if self.diffopts().git:
1111 for dst in aaa:
1111 for dst in aaa:
1112 f = repo.file(dst)
1112 f = repo.file(dst)
1113 src = f.renamed(man[dst])
1113 src = f.renamed(man[dst])
1114 if src:
1114 if src:
1115 copies[src[0]] = copies.get(dst, [])
1115 copies[src[0]] = copies.get(dst, [])
1116 if dst in a:
1116 if dst in a:
1117 copies[src[0]].append(dst)
1117 copies[src[0]].append(dst)
1118 # we can't copy a file created by the patch itself
1118 # we can't copy a file created by the patch itself
1119 if dst in copies:
1119 if dst in copies:
1120 del copies[dst]
1120 del copies[dst]
1121 for src, dsts in copies.iteritems():
1121 for src, dsts in copies.iteritems():
1122 for dst in dsts:
1122 for dst in dsts:
1123 repo.dirstate.copy(src, dst)
1123 repo.dirstate.copy(src, dst)
1124 repo.dirstate.update(r, 'r')
1124 repo.dirstate.update(r, 'r')
1125 # if the patch excludes a modified file, mark that file with mtime=0
1125 # if the patch excludes a modified file, mark that file with mtime=0
1126 # so status can see it.
1126 # so status can see it.
1127 mm = []
1127 mm = []
1128 for i in xrange(len(m)-1, -1, -1):
1128 for i in xrange(len(m)-1, -1, -1):
1129 if not matchfn(m[i]):
1129 if not matchfn(m[i]):
1130 mm.append(m[i])
1130 mm.append(m[i])
1131 del m[i]
1131 del m[i]
1132 repo.dirstate.update(m, 'n')
1132 repo.dirstate.update(m, 'n')
1133 repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
1133 repo.dirstate.update(mm, 'n', st_mtime=-1, st_size=-1)
1134 repo.dirstate.forget(forget)
1134 repo.dirstate.forget(forget)
1135
1135
1136 if not msg:
1136 if not msg:
1137 if not message:
1137 if not message:
1138 message = "patch queue: %s\n" % patchfn
1138 message = "patch queue: %s\n" % patchfn
1139 else:
1139 else:
1140 message = "\n".join(message)
1140 message = "\n".join(message)
1141 else:
1141 else:
1142 message = msg
1142 message = msg
1143
1143
1144 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1144 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1145 n = repo.commit(filelist, message, changes[1], match=matchfn,
1145 n = repo.commit(filelist, message, changes[1], match=matchfn,
1146 force=1, wlock=wlock)
1146 force=1, wlock=wlock)
1147 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1147 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1148 self.applied_dirty = 1
1148 self.applied_dirty = 1
1149 self.removeundo(repo)
1149 self.removeundo(repo)
1150 else:
1150 else:
1151 self.printdiff(repo, patchparent, fp=patchf)
1151 self.printdiff(repo, patchparent, fp=patchf)
1152 patchf.close()
1152 patchf.close()
1153 added = repo.status()[1]
1153 added = repo.status()[1]
1154 for a in added:
1154 for a in added:
1155 f = repo.wjoin(a)
1155 f = repo.wjoin(a)
1156 try:
1156 try:
1157 os.unlink(f)
1157 os.unlink(f)
1158 except OSError, e:
1158 except OSError, e:
1159 if e.errno != errno.ENOENT:
1159 if e.errno != errno.ENOENT:
1160 raise
1160 raise
1161 try: os.removedirs(os.path.dirname(f))
1161 try: os.removedirs(os.path.dirname(f))
1162 except: pass
1162 except: pass
1163 # forget the file copies in the dirstate
1163 # forget the file copies in the dirstate
1164 # push should re-add the files later on
1164 # push should re-add the files later on
1165 repo.dirstate.forget(added)
1165 repo.dirstate.forget(added)
1166 self.pop(repo, force=True, wlock=wlock)
1166 self.pop(repo, force=True, wlock=wlock)
1167 self.push(repo, force=True, wlock=wlock)
1167 self.push(repo, force=True, wlock=wlock)
1168
1168
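# --- Editorial sketch, not part of queue.py ----------------------------------
# refresh() above merges two status results: (mm, aa, dd) between the patch
# parent and tip, and (m, a, r, d) between tip and the working directory.
# This standalone function restates just that merge rule; the name and the
# tuple-style interface are invented here, and copies, the dirstate and the
# match filter are deliberately ignored.
def merge_status(mm, aa, dd, m, a, r, d):
    mm, aa, dd = list(mm), list(aa), list(dd)
    forget = []
    for x in m:                   # modified locally
        if x not in aa:           # unless the patch added it, it is a change
            mm.append(x)
    for x in a:                   # added locally
        if x in dd:               # the patch deleted it: net effect is a change
            dd.remove(x)
            mm.append(x)
        else:
            aa.append(x)
    for x in d + r:               # deleted/removed locally
        if x in aa:               # drop it from the patch's add column entirely
            aa.remove(x)
            forget.append(x)
        elif x in mm:             # otherwise move it to the delete column
            mm.remove(x)
            dd.append(x)
    return mm, aa, dd, forget

# merge_status(['f.c'], [], [], [], ['new.c'], [], []) ==
#     (['f.c'], ['new.c'], [], [])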
1169 def init(self, repo, create=False):
1169 def init(self, repo, create=False):
1170 if not create and os.path.isdir(self.path):
1170 if not create and os.path.isdir(self.path):
1171 raise util.Abort(_("patch queue directory already exists"))
1171 raise util.Abort(_("patch queue directory already exists"))
1172 try:
1172 try:
1173 os.mkdir(self.path)
1173 os.mkdir(self.path)
1174 except OSError, inst:
1174 except OSError, inst:
1175 if inst.errno != errno.EEXIST or not create:
1175 if inst.errno != errno.EEXIST or not create:
1176 raise
1176 raise
1177 if create:
1177 if create:
1178 return self.qrepo(create=True)
1178 return self.qrepo(create=True)
1179
1179
1180 def unapplied(self, repo, patch=None):
1180 def unapplied(self, repo, patch=None):
1181 if patch and patch not in self.series:
1181 if patch and patch not in self.series:
1182 raise util.Abort(_("patch %s is not in series file") % patch)
1182 raise util.Abort(_("patch %s is not in series file") % patch)
1183 if not patch:
1183 if not patch:
1184 start = self.series_end()
1184 start = self.series_end()
1185 else:
1185 else:
1186 start = self.series.index(patch) + 1
1186 start = self.series.index(patch) + 1
1187 unapplied = []
1187 unapplied = []
1188 for i in xrange(start, len(self.series)):
1188 for i in xrange(start, len(self.series)):
1189 pushable, reason = self.pushable(i)
1189 pushable, reason = self.pushable(i)
1190 if pushable:
1190 if pushable:
1191 unapplied.append((i, self.series[i]))
1191 unapplied.append((i, self.series[i]))
1192 self.explain_pushable(i)
1192 self.explain_pushable(i)
1193 return unapplied
1193 return unapplied
1194
1194
1195 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1195 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1196 summary=False):
1196 summary=False):
1197 def displayname(patchname):
1197 def displayname(patchname):
1198 if summary:
1198 if summary:
1199 msg = self.readheaders(patchname)[0]
1199 msg = self.readheaders(patchname)[0]
1200 msg = msg and ': ' + msg[0] or ': '
1200 msg = msg and ': ' + msg[0] or ': '
1201 else:
1201 else:
1202 msg = ''
1202 msg = ''
1203 return '%s%s' % (patchname, msg)
1203 return '%s%s' % (patchname, msg)
1204
1204
1205 applied = dict.fromkeys([p.name for p in self.applied])
1205 applied = dict.fromkeys([p.name for p in self.applied])
1206 if length is None:
1206 if length is None:
1207 length = len(self.series) - start
1207 length = len(self.series) - start
1208 if not missing:
1208 if not missing:
1209 for i in xrange(start, start+length):
1209 for i in xrange(start, start+length):
1210 patch = self.series[i]
1210 patch = self.series[i]
1211 if patch in applied:
1211 if patch in applied:
1212 stat = 'A'
1212 stat = 'A'
1213 elif self.pushable(i)[0]:
1213 elif self.pushable(i)[0]:
1214 stat = 'U'
1214 stat = 'U'
1215 else:
1215 else:
1216 stat = 'G'
1216 stat = 'G'
1217 pfx = ''
1217 pfx = ''
1218 if self.ui.verbose:
1218 if self.ui.verbose:
1219 pfx = '%d %s ' % (i, stat)
1219 pfx = '%d %s ' % (i, stat)
1220 elif status and status != stat:
1220 elif status and status != stat:
1221 continue
1221 continue
1222 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1222 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1223 else:
1223 else:
1224 msng_list = []
1224 msng_list = []
1225 for root, dirs, files in os.walk(self.path):
1225 for root, dirs, files in os.walk(self.path):
1226 d = root[len(self.path) + 1:]
1226 d = root[len(self.path) + 1:]
1227 for f in files:
1227 for f in files:
1228 fl = os.path.join(d, f)
1228 fl = os.path.join(d, f)
1229 if (fl not in self.series and
1229 if (fl not in self.series and
1230 fl not in (self.status_path, self.series_path,
1230 fl not in (self.status_path, self.series_path,
1231 self.guards_path)
1231 self.guards_path)
1232 and not fl.startswith('.')):
1232 and not fl.startswith('.')):
1233 msng_list.append(fl)
1233 msng_list.append(fl)
1234 msng_list.sort()
1234 msng_list.sort()
1235 for x in msng_list:
1235 for x in msng_list:
1236 pfx = self.ui.verbose and ('D ') or ''
1236 pfx = self.ui.verbose and ('D ') or ''
1237 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1237 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1238
1238
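# --- Editorial sketch, not part of queue.py ----------------------------------
# qseries() above tags every series entry with a one-letter status: 'A' for
# applied, 'U' for unapplied but pushable, 'G' for guarded.  A hypothetical
# helper spelling out that mapping (applied_names is a set of names, pushable
# is any callable from series index to bool):
def series_status(name, index, applied_names, pushable):
    if name in applied_names:
        return 'A'                # applied
    if pushable(index):
        return 'U'                # unapplied and pushable
    return 'G'                    # guarded out by the active guard selection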
1239 def issaveline(self, l):
1239 def issaveline(self, l):
1240 if l.name == '.hg.patches.save.line':
1240 if l.name == '.hg.patches.save.line':
1241 return True
1241 return True
1242
1242
1243 def qrepo(self, create=False):
1243 def qrepo(self, create=False):
1244 if create or os.path.isdir(self.join(".hg")):
1244 if create or os.path.isdir(self.join(".hg")):
1245 return hg.repository(self.ui, path=self.path, create=create)
1245 return hg.repository(self.ui, path=self.path, create=create)
1246
1246
1247 def restore(self, repo, rev, delete=None, qupdate=None):
1247 def restore(self, repo, rev, delete=None, qupdate=None):
1248 c = repo.changelog.read(rev)
1248 c = repo.changelog.read(rev)
1249 desc = c[4].strip()
1249 desc = c[4].strip()
1250 lines = desc.splitlines()
1250 lines = desc.splitlines()
1251 i = 0
1251 i = 0
1252 datastart = None
1252 datastart = None
1253 series = []
1253 series = []
1254 applied = []
1254 applied = []
1255 qpp = None
1255 qpp = None
1256 for i in xrange(0, len(lines)):
1256 for i in xrange(0, len(lines)):
1257 if lines[i] == 'Patch Data:':
1257 if lines[i] == 'Patch Data:':
1258 datastart = i + 1
1258 datastart = i + 1
1259 elif lines[i].startswith('Dirstate:'):
1259 elif lines[i].startswith('Dirstate:'):
1260 l = lines[i].rstrip()
1260 l = lines[i].rstrip()
1261 l = l[10:].split(' ')
1261 l = l[10:].split(' ')
1262 qpp = [ hg.bin(x) for x in l ]
1262 qpp = [ hg.bin(x) for x in l ]
1263 elif datastart != None:
1263 elif datastart != None:
1264 l = lines[i].rstrip()
1264 l = lines[i].rstrip()
1265 se = statusentry(l)
1265 se = statusentry(l)
1266 file_ = se.name
1266 file_ = se.name
1267 if se.rev:
1267 if se.rev:
1268 applied.append(se)
1268 applied.append(se)
1269 else:
1269 else:
1270 series.append(file_)
1270 series.append(file_)
1271 if datastart == None:
1271 if datastart == None:
1272 self.ui.warn("No saved patch data found\n")
1272 self.ui.warn("No saved patch data found\n")
1273 return 1
1273 return 1
1274 self.ui.warn("restoring status: %s\n" % lines[0])
1274 self.ui.warn("restoring status: %s\n" % lines[0])
1275 self.full_series = series
1275 self.full_series = series
1276 self.applied = applied
1276 self.applied = applied
1277 self.parse_series()
1277 self.parse_series()
1278 self.series_dirty = 1
1278 self.series_dirty = 1
1279 self.applied_dirty = 1
1279 self.applied_dirty = 1
1280 heads = repo.changelog.heads()
1280 heads = repo.changelog.heads()
1281 if delete:
1281 if delete:
1282 if rev not in heads:
1282 if rev not in heads:
1283 self.ui.warn("save entry has children, leaving it alone\n")
1283 self.ui.warn("save entry has children, leaving it alone\n")
1284 else:
1284 else:
1285 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1285 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1286 pp = repo.dirstate.parents()
1286 pp = repo.dirstate.parents()
1287 if rev in pp:
1287 if rev in pp:
1288 update = True
1288 update = True
1289 else:
1289 else:
1290 update = False
1290 update = False
1291 self.strip(repo, rev, update=update, backup='strip')
1291 self.strip(repo, rev, update=update, backup='strip')
1292 if qpp:
1292 if qpp:
1293 self.ui.warn("saved queue repository parents: %s %s\n" %
1293 self.ui.warn("saved queue repository parents: %s %s\n" %
1294 (hg.short(qpp[0]), hg.short(qpp[1])))
1294 (hg.short(qpp[0]), hg.short(qpp[1])))
1295 if qupdate:
1295 if qupdate:
1296 self.ui.warn("queue directory updating\n")
1296 self.ui.warn("queue directory updating\n")
1297 r = self.qrepo()
1297 r = self.qrepo()
1298 if not r:
1298 if not r:
1299 self.ui.warn("Unable to load queue repository\n")
1299 self.ui.warn("Unable to load queue repository\n")
1300 return 1
1300 return 1
1301 hg.clean(r, qpp[0])
1301 hg.clean(r, qpp[0])
1302
1302
1303 def save(self, repo, msg=None):
1303 def save(self, repo, msg=None):
1304 if len(self.applied) == 0:
1304 if len(self.applied) == 0:
1305 self.ui.warn("save: no patches applied, exiting\n")
1305 self.ui.warn("save: no patches applied, exiting\n")
1306 return 1
1306 return 1
1307 if self.issaveline(self.applied[-1]):
1307 if self.issaveline(self.applied[-1]):
1308 self.ui.warn("status is already saved\n")
1308 self.ui.warn("status is already saved\n")
1309 return 1
1309 return 1
1310
1310
1311 ar = [ ':' + x for x in self.full_series ]
1311 ar = [ ':' + x for x in self.full_series ]
1312 if not msg:
1312 if not msg:
1313 msg = "hg patches saved state"
1313 msg = "hg patches saved state"
1314 else:
1314 else:
1315 msg = "hg patches: " + msg.rstrip('\r\n')
1315 msg = "hg patches: " + msg.rstrip('\r\n')
1316 r = self.qrepo()
1316 r = self.qrepo()
1317 if r:
1317 if r:
1318 pp = r.dirstate.parents()
1318 pp = r.dirstate.parents()
1319 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1319 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1320 msg += "\n\nPatch Data:\n"
1320 msg += "\n\nPatch Data:\n"
1321 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1321 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1322 "\n".join(ar) + '\n' or "")
1322 "\n".join(ar) + '\n' or "")
1323 n = repo.commit(None, text, user=None, force=1)
1323 n = repo.commit(None, text, user=None, force=1)
1324 if not n:
1324 if not n:
1325 self.ui.warn("repo commit failed\n")
1325 self.ui.warn("repo commit failed\n")
1326 return 1
1326 return 1
1327 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1327 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1328 self.applied_dirty = 1
1328 self.applied_dirty = 1
1329 self.removeundo(repo)
1329 self.removeundo(repo)
1330
1330
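# --- Editorial sketch, not part of queue.py ----------------------------------
# save() above stores the queue state in a commit message that restore(),
# earlier in this file, parses back: the message, an optional "Dirstate: p1 p2"
# line, a "Patch Data:" marker, one "node:name" line per applied patch and one
# ":name" line per series entry.  The builder below is a reconstruction for
# illustration only (its name is invented and the whitespace is approximate).
def encode_queue_state(msg, dirstate_parents, applied, series):
    # dirstate_parents: (p1hex, p2hex) pair or None;
    # applied: list of (hexnode, name) pairs; series: all series file entries
    lines = [msg]
    if dirstate_parents:
        lines.append("Dirstate: %s %s" % dirstate_parents)
    lines.append("")
    lines.append("Patch Data:")
    for rev, name in applied:
        lines.append("%s:%s" % (rev, name))
    for name in series:
        lines.append(":" + name)
    return "\n".join(lines) + "\n"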
1331 def full_series_end(self):
1331 def full_series_end(self):
1332 if len(self.applied) > 0:
1332 if len(self.applied) > 0:
1333 p = self.applied[-1].name
1333 p = self.applied[-1].name
1334 end = self.find_series(p)
1334 end = self.find_series(p)
1335 if end == None:
1335 if end == None:
1336 return len(self.full_series)
1336 return len(self.full_series)
1337 return end + 1
1337 return end + 1
1338 return 0
1338 return 0
1339
1339
1340 def series_end(self, all_patches=False):
1340 def series_end(self, all_patches=False):
1341 """If all_patches is False, return the index of the next pushable patch
1341 """If all_patches is False, return the index of the next pushable patch
1342 in the series, or the series length. If all_patches is True, return the
1342 in the series, or the series length. If all_patches is True, return the
1343 index of the first patch past the last applied one.
1343 index of the first patch past the last applied one.
1344 """
1344 """
1345 end = 0
1345 end = 0
1346 def next(start):
1346 def next(start):
1347 if all_patches:
1347 if all_patches:
1348 return start
1348 return start
1349 i = start
1349 i = start
1350 while i < len(self.series):
1350 while i < len(self.series):
1351 p, reason = self.pushable(i)
1351 p, reason = self.pushable(i)
1352 if p:
1352 if p:
1353 break
1353 break
1354 self.explain_pushable(i)
1354 self.explain_pushable(i)
1355 i += 1
1355 i += 1
1356 return i
1356 return i
1357 if len(self.applied) > 0:
1357 if len(self.applied) > 0:
1358 p = self.applied[-1].name
1358 p = self.applied[-1].name
1359 try:
1359 try:
1360 end = self.series.index(p)
1360 end = self.series.index(p)
1361 except ValueError:
1361 except ValueError:
1362 return 0
1362 return 0
1363 return next(end + 1)
1363 return next(end + 1)
1364 return next(end)
1364 return next(end)
1365
1365
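# --- Editorial sketch, not part of queue.py ----------------------------------
# series_end() above walks forward from the last applied patch and, unless
# all_patches is set, skips entries whose guards make them unpushable.  The
# scan reduces to this small (hypothetical) helper, where pushable is any
# callable from series index to bool:
def next_pushable(series, start, pushable):
    i = start
    while i < len(series) and not pushable(i):
        i += 1
    return i

# next_pushable(['a', 'b', 'c'], 1, lambda i: i != 1) == 2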
1366 def appliedname(self, index):
1366 def appliedname(self, index):
1367 pname = self.applied[index].name
1367 pname = self.applied[index].name
1368 if not self.ui.verbose:
1368 if not self.ui.verbose:
1369 p = pname
1369 p = pname
1370 else:
1370 else:
1371 p = str(self.series.index(pname)) + " " + pname
1371 p = str(self.series.index(pname)) + " " + pname
1372 return p
1372 return p
1373
1373
1374 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1374 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1375 force=None, git=False):
1375 force=None, git=False):
1376 def checkseries(patchname):
1376 def checkseries(patchname):
1377 if patchname in self.series:
1377 if patchname in self.series:
1378 raise util.Abort(_('patch %s is already in the series file')
1378 raise util.Abort(_('patch %s is already in the series file')
1379 % patchname)
1379 % patchname)
1380 def checkfile(patchname):
1380 def checkfile(patchname):
1381 if not force and os.path.exists(self.join(patchname)):
1381 if not force and os.path.exists(self.join(patchname)):
1382 raise util.Abort(_('patch "%s" already exists')
1382 raise util.Abort(_('patch "%s" already exists')
1383 % patchname)
1383 % patchname)
1384
1384
1385 if rev:
1385 if rev:
1386 if files:
1386 if files:
1387 raise util.Abort(_('option "-r" not valid when importing '
1387 raise util.Abort(_('option "-r" not valid when importing '
1388 'files'))
1388 'files'))
1389 rev = cmdutil.revrange(repo, rev)
1389 rev = cmdutil.revrange(repo, rev)
1390 rev.sort(lambda x, y: cmp(y, x))
1390 rev.sort(lambda x, y: cmp(y, x))
1391 if (len(files) > 1 or len(rev) > 1) and patchname:
1391 if (len(files) > 1 or len(rev) > 1) and patchname:
1392 raise util.Abort(_('option "-n" not valid when importing multiple '
1392 raise util.Abort(_('option "-n" not valid when importing multiple '
1393 'patches'))
1393 'patches'))
1394 i = 0
1394 i = 0
1395 added = []
1395 added = []
1396 if rev:
1396 if rev:
1397 # If mq patches are applied, we can only import revisions
1397 # If mq patches are applied, we can only import revisions
1398 # that form a linear path to qbase.
1398 # that form a linear path to qbase.
1399 # Otherwise, they should form a linear path to a head.
1399 # Otherwise, they should form a linear path to a head.
1400 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1400 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1401 if len(heads) > 1:
1401 if len(heads) > 1:
1402 raise util.Abort(_('revision %d is the root of more than one '
1402 raise util.Abort(_('revision %d is the root of more than one '
1403 'branch') % rev[-1])
1403 'branch') % rev[-1])
1404 if self.applied:
1404 if self.applied:
1405 base = revlog.hex(repo.changelog.node(rev[0]))
1405 base = revlog.hex(repo.changelog.node(rev[0]))
1406 if base in [n.rev for n in self.applied]:
1406 if base in [n.rev for n in self.applied]:
1407 raise util.Abort(_('revision %d is already managed')
1407 raise util.Abort(_('revision %d is already managed')
1408 % rev[0])
1408 % rev[0])
1409 if heads != [revlog.bin(self.applied[-1].rev)]:
1409 if heads != [revlog.bin(self.applied[-1].rev)]:
1410 raise util.Abort(_('revision %d is not the parent of '
1410 raise util.Abort(_('revision %d is not the parent of '
1411 'the queue') % rev[0])
1411 'the queue') % rev[0])
1412 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1412 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1413 lastparent = repo.changelog.parentrevs(base)[0]
1413 lastparent = repo.changelog.parentrevs(base)[0]
1414 else:
1414 else:
1415 if heads != [repo.changelog.node(rev[0])]:
1415 if heads != [repo.changelog.node(rev[0])]:
1416 raise util.Abort(_('revision %d has unmanaged children')
1416 raise util.Abort(_('revision %d has unmanaged children')
1417 % rev[0])
1417 % rev[0])
1418 lastparent = None
1418 lastparent = None
1419
1419
1420 if git:
1420 if git:
1421 self.diffopts().git = True
1421 self.diffopts().git = True
1422
1422
1423 for r in rev:
1423 for r in rev:
1424 p1, p2 = repo.changelog.parentrevs(r)
1424 p1, p2 = repo.changelog.parentrevs(r)
1425 n = repo.changelog.node(r)
1425 n = repo.changelog.node(r)
1426 if p2 != revlog.nullrev:
1426 if p2 != revlog.nullrev:
1427 raise util.Abort(_('cannot import merge revision %d') % r)
1427 raise util.Abort(_('cannot import merge revision %d') % r)
1428 if lastparent and lastparent != r:
1428 if lastparent and lastparent != r:
1429 raise util.Abort(_('revision %d is not the parent of %d')
1429 raise util.Abort(_('revision %d is not the parent of %d')
1430 % (r, lastparent))
1430 % (r, lastparent))
1431 lastparent = p1
1431 lastparent = p1
1432
1432
1433 if not patchname:
1433 if not patchname:
1434 patchname = normname('%d.diff' % r)
1434 patchname = normname('%d.diff' % r)
1435 checkseries(patchname)
1435 checkseries(patchname)
1436 checkfile(patchname)
1436 checkfile(patchname)
1437 self.full_series.insert(0, patchname)
1437 self.full_series.insert(0, patchname)
1438
1438
1439 patchf = self.opener(patchname, "w")
1439 patchf = self.opener(patchname, "w")
1440 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1440 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1441 patchf.close()
1441 patchf.close()
1442
1442
1443 se = statusentry(revlog.hex(n), patchname)
1443 se = statusentry(revlog.hex(n), patchname)
1444 self.applied.insert(0, se)
1444 self.applied.insert(0, se)
1445
1445
1446 added.append(patchname)
1446 added.append(patchname)
1447 patchname = None
1447 patchname = None
1448 self.parse_series()
1448 self.parse_series()
1449 self.applied_dirty = 1
1449 self.applied_dirty = 1
1450
1450
1451 for filename in files:
1451 for filename in files:
1452 if existing:
1452 if existing:
1453 if filename == '-':
1453 if filename == '-':
1454 raise util.Abort(_('-e is incompatible with import from -'))
1454 raise util.Abort(_('-e is incompatible with import from -'))
1455 if not patchname:
1455 if not patchname:
1456 patchname = normname(filename)
1456 patchname = normname(filename)
1457 if not os.path.isfile(self.join(patchname)):
1457 if not os.path.isfile(self.join(patchname)):
1458 raise util.Abort(_("patch %s does not exist") % patchname)
1458 raise util.Abort(_("patch %s does not exist") % patchname)
1459 else:
1459 else:
1460 try:
1460 try:
1461 if filename == '-':
1461 if filename == '-':
1462 if not patchname:
1462 if not patchname:
1463 raise util.Abort(_('need --name to import a patch from -'))
1463 raise util.Abort(_('need --name to import a patch from -'))
1464 text = sys.stdin.read()
1464 text = sys.stdin.read()
1465 else:
1465 else:
1466 text = file(filename).read()
1466 text = file(filename).read()
1467 except IOError:
1467 except IOError:
1468 raise util.Abort(_("unable to read %s") % patchname)
1468 raise util.Abort(_("unable to read %s") % patchname)
1469 if not patchname:
1469 if not patchname:
1470 patchname = normname(os.path.basename(filename))
1470 patchname = normname(os.path.basename(filename))
1471 checkfile(patchname)
1471 checkfile(patchname)
1472 patchf = self.opener(patchname, "w")
1472 patchf = self.opener(patchname, "w")
1473 patchf.write(text)
1473 patchf.write(text)
1474 checkseries(patchname)
1474 checkseries(patchname)
1475 index = self.full_series_end() + i
1475 index = self.full_series_end() + i
1476 self.full_series[index:index] = [patchname]
1476 self.full_series[index:index] = [patchname]
1477 self.parse_series()
1477 self.parse_series()
1478 self.ui.warn("adding %s to series file\n" % patchname)
1478 self.ui.warn("adding %s to series file\n" % patchname)
1479 i += 1
1479 i += 1
1480 added.append(patchname)
1480 added.append(patchname)
1481 patchname = None
1481 patchname = None
1482 self.series_dirty = 1
1482 self.series_dirty = 1
1483 qrepo = self.qrepo()
1483 qrepo = self.qrepo()
1484 if qrepo:
1484 if qrepo:
1485 qrepo.add(added)
1485 qrepo.add(added)
1486
1486
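# --- Editorial sketch, not part of queue.py ----------------------------------
# qimport(..., rev=...) above only accepts a linear, merge-free run of
# revisions (newest first) ending at qbase or at a head.  The core of that
# check, restated over hypothetical (rev, p1, p2) triples with -1 standing in
# for the null revision:
NULLREV = -1

def check_linear(triples, lastparent=None):
    for r, p1, p2 in triples:
        if p2 != NULLREV:
            raise ValueError("cannot import merge revision %d" % r)
        if lastparent is not None and lastparent != r:
            raise ValueError("revision %d is not the parent of %d"
                             % (r, lastparent))
        lastparent = p1
    return lastparent

# check_linear([(5, 4, -1), (4, 3, -1)]) == 3; a gap or a merge raises.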
1487 def delete(ui, repo, *patches, **opts):
1487 def delete(ui, repo, *patches, **opts):
1488 """remove patches from queue
1488 """remove patches from queue
1489
1489
1490 With --rev, mq will stop managing the named revisions. The
1490 With --rev, mq will stop managing the named revisions. The
1491 patches must be applied and at the base of the stack. This option
1491 patches must be applied and at the base of the stack. This option
1492 is useful when the patches have been applied upstream.
1492 is useful when the patches have been applied upstream.
1493
1493
1494 Otherwise, the patches must not be applied.
1494 Otherwise, the patches must not be applied.
1495
1495
1496 With --keep, the patch files are preserved in the patch directory."""
1496 With --keep, the patch files are preserved in the patch directory."""
1497 q = repo.mq
1497 q = repo.mq
1498 q.delete(repo, patches, opts)
1498 q.delete(repo, patches, opts)
1499 q.save_dirty()
1499 q.save_dirty()
1500 return 0
1500 return 0
1501
1501
1502 def applied(ui, repo, patch=None, **opts):
1502 def applied(ui, repo, patch=None, **opts):
1503 """print the patches already applied"""
1503 """print the patches already applied"""
1504 q = repo.mq
1504 q = repo.mq
1505 if patch:
1505 if patch:
1506 if patch not in q.series:
1506 if patch not in q.series:
1507 raise util.Abort(_("patch %s is not in series file") % patch)
1507 raise util.Abort(_("patch %s is not in series file") % patch)
1508 end = q.series.index(patch) + 1
1508 end = q.series.index(patch) + 1
1509 else:
1509 else:
1510 end = q.series_end(True)
1510 end = q.series_end(True)
1511 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1511 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1512
1512
1513 def unapplied(ui, repo, patch=None, **opts):
1513 def unapplied(ui, repo, patch=None, **opts):
1514 """print the patches not yet applied"""
1514 """print the patches not yet applied"""
1515 q = repo.mq
1515 q = repo.mq
1516 if patch:
1516 if patch:
1517 if patch not in q.series:
1517 if patch not in q.series:
1518 raise util.Abort(_("patch %s is not in series file") % patch)
1518 raise util.Abort(_("patch %s is not in series file") % patch)
1519 start = q.series.index(patch) + 1
1519 start = q.series.index(patch) + 1
1520 else:
1520 else:
1521 start = q.series_end(True)
1521 start = q.series_end(True)
1522 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1522 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1523
1523
1524 def qimport(ui, repo, *filename, **opts):
1524 def qimport(ui, repo, *filename, **opts):
1525 """import a patch
1525 """import a patch
1526
1526
1527 The patch will have the same name as its source file unless you
1527 The patch will have the same name as its source file unless you
1528 give it a new one with --name.
1528 give it a new one with --name.
1529
1529
1530 You can register an existing patch inside the patch directory
1530 You can register an existing patch inside the patch directory
1531 with the --existing flag.
1531 with the --existing flag.
1532
1532
1533 With --force, an existing patch of the same name will be overwritten.
1533 With --force, an existing patch of the same name will be overwritten.
1534
1534
1535 An existing changeset may be placed under mq control with --rev
1535 An existing changeset may be placed under mq control with --rev
1536 (e.g. qimport --rev tip -n patch will place tip under mq control).
1536 (e.g. qimport --rev tip -n patch will place tip under mq control).
1537 With --git, patches imported with --rev will use the git diff
1537 With --git, patches imported with --rev will use the git diff
1538 format.
1538 format.
1539 """
1539 """
1540 q = repo.mq
1540 q = repo.mq
1541 q.qimport(repo, filename, patchname=opts['name'],
1541 q.qimport(repo, filename, patchname=opts['name'],
1542 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1542 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1543 git=opts['git'])
1543 git=opts['git'])
1544 q.save_dirty()
1544 q.save_dirty()
1545 return 0
1545 return 0
1546
1546
1547 def init(ui, repo, **opts):
1547 def init(ui, repo, **opts):
1548 """init a new queue repository
1548 """init a new queue repository
1549
1549
1550 The queue repository is unversioned by default. If -c is
1550 The queue repository is unversioned by default. If -c is
1551 specified, qinit will create a separate nested repository
1551 specified, qinit will create a separate nested repository
1552 for patches. Use qcommit to commit changes to this queue
1552 for patches. Use qcommit to commit changes to this queue
1553 repository."""
1553 repository."""
1554 q = repo.mq
1554 q = repo.mq
1555 r = q.init(repo, create=opts['create_repo'])
1555 r = q.init(repo, create=opts['create_repo'])
1556 q.save_dirty()
1556 q.save_dirty()
1557 if r:
1557 if r:
1558 if not os.path.exists(r.wjoin('.hgignore')):
1558 if not os.path.exists(r.wjoin('.hgignore')):
1559 fp = r.wopener('.hgignore', 'w')
1559 fp = r.wopener('.hgignore', 'w')
1560 fp.write('syntax: glob\n')
1560 fp.write('syntax: glob\n')
1561 fp.write('status\n')
1561 fp.write('status\n')
1562 fp.write('guards\n')
1562 fp.write('guards\n')
1563 fp.close()
1563 fp.close()
1564 if not os.path.exists(r.wjoin('series')):
1564 if not os.path.exists(r.wjoin('series')):
1565 r.wopener('series', 'w').close()
1565 r.wopener('series', 'w').close()
1566 r.add(['.hgignore', 'series'])
1566 r.add(['.hgignore', 'series'])
1567 commands.add(ui, r)
1567 commands.add(ui, r)
1568 return 0
1568 return 0
1569
1569
1570 def clone(ui, source, dest=None, **opts):
1570 def clone(ui, source, dest=None, **opts):
1571 '''clone main and patch repository at same time
1571 '''clone main and patch repository at same time
1572
1572
1573 If source is local, destination will have no patches applied. If
1573 If source is local, destination will have no patches applied. If
1574 source is remote, this command cannot check whether patches are
1574 source is remote, this command cannot check whether patches are
1575 applied in source, so it cannot guarantee that patches are not
1575 applied in source, so it cannot guarantee that patches are not
1576 applied in destination. If you clone a remote repository, make
1576 applied in destination. If you clone a remote repository, make
1577 sure it has no patches applied before you clone it.
1577 sure it has no patches applied before you clone it.
1578
1578
1579 The source patch repository is looked for in <src>/.hg/patches by
1579 The source patch repository is looked for in <src>/.hg/patches by
1580 default. Use -p <url> to change it.
1580 default. Use -p <url> to change it.
1581 '''
1581 '''
1582 commands.setremoteconfig(ui, opts)
1582 commands.setremoteconfig(ui, opts)
1583 if dest is None:
1583 if dest is None:
1584 dest = hg.defaultdest(source)
1584 dest = hg.defaultdest(source)
1585 sr = hg.repository(ui, ui.expandpath(source))
1585 sr = hg.repository(ui, ui.expandpath(source))
1586 qbase, destrev = None, None
1586 qbase, destrev = None, None
1587 if sr.local():
1587 if sr.local():
1588 if sr.mq.applied:
1588 if sr.mq.applied:
1589 qbase = revlog.bin(sr.mq.applied[0].rev)
1589 qbase = revlog.bin(sr.mq.applied[0].rev)
1590 if not hg.islocal(dest):
1590 if not hg.islocal(dest):
1591 heads = dict.fromkeys(sr.heads())
1591 heads = dict.fromkeys(sr.heads())
1592 for h in sr.heads(qbase):
1592 for h in sr.heads(qbase):
1593 del heads[h]
1593 del heads[h]
1594 destrev = heads.keys()
1594 destrev = heads.keys()
1595 destrev.append(sr.changelog.parents(qbase)[0])
1595 destrev.append(sr.changelog.parents(qbase)[0])
1596 ui.note(_('cloning main repo\n'))
1596 ui.note(_('cloning main repo\n'))
1597 sr, dr = hg.clone(ui, sr, dest,
1597 sr, dr = hg.clone(ui, sr.url(), dest,
1598 pull=opts['pull'],
1598 pull=opts['pull'],
1599 rev=destrev,
1599 rev=destrev,
1600 update=False,
1600 update=False,
1601 stream=opts['uncompressed'])
1601 stream=opts['uncompressed'])
1602 ui.note(_('cloning patch repo\n'))
1602 ui.note(_('cloning patch repo\n'))
1603 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1603 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1604 dr.url() + '/.hg/patches',
1604 dr.url() + '/.hg/patches',
1605 pull=opts['pull'],
1605 pull=opts['pull'],
1606 update=not opts['noupdate'],
1606 update=not opts['noupdate'],
1607 stream=opts['uncompressed'])
1607 stream=opts['uncompressed'])
1608 if dr.local():
1608 if dr.local():
1609 if qbase:
1609 if qbase:
1610 ui.note(_('stripping applied patches from destination repo\n'))
1610 ui.note(_('stripping applied patches from destination repo\n'))
1611 dr.mq.strip(dr, qbase, update=False, backup=None)
1611 dr.mq.strip(dr, qbase, update=False, backup=None)
1612 if not opts['noupdate']:
1612 if not opts['noupdate']:
1613 ui.note(_('updating destination repo\n'))
1613 ui.note(_('updating destination repo\n'))
1614 hg.update(dr, dr.changelog.tip())
1614 hg.update(dr, dr.changelog.tip())
1615
1615
1616 def commit(ui, repo, *pats, **opts):
1616 def commit(ui, repo, *pats, **opts):
1617 """commit changes in the queue repository"""
1617 """commit changes in the queue repository"""
1618 q = repo.mq
1618 q = repo.mq
1619 r = q.qrepo()
1619 r = q.qrepo()
1620 if not r: raise util.Abort('no queue repository')
1620 if not r: raise util.Abort('no queue repository')
1621 commands.commit(r.ui, r, *pats, **opts)
1621 commands.commit(r.ui, r, *pats, **opts)
1622
1622
1623 def series(ui, repo, **opts):
1623 def series(ui, repo, **opts):
1624 """print the entire series file"""
1624 """print the entire series file"""
1625 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1625 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1626 return 0
1626 return 0
1627
1627
1628 def top(ui, repo, **opts):
1628 def top(ui, repo, **opts):
1629 """print the name of the current patch"""
1629 """print the name of the current patch"""
1630 q = repo.mq
1630 q = repo.mq
1631 t = q.applied and q.series_end(True) or 0
1631 t = q.applied and q.series_end(True) or 0
1632 if t:
1632 if t:
1633 return q.qseries(repo, start=t-1, length=1, status='A',
1633 return q.qseries(repo, start=t-1, length=1, status='A',
1634 summary=opts.get('summary'))
1634 summary=opts.get('summary'))
1635 else:
1635 else:
1636 ui.write("No patches applied\n")
1636 ui.write("No patches applied\n")
1637 return 1
1637 return 1
1638
1638
1639 def next(ui, repo, **opts):
1639 def next(ui, repo, **opts):
1640 """print the name of the next patch"""
1640 """print the name of the next patch"""
1641 q = repo.mq
1641 q = repo.mq
1642 end = q.series_end()
1642 end = q.series_end()
1643 if end == len(q.series):
1643 if end == len(q.series):
1644 ui.write("All patches applied\n")
1644 ui.write("All patches applied\n")
1645 return 1
1645 return 1
1646 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1646 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1647
1647
1648 def prev(ui, repo, **opts):
1648 def prev(ui, repo, **opts):
1649 """print the name of the previous patch"""
1649 """print the name of the previous patch"""
1650 q = repo.mq
1650 q = repo.mq
1651 l = len(q.applied)
1651 l = len(q.applied)
1652 if l == 1:
1652 if l == 1:
1653 ui.write("Only one patch applied\n")
1653 ui.write("Only one patch applied\n")
1654 return 1
1654 return 1
1655 if not l:
1655 if not l:
1656 ui.write("No patches applied\n")
1656 ui.write("No patches applied\n")
1657 return 1
1657 return 1
1658 return q.qseries(repo, start=l-2, length=1, status='A',
1658 return q.qseries(repo, start=l-2, length=1, status='A',
1659 summary=opts.get('summary'))
1659 summary=opts.get('summary'))
1660
1660
1661 def new(ui, repo, patch, **opts):
1661 def new(ui, repo, patch, **opts):
1662 """create a new patch
1662 """create a new patch
1663
1663
1664 qnew creates a new patch on top of the currently-applied patch
1664 qnew creates a new patch on top of the currently-applied patch
1665 (if any). It will refuse to run if there are any outstanding
1665 (if any). It will refuse to run if there are any outstanding
1666 changes unless -f is specified, in which case the patch will
1666 changes unless -f is specified, in which case the patch will
1667 be initialised with them.
1667 be initialised with them.
1668
1668
1669 -e, -m or -l set the patch header as well as the commit message.
1669 -e, -m or -l set the patch header as well as the commit message.
1670 If none is specified, the patch header is empty and the
1670 If none is specified, the patch header is empty and the
1671 commit message is 'New patch: PATCH'"""
1671 commit message is 'New patch: PATCH'"""
1672 q = repo.mq
1672 q = repo.mq
1673 message = commands.logmessage(opts)
1673 message = commands.logmessage(opts)
1674 if opts['edit']:
1674 if opts['edit']:
1675 message = ui.edit(message, ui.username())
1675 message = ui.edit(message, ui.username())
1676 q.new(repo, patch, msg=message, force=opts['force'])
1676 q.new(repo, patch, msg=message, force=opts['force'])
1677 q.save_dirty()
1677 q.save_dirty()
1678 return 0
1678 return 0
1679
1679
1680 def refresh(ui, repo, *pats, **opts):
1680 def refresh(ui, repo, *pats, **opts):
1681 """update the current patch
1681 """update the current patch
1682
1682
1683 If any file patterns are provided, the refreshed patch will contain only
1683 If any file patterns are provided, the refreshed patch will contain only
1684 the modifications that match those patterns; the remaining modifications
1684 the modifications that match those patterns; the remaining modifications
1685 will remain in the working directory.
1685 will remain in the working directory.
1686
1686
1687 hg add/remove/copy/rename work as usual, though you might want to use
1687 hg add/remove/copy/rename work as usual, though you might want to use
1688 git-style patches (--git or [diff] git=1) to track copies and renames.
1688 git-style patches (--git or [diff] git=1) to track copies and renames.
1689 """
1689 """
1690 q = repo.mq
1690 q = repo.mq
1691 message = commands.logmessage(opts)
1691 message = commands.logmessage(opts)
1692 if opts['edit']:
1692 if opts['edit']:
1693 if message:
1693 if message:
1694 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1694 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1695 patch = q.applied[-1].name
1695 patch = q.applied[-1].name
1696 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1696 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1697 message = ui.edit('\n'.join(message), user or ui.username())
1697 message = ui.edit('\n'.join(message), user or ui.username())
1698 ret = q.refresh(repo, pats, msg=message, **opts)
1698 ret = q.refresh(repo, pats, msg=message, **opts)
1699 q.save_dirty()
1699 q.save_dirty()
1700 return ret
1700 return ret
1701
1701
1702 def diff(ui, repo, *pats, **opts):
1702 def diff(ui, repo, *pats, **opts):
1703 """diff of the current patch"""
1703 """diff of the current patch"""
1704 repo.mq.diff(repo, pats, opts)
1704 repo.mq.diff(repo, pats, opts)
1705 return 0
1705 return 0
1706
1706
1707 def fold(ui, repo, *files, **opts):
1707 def fold(ui, repo, *files, **opts):
1708 """fold the named patches into the current patch
1708 """fold the named patches into the current patch
1709
1709
1710 Patches must not yet be applied. Each patch will be successively
1710 Patches must not yet be applied. Each patch will be successively
1711 applied to the current patch in the order given. If all the
1711 applied to the current patch in the order given. If all the
1712 patches apply successfully, the current patch will be refreshed
1712 patches apply successfully, the current patch will be refreshed
1713 with the new cumulative patch, and the folded patches will
1713 with the new cumulative patch, and the folded patches will
1714 be deleted. With -k/--keep, the folded patch files will not
1714 be deleted. With -k/--keep, the folded patch files will not
1715 be removed afterwards.
1715 be removed afterwards.
1716
1716
1717 The header for each folded patch will be concatenated with
1717 The header for each folded patch will be concatenated with
1718 the current patch header, separated by a line of '* * *'."""
1718 the current patch header, separated by a line of '* * *'."""
1719
1719
1720 q = repo.mq
1720 q = repo.mq
1721
1721
1722 if not files:
1722 if not files:
1723 raise util.Abort(_('qfold requires at least one patch name'))
1723 raise util.Abort(_('qfold requires at least one patch name'))
1724 if not q.check_toppatch(repo):
1724 if not q.check_toppatch(repo):
1725 raise util.Abort(_('No patches applied'))
1725 raise util.Abort(_('No patches applied'))
1726
1726
1727 message = commands.logmessage(opts)
1727 message = commands.logmessage(opts)
1728 if opts['edit']:
1728 if opts['edit']:
1729 if message:
1729 if message:
1730 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1730 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1731
1731
1732 parent = q.lookup('qtip')
1732 parent = q.lookup('qtip')
1733 patches = []
1733 patches = []
1734 messages = []
1734 messages = []
1735 for f in files:
1735 for f in files:
1736 p = q.lookup(f)
1736 p = q.lookup(f)
1737 if p in patches or p == parent:
1737 if p in patches or p == parent:
1738 ui.warn(_('Skipping already folded patch %s\n') % p)
1738 ui.warn(_('Skipping already folded patch %s\n') % p)
1739 if q.isapplied(p):
1739 if q.isapplied(p):
1740 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1740 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1741 patches.append(p)
1741 patches.append(p)
1742
1742
1743 for p in patches:
1743 for p in patches:
1744 if not message:
1744 if not message:
1745 messages.append(q.readheaders(p)[0])
1745 messages.append(q.readheaders(p)[0])
1746 pf = q.join(p)
1746 pf = q.join(p)
1747 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1747 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1748 if not patchsuccess:
1748 if not patchsuccess:
1749 raise util.Abort(_('Error folding patch %s') % p)
1749 raise util.Abort(_('Error folding patch %s') % p)
1750 patch.updatedir(ui, repo, files)
1750 patch.updatedir(ui, repo, files)
1751
1751
1752 if not message:
1752 if not message:
1753 message, comments, user = q.readheaders(parent)[0:3]
1753 message, comments, user = q.readheaders(parent)[0:3]
1754 for msg in messages:
1754 for msg in messages:
1755 message.append('* * *')
1755 message.append('* * *')
1756 message.extend(msg)
1756 message.extend(msg)
1757 message = '\n'.join(message)
1757 message = '\n'.join(message)
1758
1758
1759 if opts['edit']:
1759 if opts['edit']:
1760 message = ui.edit(message, user or ui.username())
1760 message = ui.edit(message, user or ui.username())
1761
1761
1762 q.refresh(repo, msg=message)
1762 q.refresh(repo, msg=message)
1763 q.delete(repo, patches, opts)
1763 q.delete(repo, patches, opts)
1764 q.save_dirty()
1764 q.save_dirty()
1765
1765
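# --- Editorial sketch, not part of queue.py ----------------------------------
# How fold() above assembles the combined commit message: the parent patch's
# header followed by each folded patch's header, separated by '* * *' lines.
# The helper and its names are hypothetical.
def join_headers(parent_lines, folded_headers):
    out = list(parent_lines)
    for lines in folded_headers:
        out.append('* * *')
        out.extend(lines)
    return '\n'.join(out)

# join_headers(['fix foo'], [['tweak bar'], ['cleanup']]) ==
#     'fix foo\n* * *\ntweak bar\n* * *\ncleanup'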
1766 def goto(ui, repo, patch, **opts):
1766 def goto(ui, repo, patch, **opts):
1767 '''push or pop patches until named patch is at top of stack'''
1767 '''push or pop patches until named patch is at top of stack'''
1768 q = repo.mq
1768 q = repo.mq
1769 patch = q.lookup(patch)
1769 patch = q.lookup(patch)
1770 if q.isapplied(patch):
1770 if q.isapplied(patch):
1771 ret = q.pop(repo, patch, force=opts['force'])
1771 ret = q.pop(repo, patch, force=opts['force'])
1772 else:
1772 else:
1773 ret = q.push(repo, patch, force=opts['force'])
1773 ret = q.push(repo, patch, force=opts['force'])
1774 q.save_dirty()
1774 q.save_dirty()
1775 return ret
1775 return ret
1776
1776
1777 def guard(ui, repo, *args, **opts):
1777 def guard(ui, repo, *args, **opts):
1778 '''set or print guards for a patch
1778 '''set or print guards for a patch
1779
1779
1780 Guards control whether a patch can be pushed. A patch with no
1780 Guards control whether a patch can be pushed. A patch with no
1781 guards is always pushed. A patch with a positive guard ("+foo") is
1781 guards is always pushed. A patch with a positive guard ("+foo") is
1782 pushed only if the qselect command has activated it. A patch with
1782 pushed only if the qselect command has activated it. A patch with
1783 a negative guard ("-foo") is never pushed if the qselect command
1783 a negative guard ("-foo") is never pushed if the qselect command
1784 has activated it.
1784 has activated it.
1785
1785
1786 With no arguments, print the currently active guards.
1786 With no arguments, print the currently active guards.
1787 With arguments, set guards for the named patch.
1787 With arguments, set guards for the named patch.
1788
1788
1789 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1789 To set a negative guard "-foo" on the topmost patch ("--" is needed so
1790 hg will not interpret "-foo" as an option):
1790 hg will not interpret "-foo" as an option):
1791 hg qguard -- -foo
1791 hg qguard -- -foo
1792
1792
1793 To set guards on another patch:
1793 To set guards on another patch:
1794 hg qguard other.patch +2.6.17 -stable
1794 hg qguard other.patch +2.6.17 -stable
1795 '''
1795 '''
1796 def status(idx):
1796 def status(idx):
1797 guards = q.series_guards[idx] or ['unguarded']
1797 guards = q.series_guards[idx] or ['unguarded']
1798 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1798 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1799 q = repo.mq
1799 q = repo.mq
1800 patch = None
1800 patch = None
1801 args = list(args)
1801 args = list(args)
1802 if opts['list']:
1802 if opts['list']:
1803 if args or opts['none']:
1803 if args or opts['none']:
1804 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1804 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1805 for i in xrange(len(q.series)):
1805 for i in xrange(len(q.series)):
1806 status(i)
1806 status(i)
1807 return
1807 return
1808 if not args or args[0][0:1] in '-+':
1808 if not args or args[0][0:1] in '-+':
1809 if not q.applied:
1809 if not q.applied:
1810 raise util.Abort(_('no patches applied'))
1810 raise util.Abort(_('no patches applied'))
1811 patch = q.applied[-1].name
1811 patch = q.applied[-1].name
1812 if patch is None and args[0][0:1] not in '-+':
1812 if patch is None and args[0][0:1] not in '-+':
1813 patch = args.pop(0)
1813 patch = args.pop(0)
1814 if patch is None:
1814 if patch is None:
1815 raise util.Abort(_('no patch to work with'))
1815 raise util.Abort(_('no patch to work with'))
1816 if args or opts['none']:
1816 if args or opts['none']:
1817 idx = q.find_series(patch)
1817 idx = q.find_series(patch)
1818 if idx is None:
1818 if idx is None:
1819 raise util.Abort(_('no patch named %s') % patch)
1819 raise util.Abort(_('no patch named %s') % patch)
1820 q.set_guards(idx, args)
1820 q.set_guards(idx, args)
1821 q.save_dirty()
1821 q.save_dirty()
1822 else:
1822 else:
1823 status(q.series.index(q.lookup(patch)))
1823 status(q.series.index(q.lookup(patch)))
1824
1824
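# --- Editorial sketch, not part of queue.py ----------------------------------
# A restatement of the guard rule described in the qguard docstring above.
# The real decision is made by pushable(), which is defined earlier in this
# file and not shown here, so treat this as an approximation: an unguarded
# patch always pushes, an active negative guard always blocks, and positive
# guards require at least one of them to be active.
def guard_allows_push(patch_guards, active_guards):
    if not patch_guards:
        return True                        # unguarded: always pushable
    positives = 0
    for g in patch_guards:
        if g.startswith('-') and g[1:] in active_guards:
            return False                   # active negative guard blocks it
        if g.startswith('+'):
            positives += 1
    for g in patch_guards:
        if g.startswith('+') and g[1:] in active_guards:
            return True                    # a positive guard is active
    return positives == 0                  # only negative guards, none active

# guard_allows_push(['+2.6.17', '-stable'], set()) == False
# guard_allows_push(['+2.6.17'], set(['2.6.17'])) == True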
1825 def header(ui, repo, patch=None):
1825 def header(ui, repo, patch=None):
1826 """Print the header of the topmost or specified patch"""
1826 """Print the header of the topmost or specified patch"""
1827 q = repo.mq
1827 q = repo.mq
1828
1828
1829 if patch:
1829 if patch:
1830 patch = q.lookup(patch)
1830 patch = q.lookup(patch)
1831 else:
1831 else:
1832 if not q.applied:
1832 if not q.applied:
1833 ui.write('No patches applied\n')
1833 ui.write('No patches applied\n')
1834 return 1
1834 return 1
1835 patch = q.lookup('qtip')
1835 patch = q.lookup('qtip')
1836 message = repo.mq.readheaders(patch)[0]
1836 message = repo.mq.readheaders(patch)[0]
1837
1837
1838 ui.write('\n'.join(message) + '\n')
1838 ui.write('\n'.join(message) + '\n')
1839
1839
1840 def lastsavename(path):
1840 def lastsavename(path):
1841 (directory, base) = os.path.split(path)
1841 (directory, base) = os.path.split(path)
1842 names = os.listdir(directory)
1842 names = os.listdir(directory)
1843 namere = re.compile("%s.([0-9]+)" % base)
1843 namere = re.compile("%s.([0-9]+)" % base)
1844 maxindex = None
1844 maxindex = None
1845 maxname = None
1845 maxname = None
1846 for f in names:
1846 for f in names:
1847 m = namere.match(f)
1847 m = namere.match(f)
1848 if m:
1848 if m:
1849 index = int(m.group(1))
1849 index = int(m.group(1))
1850 if maxindex == None or index > maxindex:
1850 if maxindex == None or index > maxindex:
1851 maxindex = index
1851 maxindex = index
1852 maxname = f
1852 maxname = f
1853 if maxname:
1853 if maxname:
1854 return (os.path.join(directory, maxname), maxindex)
1854 return (os.path.join(directory, maxname), maxindex)
1855 return (None, None)
1855 return (None, None)
1856
1856
1857 def savename(path):
1857 def savename(path):
1858 (last, index) = lastsavename(path)
1858 (last, index) = lastsavename(path)
1859 if last is None:
1859 if last is None:
1860 index = 0
1860 index = 0
1861 newpath = path + ".%d" % (index + 1)
1861 newpath = path + ".%d" % (index + 1)
1862 return newpath
1862 return newpath
1863
1863
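# A small, self-contained sketch of the numbering scheme implemented by
# lastsavename()/savename() above. It works on a plain list of names instead
# of os.listdir(), the example names are hypothetical, and the pattern is
# slightly stricter than the regex above (escaped dot, anchored at the end).
import re

def _next_savename_sketch(base, existing_names):
    pat = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    indexes = [int(m.group(1)) for m in map(pat.match, existing_names) if m]
    if indexes:
        return "%s.%d" % (base, max(indexes) + 1)
    return "%s.1" % base

# _next_savename_sketch("patches", ["patches.1", "patches.2"]) -> "patches.3"
# _next_savename_sketch("patches", [])                         -> "patches.1"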
1864 def push(ui, repo, patch=None, **opts):
1864 def push(ui, repo, patch=None, **opts):
1865 """push the next patch onto the stack"""
1865 """push the next patch onto the stack"""
1866 q = repo.mq
1866 q = repo.mq
1867 mergeq = None
1867 mergeq = None
1868
1868
1869 if opts['all']:
1869 if opts['all']:
1870 if not q.series:
1870 if not q.series:
1871 ui.warn(_('no patches in series\n'))
1871 ui.warn(_('no patches in series\n'))
1872 return 0
1872 return 0
1873 patch = q.series[-1]
1873 patch = q.series[-1]
1874 if opts['merge']:
1874 if opts['merge']:
1875 if opts['name']:
1875 if opts['name']:
1876 newpath = opts['name']
1876 newpath = opts['name']
1877 else:
1877 else:
1878 newpath, i = lastsavename(q.path)
1878 newpath, i = lastsavename(q.path)
1879 if not newpath:
1879 if not newpath:
1880 ui.warn("no saved queues found, please use -n\n")
1880 ui.warn("no saved queues found, please use -n\n")
1881 return 1
1881 return 1
1882 mergeq = queue(ui, repo.join(""), newpath)
1882 mergeq = queue(ui, repo.join(""), newpath)
1883 ui.warn("merging with queue at: %s\n" % mergeq.path)
1883 ui.warn("merging with queue at: %s\n" % mergeq.path)
1884 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1884 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1885 mergeq=mergeq)
1885 mergeq=mergeq)
1886 return ret
1886 return ret
1887
1887
1888 def pop(ui, repo, patch=None, **opts):
1888 def pop(ui, repo, patch=None, **opts):
1889 """pop the current patch off the stack"""
1889 """pop the current patch off the stack"""
1890 localupdate = True
1890 localupdate = True
1891 if opts['name']:
1891 if opts['name']:
1892 q = queue(ui, repo.join(""), repo.join(opts['name']))
1892 q = queue(ui, repo.join(""), repo.join(opts['name']))
1893 ui.warn('using patch queue: %s\n' % q.path)
1893 ui.warn('using patch queue: %s\n' % q.path)
1894 localupdate = False
1894 localupdate = False
1895 else:
1895 else:
1896 q = repo.mq
1896 q = repo.mq
1897 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1897 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
1898 all=opts['all'])
1898 all=opts['all'])
1899 q.save_dirty()
1899 q.save_dirty()
1900 return ret
1900 return ret
1901
1901
1902 def rename(ui, repo, patch, name=None, **opts):
1902 def rename(ui, repo, patch, name=None, **opts):
1903 """rename a patch
1903 """rename a patch
1904
1904
1905 With one argument, renames the current patch to PATCH1.
1905 With one argument, renames the current patch to PATCH1.
1906 With two arguments, renames PATCH1 to PATCH2."""
1906 With two arguments, renames PATCH1 to PATCH2."""
1907
1907
1908 q = repo.mq
1908 q = repo.mq
1909
1909
1910 if not name:
1910 if not name:
1911 name = patch
1911 name = patch
1912 patch = None
1912 patch = None
1913
1913
1914 if patch:
1914 if patch:
1915 patch = q.lookup(patch)
1915 patch = q.lookup(patch)
1916 else:
1916 else:
1917 if not q.applied:
1917 if not q.applied:
1918 ui.write(_('No patches applied\n'))
1918 ui.write(_('No patches applied\n'))
1919 return
1919 return
1920 patch = q.lookup('qtip')
1920 patch = q.lookup('qtip')
1921 absdest = q.join(name)
1921 absdest = q.join(name)
1922 if os.path.isdir(absdest):
1922 if os.path.isdir(absdest):
1923 name = normname(os.path.join(name, os.path.basename(patch)))
1923 name = normname(os.path.join(name, os.path.basename(patch)))
1924 absdest = q.join(name)
1924 absdest = q.join(name)
1925 if os.path.exists(absdest):
1925 if os.path.exists(absdest):
1926 raise util.Abort(_('%s already exists') % absdest)
1926 raise util.Abort(_('%s already exists') % absdest)
1927
1927
1928 if name in q.series:
1928 if name in q.series:
1929 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1929 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1930
1930
1931 if ui.verbose:
1931 if ui.verbose:
1932 ui.write('Renaming %s to %s\n' % (patch, name))
1932 ui.write('Renaming %s to %s\n' % (patch, name))
1933 i = q.find_series(patch)
1933 i = q.find_series(patch)
1934 guards = q.guard_re.findall(q.full_series[i])
1934 guards = q.guard_re.findall(q.full_series[i])
1935 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1935 q.full_series[i] = name + ''.join([' #' + g for g in guards])
1936 q.parse_series()
1936 q.parse_series()
1937 q.series_dirty = 1
1937 q.series_dirty = 1
1938
1938
1939 info = q.isapplied(patch)
1939 info = q.isapplied(patch)
1940 if info:
1940 if info:
1941 q.applied[info[0]] = statusentry(info[1], name)
1941 q.applied[info[0]] = statusentry(info[1], name)
1942 q.applied_dirty = 1
1942 q.applied_dirty = 1
1943
1943
1944 util.rename(q.join(patch), absdest)
1944 util.rename(q.join(patch), absdest)
1945 r = q.qrepo()
1945 r = q.qrepo()
1946 if r:
1946 if r:
1947 wlock = r.wlock()
1947 wlock = r.wlock()
1948 if r.dirstate.state(name) == 'r':
1948 if r.dirstate.state(name) == 'r':
1949 r.undelete([name], wlock)
1949 r.undelete([name], wlock)
1950 r.copy(patch, name, wlock)
1950 r.copy(patch, name, wlock)
1951 r.remove([patch], False, wlock)
1951 r.remove([patch], False, wlock)
1952
1952
1953 q.save_dirty()
1953 q.save_dirty()
1954
1954
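# An illustrative helper (not mq's own code) showing how a series-file entry
# can be renamed while keeping its "#+guard"/"#-guard" annotations, which is
# what the q.guard_re handling above does. The regex below is an assumption
# made for this sketch, not copied from mq.
import re

_guard_sketch_re = re.compile(r'#([-+]\S+)')

def _rename_series_entry_sketch(entry, newname):
    guards = _guard_sketch_re.findall(entry)
    return newname + ''.join([' #' + g for g in guards])

# _rename_series_entry_sketch('old-fix.patch #+stable #-broken', 'new-fix.patch')
#   -> 'new-fix.patch #+stable #-broken'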
1955 def restore(ui, repo, rev, **opts):
1955 def restore(ui, repo, rev, **opts):
1956 """restore the queue state saved by a rev"""
1956 """restore the queue state saved by a rev"""
1957 rev = repo.lookup(rev)
1957 rev = repo.lookup(rev)
1958 q = repo.mq
1958 q = repo.mq
1959 q.restore(repo, rev, delete=opts['delete'],
1959 q.restore(repo, rev, delete=opts['delete'],
1960 qupdate=opts['update'])
1960 qupdate=opts['update'])
1961 q.save_dirty()
1961 q.save_dirty()
1962 return 0
1962 return 0
1963
1963
1964 def save(ui, repo, **opts):
1964 def save(ui, repo, **opts):
1965 """save current queue state"""
1965 """save current queue state"""
1966 q = repo.mq
1966 q = repo.mq
1967 message = commands.logmessage(opts)
1967 message = commands.logmessage(opts)
1968 ret = q.save(repo, msg=message)
1968 ret = q.save(repo, msg=message)
1969 if ret:
1969 if ret:
1970 return ret
1970 return ret
1971 q.save_dirty()
1971 q.save_dirty()
1972 if opts['copy']:
1972 if opts['copy']:
1973 path = q.path
1973 path = q.path
1974 if opts['name']:
1974 if opts['name']:
1975 newpath = os.path.join(q.basepath, opts['name'])
1975 newpath = os.path.join(q.basepath, opts['name'])
1976 if os.path.exists(newpath):
1976 if os.path.exists(newpath):
1977 if not os.path.isdir(newpath):
1977 if not os.path.isdir(newpath):
1978 raise util.Abort(_('destination %s exists and is not '
1978 raise util.Abort(_('destination %s exists and is not '
1979 'a directory') % newpath)
1979 'a directory') % newpath)
1980 if not opts['force']:
1980 if not opts['force']:
1981 raise util.Abort(_('destination %s exists, '
1981 raise util.Abort(_('destination %s exists, '
1982 'use -f to force') % newpath)
1982 'use -f to force') % newpath)
1983 else:
1983 else:
1984 newpath = savename(path)
1984 newpath = savename(path)
1985 ui.warn("copy %s to %s\n" % (path, newpath))
1985 ui.warn("copy %s to %s\n" % (path, newpath))
1986 util.copyfiles(path, newpath)
1986 util.copyfiles(path, newpath)
1987 if opts['empty']:
1987 if opts['empty']:
1988 try:
1988 try:
1989 os.unlink(q.join(q.status_path))
1989 os.unlink(q.join(q.status_path))
1990 except:
1990 except:
1991 pass
1991 pass
1992 return 0
1992 return 0
1993
1993
1994 def strip(ui, repo, rev, **opts):
1994 def strip(ui, repo, rev, **opts):
1995 """strip a revision and all later revs on the same branch"""
1995 """strip a revision and all later revs on the same branch"""
1996 rev = repo.lookup(rev)
1996 rev = repo.lookup(rev)
1997 backup = 'all'
1997 backup = 'all'
1998 if opts['backup']:
1998 if opts['backup']:
1999 backup = 'strip'
1999 backup = 'strip'
2000 elif opts['nobackup']:
2000 elif opts['nobackup']:
2001 backup = 'none'
2001 backup = 'none'
2002 update = repo.dirstate.parents()[0] != revlog.nullid
2002 update = repo.dirstate.parents()[0] != revlog.nullid
2003 repo.mq.strip(repo, rev, backup=backup, update=update)
2003 repo.mq.strip(repo, rev, backup=backup, update=update)
2004 return 0
2004 return 0
2005
2005
2006 def select(ui, repo, *args, **opts):
2006 def select(ui, repo, *args, **opts):
2007 '''set or print guarded patches to push
2007 '''set or print guarded patches to push
2008
2008
2009 Use the qguard command to set or print guards on a patch, then use
2009 Use the qguard command to set or print guards on a patch, then use
2010 qselect to tell mq which guards to use. A patch will be pushed if it
2010 qselect to tell mq which guards to use. A patch will be pushed if it
2011 has no guards or any positive guards match the currently selected guard,
2011 has no guards or any positive guards match the currently selected guard,
2012 but will not be pushed if any negative guards match the current guard.
2012 but will not be pushed if any negative guards match the current guard.
2013 For example:
2013 For example:
2014
2014
2015 qguard foo.patch -stable (negative guard)
2015 qguard foo.patch -stable (negative guard)
2016 qguard bar.patch +stable (positive guard)
2016 qguard bar.patch +stable (positive guard)
2017 qselect stable
2017 qselect stable
2018
2018
2019 This activates the "stable" guard. mq will skip foo.patch (because
2019 This activates the "stable" guard. mq will skip foo.patch (because
2020 it has a negative match) but push bar.patch (because it
2020 it has a negative match) but push bar.patch (because it
2021 has a positive match).
2021 has a positive match).
2022
2022
2023 With no arguments, prints the currently active guards.
2023 With no arguments, prints the currently active guards.
2024 With one argument, sets the active guard.
2024 With one argument, sets the active guard.
2025
2025
2026 Use -n/--none to deactivate guards (no other arguments needed).
2026 Use -n/--none to deactivate guards (no other arguments needed).
2027 When no guards are active, patches with positive guards are skipped
2027 When no guards are active, patches with positive guards are skipped
2028 and patches with negative guards are pushed.
2028 and patches with negative guards are pushed.
2029
2029
2030 qselect can change the guards on applied patches. It does not pop
2030 qselect can change the guards on applied patches. It does not pop
2031 guarded patches by default. Use --pop to pop back to the last applied
2031 guarded patches by default. Use --pop to pop back to the last applied
2032 patch that is not guarded. Use --reapply (which implies --pop) to push
2032 patch that is not guarded. Use --reapply (which implies --pop) to push
2033 back to the current patch afterwards, but skip guarded patches.
2033 back to the current patch afterwards, but skip guarded patches.
2034
2034
2035 Use -s/--series to print a list of all guards in the series file (no
2035 Use -s/--series to print a list of all guards in the series file (no
2036 other arguments needed). Use -v for more information.'''
2036 other arguments needed). Use -v for more information.'''
2037
2037
2038 q = repo.mq
2038 q = repo.mq
2039 guards = q.active()
2039 guards = q.active()
2040 if args or opts['none']:
2040 if args or opts['none']:
2041 old_unapplied = q.unapplied(repo)
2041 old_unapplied = q.unapplied(repo)
2042 old_guarded = [i for i in xrange(len(q.applied)) if
2042 old_guarded = [i for i in xrange(len(q.applied)) if
2043 not q.pushable(i)[0]]
2043 not q.pushable(i)[0]]
2044 q.set_active(args)
2044 q.set_active(args)
2045 q.save_dirty()
2045 q.save_dirty()
2046 if not args:
2046 if not args:
2047 ui.status(_('guards deactivated\n'))
2047 ui.status(_('guards deactivated\n'))
2048 if not opts['pop'] and not opts['reapply']:
2048 if not opts['pop'] and not opts['reapply']:
2049 unapplied = q.unapplied(repo)
2049 unapplied = q.unapplied(repo)
2050 guarded = [i for i in xrange(len(q.applied))
2050 guarded = [i for i in xrange(len(q.applied))
2051 if not q.pushable(i)[0]]
2051 if not q.pushable(i)[0]]
2052 if len(unapplied) != len(old_unapplied):
2052 if len(unapplied) != len(old_unapplied):
2053 ui.status(_('number of unguarded, unapplied patches has '
2053 ui.status(_('number of unguarded, unapplied patches has '
2054 'changed from %d to %d\n') %
2054 'changed from %d to %d\n') %
2055 (len(old_unapplied), len(unapplied)))
2055 (len(old_unapplied), len(unapplied)))
2056 if len(guarded) != len(old_guarded):
2056 if len(guarded) != len(old_guarded):
2057 ui.status(_('number of guarded, applied patches has changed '
2057 ui.status(_('number of guarded, applied patches has changed '
2058 'from %d to %d\n') %
2058 'from %d to %d\n') %
2059 (len(old_guarded), len(guarded)))
2059 (len(old_guarded), len(guarded)))
2060 elif opts['series']:
2060 elif opts['series']:
2061 guards = {}
2061 guards = {}
2062 noguards = 0
2062 noguards = 0
2063 for gs in q.series_guards:
2063 for gs in q.series_guards:
2064 if not gs:
2064 if not gs:
2065 noguards += 1
2065 noguards += 1
2066 for g in gs:
2066 for g in gs:
2067 guards.setdefault(g, 0)
2067 guards.setdefault(g, 0)
2068 guards[g] += 1
2068 guards[g] += 1
2069 if ui.verbose:
2069 if ui.verbose:
2070 guards['NONE'] = noguards
2070 guards['NONE'] = noguards
2071 guards = guards.items()
2071 guards = guards.items()
2072 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2072 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2073 if guards:
2073 if guards:
2074 ui.note(_('guards in series file:\n'))
2074 ui.note(_('guards in series file:\n'))
2075 for guard, count in guards:
2075 for guard, count in guards:
2076 ui.note('%2d ' % count)
2076 ui.note('%2d ' % count)
2077 ui.write(guard, '\n')
2077 ui.write(guard, '\n')
2078 else:
2078 else:
2079 ui.note(_('no guards in series file\n'))
2079 ui.note(_('no guards in series file\n'))
2080 else:
2080 else:
2081 if guards:
2081 if guards:
2082 ui.note(_('active guards:\n'))
2082 ui.note(_('active guards:\n'))
2083 for g in guards:
2083 for g in guards:
2084 ui.write(g, '\n')
2084 ui.write(g, '\n')
2085 else:
2085 else:
2086 ui.write(_('no active guards\n'))
2086 ui.write(_('no active guards\n'))
2087 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2087 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2088 popped = False
2088 popped = False
2089 if opts['pop'] or opts['reapply']:
2089 if opts['pop'] or opts['reapply']:
2090 for i in xrange(len(q.applied)):
2090 for i in xrange(len(q.applied)):
2091 pushable, reason = q.pushable(i)
2091 pushable, reason = q.pushable(i)
2092 if not pushable:
2092 if not pushable:
2093 ui.status(_('popping guarded patches\n'))
2093 ui.status(_('popping guarded patches\n'))
2094 popped = True
2094 popped = True
2095 if i == 0:
2095 if i == 0:
2096 q.pop(repo, all=True)
2096 q.pop(repo, all=True)
2097 else:
2097 else:
2098 q.pop(repo, i-1)
2098 q.pop(repo, i-1)
2099 break
2099 break
2100 if popped:
2100 if popped:
2101 try:
2101 try:
2102 if reapply:
2102 if reapply:
2103 ui.status(_('reapplying unguarded patches\n'))
2103 ui.status(_('reapplying unguarded patches\n'))
2104 q.push(repo, reapply)
2104 q.push(repo, reapply)
2105 finally:
2105 finally:
2106 q.save_dirty()
2106 q.save_dirty()
2107
2107
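# A self-contained sketch of the push/skip rules described in the qselect
# docstring above; this is not mq's pushable() implementation. Patch guards
# are strings like '+stable' or '-stable', active guards are plain names.
def _pushable_sketch(patch_guards, active_guards):
    active = set(active_guards)
    for g in patch_guards:
        # any matching negative guard blocks the patch outright
        if g.startswith('-') and g[1:] in active:
            return False
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    if not positives:
        return True     # no positive guards (and no matching negatives) -> pushable
    return bool(active.intersection(positives))

# With 'stable' selected, as in the docstring example:
#   _pushable_sketch(['-stable'], ['stable']) -> False  (foo.patch is skipped)
#   _pushable_sketch(['+stable'], ['stable']) -> True   (bar.patch is pushed)
# With no guards active:
#   _pushable_sketch(['+stable'], [])         -> False  (positive guards skip)
#   _pushable_sketch(['-stable'], [])         -> True   (negative guards push)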
2108 def reposetup(ui, repo):
2108 def reposetup(ui, repo):
2109 class mqrepo(repo.__class__):
2109 class mqrepo(repo.__class__):
2110 def abort_if_wdir_patched(self, errmsg, force=False):
2110 def abort_if_wdir_patched(self, errmsg, force=False):
2111 if self.mq.applied and not force:
2111 if self.mq.applied and not force:
2112 parent = revlog.hex(self.dirstate.parents()[0])
2112 parent = revlog.hex(self.dirstate.parents()[0])
2113 if parent in [s.rev for s in self.mq.applied]:
2113 if parent in [s.rev for s in self.mq.applied]:
2114 raise util.Abort(errmsg)
2114 raise util.Abort(errmsg)
2115
2115
2116 def commit(self, *args, **opts):
2116 def commit(self, *args, **opts):
2117 if len(args) >= 6:
2117 if len(args) >= 6:
2118 force = args[5]
2118 force = args[5]
2119 else:
2119 else:
2120 force = opts.get('force')
2120 force = opts.get('force')
2121 self.abort_if_wdir_patched(
2121 self.abort_if_wdir_patched(
2122 _('cannot commit over an applied mq patch'),
2122 _('cannot commit over an applied mq patch'),
2123 force)
2123 force)
2124
2124
2125 return super(mqrepo, self).commit(*args, **opts)
2125 return super(mqrepo, self).commit(*args, **opts)
2126
2126
2127 def push(self, remote, force=False, revs=None):
2127 def push(self, remote, force=False, revs=None):
2128 if self.mq.applied and not force and not revs:
2128 if self.mq.applied and not force and not revs:
2129 raise util.Abort(_('source has mq patches applied'))
2129 raise util.Abort(_('source has mq patches applied'))
2130 return super(mqrepo, self).push(remote, force, revs)
2130 return super(mqrepo, self).push(remote, force, revs)
2131
2131
2132 def tags(self):
2132 def tags(self):
2133 if self.tagscache:
2133 if self.tagscache:
2134 return self.tagscache
2134 return self.tagscache
2135
2135
2136 tagscache = super(mqrepo, self).tags()
2136 tagscache = super(mqrepo, self).tags()
2137
2137
2138 q = self.mq
2138 q = self.mq
2139 if not q.applied:
2139 if not q.applied:
2140 return tagscache
2140 return tagscache
2141
2141
2142 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2142 mqtags = [(revlog.bin(patch.rev), patch.name) for patch in q.applied]
2143 mqtags.append((mqtags[-1][0], 'qtip'))
2143 mqtags.append((mqtags[-1][0], 'qtip'))
2144 mqtags.append((mqtags[0][0], 'qbase'))
2144 mqtags.append((mqtags[0][0], 'qbase'))
2145 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2145 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2146 for patch in mqtags:
2146 for patch in mqtags:
2147 if patch[1] in tagscache:
2147 if patch[1] in tagscache:
2148 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2148 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2149 else:
2149 else:
2150 tagscache[patch[1]] = patch[0]
2150 tagscache[patch[1]] = patch[0]
2151
2151
2152 return tagscache
2152 return tagscache
2153
2153
2154 def _branchtags(self):
2154 def _branchtags(self):
2155 q = self.mq
2155 q = self.mq
2156 if not q.applied:
2156 if not q.applied:
2157 return super(mqrepo, self)._branchtags()
2157 return super(mqrepo, self)._branchtags()
2158
2158
2159 self.branchcache = {} # avoid recursion in changectx
2159 self.branchcache = {} # avoid recursion in changectx
2160 cl = self.changelog
2160 cl = self.changelog
2161 partial, last, lrev = self._readbranchcache()
2161 partial, last, lrev = self._readbranchcache()
2162
2162
2163 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2163 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2164 start = lrev + 1
2164 start = lrev + 1
2165 if start < qbase:
2165 if start < qbase:
2166 # update the cache (excluding the patches) and save it
2166 # update the cache (excluding the patches) and save it
2167 self._updatebranchcache(partial, lrev+1, qbase)
2167 self._updatebranchcache(partial, lrev+1, qbase)
2168 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2168 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2169 start = qbase
2169 start = qbase
2170 # if start = qbase, the cache is as updated as it should be.
2170 # if start = qbase, the cache is as updated as it should be.
2171 # if start > qbase, the cache includes (part of) the patches.
2171 # if start > qbase, the cache includes (part of) the patches.
2172 # we might as well use it, but we won't save it.
2172 # we might as well use it, but we won't save it.
2173
2173
2174 # update the cache up to the tip
2174 # update the cache up to the tip
2175 self._updatebranchcache(partial, start, cl.count())
2175 self._updatebranchcache(partial, start, cl.count())
2176
2176
2177 return partial
2177 return partial
2178
2178
2179 if repo.local():
2179 if repo.local():
2180 repo.__class__ = mqrepo
2180 repo.__class__ = mqrepo
2181 repo.mq = queue(ui, repo.join(""))
2181 repo.mq = queue(ui, repo.join(""))
2182
2182
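# A minimal sketch of the tag synthesis done in mqrepo.tags() above. Real
# entries map tag names to binary changeset ids; the 'n0'/'n1' placeholders
# are hypothetical, the list of applied patches is assumed non-empty (as it
# is after the early return above), and the warning for real tags that
# shadow patch names is omitted.
def _mq_tags_sketch(applied, qparent_node):
    # applied: [(node, patchname), ...] in stack order, oldest first
    tags = dict((name, node) for node, name in applied)
    tags['qbase'] = applied[0][0]
    tags['qtip'] = applied[-1][0]
    tags['qparent'] = qparent_node
    return tags

# _mq_tags_sketch([('n1', 'a.patch'), ('n2', 'b.patch')], 'n0')
#   -> {'a.patch': 'n1', 'b.patch': 'n2', 'qbase': 'n1', 'qtip': 'n2', 'qparent': 'n0'}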
2183 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2183 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2184
2184
2185 cmdtable = {
2185 cmdtable = {
2186 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2186 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2187 "qclone": (clone,
2187 "qclone": (clone,
2188 [('', 'pull', None, _('use pull protocol to copy metadata')),
2188 [('', 'pull', None, _('use pull protocol to copy metadata')),
2189 ('U', 'noupdate', None, _('do not update the new working directories')),
2189 ('U', 'noupdate', None, _('do not update the new working directories')),
2190 ('', 'uncompressed', None,
2190 ('', 'uncompressed', None,
2191 _('use uncompressed transfer (fast over LAN)')),
2191 _('use uncompressed transfer (fast over LAN)')),
2192 ('e', 'ssh', '', _('specify ssh command to use')),
2192 ('e', 'ssh', '', _('specify ssh command to use')),
2193 ('p', 'patches', '', _('location of source patch repo')),
2193 ('p', 'patches', '', _('location of source patch repo')),
2194 ('', 'remotecmd', '',
2194 ('', 'remotecmd', '',
2195 _('specify hg command to run on the remote side'))],
2195 _('specify hg command to run on the remote side'))],
2196 'hg qclone [OPTION]... SOURCE [DEST]'),
2196 'hg qclone [OPTION]... SOURCE [DEST]'),
2197 "qcommit|qci":
2197 "qcommit|qci":
2198 (commit,
2198 (commit,
2199 commands.table["^commit|ci"][1],
2199 commands.table["^commit|ci"][1],
2200 'hg qcommit [OPTION]... [FILE]...'),
2200 'hg qcommit [OPTION]... [FILE]...'),
2201 "^qdiff": (diff,
2201 "^qdiff": (diff,
2202 [('g', 'git', None, _('use git extended diff format')),
2202 [('g', 'git', None, _('use git extended diff format')),
2203 ('I', 'include', [], _('include names matching the given patterns')),
2203 ('I', 'include', [], _('include names matching the given patterns')),
2204 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2204 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2205 'hg qdiff [-I] [-X] [FILE]...'),
2205 'hg qdiff [-I] [-X] [FILE]...'),
2206 "qdelete|qremove|qrm":
2206 "qdelete|qremove|qrm":
2207 (delete,
2207 (delete,
2208 [('k', 'keep', None, _('keep patch file')),
2208 [('k', 'keep', None, _('keep patch file')),
2209 ('r', 'rev', [], _('stop managing a revision'))],
2209 ('r', 'rev', [], _('stop managing a revision'))],
2210 'hg qdelete [-k] [-r REV]... PATCH...'),
2210 'hg qdelete [-k] [-r REV]... PATCH...'),
2211 'qfold':
2211 'qfold':
2212 (fold,
2212 (fold,
2213 [('e', 'edit', None, _('edit patch header')),
2213 [('e', 'edit', None, _('edit patch header')),
2214 ('k', 'keep', None, _('keep folded patch files'))
2214 ('k', 'keep', None, _('keep folded patch files'))
2215 ] + commands.commitopts,
2215 ] + commands.commitopts,
2216 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2216 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2217 'qgoto': (goto, [('f', 'force', None, _('overwrite any local changes'))],
2217 'qgoto': (goto, [('f', 'force', None, _('overwrite any local changes'))],
2218 'hg qgoto [OPT]... PATCH'),
2218 'hg qgoto [OPT]... PATCH'),
2219 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2219 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2220 ('n', 'none', None, _('drop all guards'))],
2220 ('n', 'none', None, _('drop all guards'))],
2221 'hg qguard [PATCH] [+GUARD]... [-GUARD]...'),
2221 'hg qguard [PATCH] [+GUARD]... [-GUARD]...'),
2222 'qheader': (header, [],
2222 'qheader': (header, [],
2223 _('hg qheader [PATCH]')),
2223 _('hg qheader [PATCH]')),
2224 "^qimport":
2224 "^qimport":
2225 (qimport,
2225 (qimport,
2226 [('e', 'existing', None, 'import file in patch dir'),
2226 [('e', 'existing', None, 'import file in patch dir'),
2227 ('n', 'name', '', 'patch file name'),
2227 ('n', 'name', '', 'patch file name'),
2228 ('f', 'force', None, 'overwrite existing files'),
2228 ('f', 'force', None, 'overwrite existing files'),
2229 ('r', 'rev', [], 'place existing revisions under mq control'),
2229 ('r', 'rev', [], 'place existing revisions under mq control'),
2230 ('g', 'git', None, _('use git extended diff format'))],
2230 ('g', 'git', None, _('use git extended diff format'))],
2231 'hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...'),
2231 'hg qimport [-e] [-n NAME] [-f] [-g] [-r REV]... FILE...'),
2232 "^qinit":
2232 "^qinit":
2233 (init,
2233 (init,
2234 [('c', 'create-repo', None, 'create queue repository')],
2234 [('c', 'create-repo', None, 'create queue repository')],
2235 'hg qinit [-c]'),
2235 'hg qinit [-c]'),
2236 "qnew":
2236 "qnew":
2237 (new,
2237 (new,
2238 [('e', 'edit', None, _('edit commit message')),
2238 [('e', 'edit', None, _('edit commit message')),
2239 ('f', 'force', None, _('import uncommitted changes into patch'))
2239 ('f', 'force', None, _('import uncommitted changes into patch'))
2240 ] + commands.commitopts,
2240 ] + commands.commitopts,
2241 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2241 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2242 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2242 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2243 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2243 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2244 "^qpop":
2244 "^qpop":
2245 (pop,
2245 (pop,
2246 [('a', 'all', None, 'pop all patches'),
2246 [('a', 'all', None, 'pop all patches'),
2247 ('n', 'name', '', 'queue name to pop'),
2247 ('n', 'name', '', 'queue name to pop'),
2248 ('f', 'force', None, 'forget any local changes')],
2248 ('f', 'force', None, 'forget any local changes')],
2249 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2249 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2250 "^qpush":
2250 "^qpush":
2251 (push,
2251 (push,
2252 [('f', 'force', None, 'apply if the patch has rejects'),
2252 [('f', 'force', None, 'apply if the patch has rejects'),
2253 ('l', 'list', None, 'list patch name in commit text'),
2253 ('l', 'list', None, 'list patch name in commit text'),
2254 ('a', 'all', None, 'apply all patches'),
2254 ('a', 'all', None, 'apply all patches'),
2255 ('m', 'merge', None, 'merge from another queue'),
2255 ('m', 'merge', None, 'merge from another queue'),
2256 ('n', 'name', '', 'merge queue name')],
2256 ('n', 'name', '', 'merge queue name')],
2257 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2257 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2258 "^qrefresh":
2258 "^qrefresh":
2259 (refresh,
2259 (refresh,
2260 [('e', 'edit', None, _('edit commit message')),
2260 [('e', 'edit', None, _('edit commit message')),
2261 ('g', 'git', None, _('use git extended diff format')),
2261 ('g', 'git', None, _('use git extended diff format')),
2262 ('s', 'short', None, 'refresh only files already in the patch'),
2262 ('s', 'short', None, 'refresh only files already in the patch'),
2263 ('I', 'include', [], _('include names matching the given patterns')),
2263 ('I', 'include', [], _('include names matching the given patterns')),
2264 ('X', 'exclude', [], _('exclude names matching the given patterns'))
2264 ('X', 'exclude', [], _('exclude names matching the given patterns'))
2265 ] + commands.commitopts,
2265 ] + commands.commitopts,
2266 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2266 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...'),
2267 'qrename|qmv':
2267 'qrename|qmv':
2268 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2268 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2269 "qrestore":
2269 "qrestore":
2270 (restore,
2270 (restore,
2271 [('d', 'delete', None, 'delete save entry'),
2271 [('d', 'delete', None, 'delete save entry'),
2272 ('u', 'update', None, 'update queue working dir')],
2272 ('u', 'update', None, 'update queue working dir')],
2273 'hg qrestore [-d] [-u] REV'),
2273 'hg qrestore [-d] [-u] REV'),
2274 "qsave":
2274 "qsave":
2275 (save,
2275 (save,
2276 [('c', 'copy', None, 'copy patch directory'),
2276 [('c', 'copy', None, 'copy patch directory'),
2277 ('n', 'name', '', 'copy directory name'),
2277 ('n', 'name', '', 'copy directory name'),
2278 ('e', 'empty', None, 'clear queue status file'),
2278 ('e', 'empty', None, 'clear queue status file'),
2279 ('f', 'force', None, 'force copy')] + commands.commitopts,
2279 ('f', 'force', None, 'force copy')] + commands.commitopts,
2280 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2280 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2281 "qselect": (select,
2281 "qselect": (select,
2282 [('n', 'none', None, _('disable all guards')),
2282 [('n', 'none', None, _('disable all guards')),
2283 ('s', 'series', None, _('list all guards in series file')),
2283 ('s', 'series', None, _('list all guards in series file')),
2284 ('', 'pop', None,
2284 ('', 'pop', None,
2285 _('pop to before first guarded applied patch')),
2285 _('pop to before first guarded applied patch')),
2286 ('', 'reapply', None, _('pop, then reapply patches'))],
2286 ('', 'reapply', None, _('pop, then reapply patches'))],
2287 'hg qselect [OPTION]... [GUARD]...'),
2287 'hg qselect [OPTION]... [GUARD]...'),
2288 "qseries":
2288 "qseries":
2289 (series,
2289 (series,
2290 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2290 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2291 'hg qseries [-ms]'),
2291 'hg qseries [-ms]'),
2292 "^strip":
2292 "^strip":
2293 (strip,
2293 (strip,
2294 [('f', 'force', None, 'force multi-head removal'),
2294 [('f', 'force', None, 'force multi-head removal'),
2295 ('b', 'backup', None, 'bundle unrelated changesets'),
2295 ('b', 'backup', None, 'bundle unrelated changesets'),
2296 ('n', 'nobackup', None, 'no backups')],
2296 ('n', 'nobackup', None, 'no backups')],
2297 'hg strip [-f] [-b] [-n] REV'),
2297 'hg strip [-f] [-b] [-n] REV'),
2298 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2298 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2299 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2299 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2300 }
2300 }
@@ -1,159 +1,164 @@
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
1 # Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
2 #
2 #
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
3 # This is a small extension for Mercurial (http://www.selenic.com/mercurial)
4 # that removes files not known to mercurial
4 # that removes files not known to mercurial
5 #
5 #
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
6 # This program was inspired by the "cvspurge" script contained in CVS utilities
7 # (http://www.red-bean.com/cvsutils/).
7 # (http://www.red-bean.com/cvsutils/).
8 #
8 #
9 # To enable the "purge" extension put these lines in your ~/.hgrc:
9 # To enable the "purge" extension put these lines in your ~/.hgrc:
10 # [extensions]
10 # [extensions]
11 # hgext.purge =
11 # hgext.purge =
12 #
12 #
13 # For help on the usage of "hg purge" use:
13 # For help on the usage of "hg purge" use:
14 # hg help purge
14 # hg help purge
15 #
15 #
16 # This program is free software; you can redistribute it and/or modify
16 # This program is free software; you can redistribute it and/or modify
17 # it under the terms of the GNU General Public License as published by
17 # it under the terms of the GNU General Public License as published by
18 # the Free Software Foundation; either version 2 of the License, or
18 # the Free Software Foundation; either version 2 of the License, or
19 # (at your option) any later version.
19 # (at your option) any later version.
20 #
20 #
21 # This program is distributed in the hope that it will be useful,
21 # This program is distributed in the hope that it will be useful,
22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
22 # but WITHOUT ANY WARRANTY; without even the implied warranty of
23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
24 # GNU General Public License for more details.
24 # GNU General Public License for more details.
25 #
25 #
26 # You should have received a copy of the GNU General Public License
26 # You should have received a copy of the GNU General Public License
27 # along with this program; if not, write to the Free Software
27 # along with this program; if not, write to the Free Software
28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
28 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
29
29
30 from mercurial import hg, util
30 from mercurial import hg, util
31 from mercurial.i18n import _
31 from mercurial.i18n import _
32 import os
32 import os
33
33
34 def dopurge(ui, repo, dirs=None, act=True, abort_on_err=False, eol='\n',
34 def dopurge(ui, repo, dirs=None, act=True, abort_on_err=False, eol='\n',
35 force=False):
35 force=False, include=None, exclude=None):
36 def error(msg):
36 def error(msg):
37 if abort_on_err:
37 if abort_on_err:
38 raise util.Abort(msg)
38 raise util.Abort(msg)
39 else:
39 else:
40 ui.warn(_('warning: %s\n') % msg)
40 ui.warn(_('warning: %s\n') % msg)
41
41
42 def remove(remove_func, name):
42 def remove(remove_func, name):
43 if act:
43 if act:
44 try:
44 try:
45 remove_func(os.path.join(repo.root, name))
45 remove_func(os.path.join(repo.root, name))
46 except OSError, e:
46 except OSError, e:
47 error(_('%s cannot be removed') % name)
47 error(_('%s cannot be removed') % name)
48 else:
48 else:
49 ui.write('%s%s' % (name, eol))
49 ui.write('%s%s' % (name, eol))
50
50
51 directories = []
51 directories = []
52 files = []
52 files = []
53 missing = []
53 missing = []
54 roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs)
54 roots, match, anypats = util.cmdmatcher(repo.root, repo.getcwd(), dirs,
55 include, exclude)
55 for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
56 for src, f, st in repo.dirstate.statwalk(files=roots, match=match,
56 ignored=True, directories=True):
57 ignored=True, directories=True):
57 if src == 'd':
58 if src == 'd':
58 directories.append(f)
59 directories.append(f)
59 elif src == 'm':
60 elif src == 'm':
60 missing.append(f)
61 missing.append(f)
61 elif src == 'f' and f not in repo.dirstate:
62 elif src == 'f' and f not in repo.dirstate:
62 files.append(f)
63 files.append(f)
63
64
64 _check_missing(ui, repo, missing, force)
65 _check_missing(ui, repo, missing, force)
65
66
66 directories.sort()
67 directories.sort()
67
68
68 for f in files:
69 for f in files:
69 if f not in repo.dirstate:
70 if f not in repo.dirstate:
70 ui.note(_('Removing file %s\n') % f)
71 ui.note(_('Removing file %s\n') % f)
71 remove(os.remove, f)
72 remove(os.remove, f)
72
73
73 for f in directories[::-1]:
74 for f in directories[::-1]:
74 if not os.listdir(repo.wjoin(f)):
75 if match(f) and not os.listdir(repo.wjoin(f)):
75 ui.note(_('Removing directory %s\n') % f)
76 ui.note(_('Removing directory %s\n') % f)
76 remove(os.rmdir, f)
77 remove(os.rmdir, f)
77
78
78 def _check_missing(ui, repo, missing, force=False):
79 def _check_missing(ui, repo, missing, force=False):
79 """Abort if there is the chance of having problems with name-mangling fs
80 """Abort if there is the chance of having problems with name-mangling fs
80
81
81 In a name mangling filesystem (e.g. a case insensitive one)
82 In a name mangling filesystem (e.g. a case insensitive one)
82 dirstate.walk() can yield filenames different from the ones
83 dirstate.walk() can yield filenames different from the ones
83 stored in the dirstate. This already confuses the status and
84 stored in the dirstate. This already confuses the status and
84 add commands, but with purge this may cause data loss.
85 add commands, but with purge this may cause data loss.
85
86
86 To prevent this, _check_missing will abort if there are missing
87 To prevent this, _check_missing will abort if there are missing
87 files. The force option will let the user skip the check if he
88 files. The force option will let the user skip the check if he
88 knows it is safe.
89 knows it is safe.
89
90
90 Even with the force option this function will check if any of the
91 Even with the force option this function will check if any of the
91 missing files is still available in the working dir: if so there
92 missing files is still available in the working dir: if so there
92 may be some problem with the underlying filesystem, so it
93 may be some problem with the underlying filesystem, so it
93 aborts unconditionally."""
94 aborts unconditionally."""
94
95
95 found = [f for f in missing if util.lexists(repo.wjoin(f))]
96 found = [f for f in missing if util.lexists(repo.wjoin(f))]
96
97
97 if found:
98 if found:
98 if not ui.quiet:
99 if not ui.quiet:
99 ui.warn(_("The following tracked files weren't listed by the "
100 ui.warn(_("The following tracked files weren't listed by the "
100 "filesystem, but could still be found:\n"))
101 "filesystem, but could still be found:\n"))
101 for f in found:
102 for f in found:
102 ui.warn("%s\n" % f)
103 ui.warn("%s\n" % f)
103 if util.checkfolding(repo.path):
104 if util.checkfolding(repo.path):
104 ui.warn(_("This is probably due to a case-insensitive "
105 ui.warn(_("This is probably due to a case-insensitive "
105 "filesystem\n"))
106 "filesystem\n"))
106 raise util.Abort(_("purging on name mangling filesystems is not "
107 raise util.Abort(_("purging on name mangling filesystems is not "
107 "yet fully supported"))
108 "yet fully supported"))
108
109
109 if missing and not force:
110 if missing and not force:
110 raise util.Abort(_("there are missing files in the working dir and "
111 raise util.Abort(_("there are missing files in the working dir and "
111 "purge still has problems with them due to name "
112 "purge still has problems with them due to name "
112 "mangling filesystems. "
113 "mangling filesystems. "
113 "Use --force if you know what you are doing"))
114 "Use --force if you know what you are doing"))
114
115
115
116
116 def purge(ui, repo, *dirs, **opts):
117 def purge(ui, repo, *dirs, **opts):
117 '''removes files not tracked by mercurial
118 '''removes files not tracked by mercurial
118
119
119 Delete files not known to mercurial. This is useful to test local and
120 Delete files not known to mercurial. This is useful to test local and
120 uncommitted changes in an otherwise clean source tree.
121 uncommitted changes in an otherwise clean source tree.
121
122
122 This means that purge will delete:
123 This means that purge will delete:
123 - Unknown files: files marked with "?" by "hg status"
124 - Unknown files: files marked with "?" by "hg status"
124 - Ignored files: files usually ignored by Mercurial because they match
125 - Ignored files: files usually ignored by Mercurial because they match
125 a pattern in a ".hgignore" file
126 a pattern in a ".hgignore" file
126 - Empty directories: in fact Mercurial ignores directories unless they
127 - Empty directories: in fact Mercurial ignores directories unless they
127 contain files under source control management
128 contain files under source control management
128 But it will leave untouched:
129 But it will leave untouched:
129 - Unmodified tracked files
130 - Unmodified tracked files
130 - Modified tracked files
131 - Modified tracked files
131 - New files added to the repository (with "hg add")
132 - New files added to the repository (with "hg add")
132
133
133 If directories are given on the command line, only files in these
134 If directories are given on the command line, only files in these
134 directories are considered.
135 directories are considered.
135
136
136 Be careful with purge: you could irreversibly delete some files you
137 Be careful with purge: you could irreversibly delete some files you
137 forgot to add to the repository. If you only want to print the list of
138 forgot to add to the repository. If you only want to print the list of
138 files that this program would delete, use the --print option.
139 files that this program would delete, use the --print option.
139 '''
140 '''
140 act = not opts['print']
141 act = not opts['print']
141 abort_on_err = bool(opts['abort_on_err'])
142 abort_on_err = bool(opts['abort_on_err'])
142 eol = opts['print0'] and '\0' or '\n'
143 eol = opts['print0'] and '\0' or '\n'
143 if eol == '\0':
144 if eol == '\0':
144 # --print0 implies --print
145 # --print0 implies --print
145 act = False
146 act = False
146 force = bool(opts['force'])
147 force = bool(opts['force'])
147 dopurge(ui, repo, dirs, act, abort_on_err, eol, force)
148 include = opts['include']
149 exclude = opts['exclude']
150 dopurge(ui, repo, dirs, act, abort_on_err, eol, force, include, exclude)
148
151
149
152
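# An illustrative summary (not purge's code) of which working-directory
# entries the command removes, following the docstring above. The single
# letters are the usual "hg status" codes and are used here only for the
# example.
def _would_purge_sketch(status_code):
    # '?' unknown and 'I' ignored files are deleted; tracked files
    # ('M' modified, 'A' added, 'C' clean) are left alone.
    return status_code in ('?', 'I')

# _would_purge_sketch('?') -> True     _would_purge_sketch('M') -> False
# _would_purge_sketch('I') -> True     _would_purge_sketch('A') -> False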
150 cmdtable = {
153 cmdtable = {
151 'purge':
154 'purge':
152 (purge,
155 (purge,
153 [('a', 'abort-on-err', None, _('abort if an error occurs')),
156 [('a', 'abort-on-err', None, _('abort if an error occurs')),
154 ('f', 'force', None, _('purge even when missing files are detected')),
157 ('f', 'force', None, _('purge even when missing files are detected')),
155 ('p', 'print', None, _('print the file names instead of deleting them')),
158 ('p', 'print', None, _('print the file names instead of deleting them')),
156 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
159 ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
157 ' (implies -p)'))],
160 ' (implies -p)')),
161 ('I', 'include', [], _('include names matching the given patterns')),
162 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
158 _('hg purge [OPTION]... [DIR]...'))
163 _('hg purge [OPTION]... [DIR]...'))
159 }
164 }
@@ -1,809 +1,820 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import os, sys, mdiff, bdiff, util, templater, patch
10 import os, sys, mdiff, bdiff, util, templater, patch
11
11
12 revrangesep = ':'
12 revrangesep = ':'
13
13
14 def parseurl(url, revs):
15 '''parse url#branch, returning url, branch + revs'''
16
17 if '#' not in url:
18 return url, (revs or None)
19
20 url, rev = url.split('#', 1)
21 return url, revs + [rev]
22
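# Illustrative calls to parseurl() above; the URLs and branch names are made
# up. They are wrapped in a function so nothing runs at import time.
def _parseurl_examples():
    assert parseurl('http://example.com/repo', []) == \
           ('http://example.com/repo', None)
    assert parseurl('http://example.com/repo#stable', []) == \
           ('http://example.com/repo', ['stable'])
    assert parseurl('http://example.com/repo#stable', ['tip']) == \
           ('http://example.com/repo', ['tip', 'stable'])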
14 def revpair(repo, revs):
23 def revpair(repo, revs):
15 '''return pair of nodes, given list of revisions. second item can
24 '''return pair of nodes, given list of revisions. second item can
16 be None, meaning use working dir.'''
25 be None, meaning use working dir.'''
17
26
18 def revfix(repo, val, defval):
27 def revfix(repo, val, defval):
19 if not val and val != 0 and defval is not None:
28 if not val and val != 0 and defval is not None:
20 val = defval
29 val = defval
21 return repo.lookup(val)
30 return repo.lookup(val)
22
31
23 if not revs:
32 if not revs:
24 return repo.dirstate.parents()[0], None
33 return repo.dirstate.parents()[0], None
25 end = None
34 end = None
26 if len(revs) == 1:
35 if len(revs) == 1:
27 if revrangesep in revs[0]:
36 if revrangesep in revs[0]:
28 start, end = revs[0].split(revrangesep, 1)
37 start, end = revs[0].split(revrangesep, 1)
29 start = revfix(repo, start, 0)
38 start = revfix(repo, start, 0)
30 end = revfix(repo, end, repo.changelog.count() - 1)
39 end = revfix(repo, end, repo.changelog.count() - 1)
31 else:
40 else:
32 start = revfix(repo, revs[0], None)
41 start = revfix(repo, revs[0], None)
33 elif len(revs) == 2:
42 elif len(revs) == 2:
34 if revrangesep in revs[0] or revrangesep in revs[1]:
43 if revrangesep in revs[0] or revrangesep in revs[1]:
35 raise util.Abort(_('too many revisions specified'))
44 raise util.Abort(_('too many revisions specified'))
36 start = revfix(repo, revs[0], None)
45 start = revfix(repo, revs[0], None)
37 end = revfix(repo, revs[1], None)
46 end = revfix(repo, revs[1], None)
38 else:
47 else:
39 raise util.Abort(_('too many revisions specified'))
48 raise util.Abort(_('too many revisions specified'))
40 return start, end
49 return start, end
41
50
42 def revrange(repo, revs):
51 def revrange(repo, revs):
43 """Yield revision as strings from a list of revision specifications."""
52 """Yield revision as strings from a list of revision specifications."""
44
53
45 def revfix(repo, val, defval):
54 def revfix(repo, val, defval):
46 if not val and val != 0 and defval is not None:
55 if not val and val != 0 and defval is not None:
47 return defval
56 return defval
48 return repo.changelog.rev(repo.lookup(val))
57 return repo.changelog.rev(repo.lookup(val))
49
58
50 seen, l = {}, []
59 seen, l = {}, []
51 for spec in revs:
60 for spec in revs:
52 if revrangesep in spec:
61 if revrangesep in spec:
53 start, end = spec.split(revrangesep, 1)
62 start, end = spec.split(revrangesep, 1)
54 start = revfix(repo, start, 0)
63 start = revfix(repo, start, 0)
55 end = revfix(repo, end, repo.changelog.count() - 1)
64 end = revfix(repo, end, repo.changelog.count() - 1)
56 step = start > end and -1 or 1
65 step = start > end and -1 or 1
57 for rev in xrange(start, end+step, step):
66 for rev in xrange(start, end+step, step):
58 if rev in seen:
67 if rev in seen:
59 continue
68 continue
60 seen[rev] = 1
69 seen[rev] = 1
61 l.append(rev)
70 l.append(rev)
62 else:
71 else:
63 rev = revfix(repo, spec, None)
72 rev = revfix(repo, spec, None)
64 if rev in seen:
73 if rev in seen:
65 continue
74 continue
66 seen[rev] = 1
75 seen[rev] = 1
67 l.append(rev)
76 l.append(rev)
68
77
69 return l
78 return l
70
79
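# A self-contained sketch of how revrange() above expands "START:END"
# specifications, using plain integers instead of repository lookups. The
# sample specs and the assumed tip revision are hypothetical.
def _revrange_sketch(specs, tiprev):
    seen, result = {}, []
    for spec in specs:
        if ':' in spec:
            start, end = spec.split(':', 1)
            start = int(start) if start else 0
            end = int(end) if end else tiprev
            step = -1 if start > end else 1
            revs = range(start, end + step, step)
        else:
            revs = [int(spec)]
        for rev in revs:
            if rev not in seen:         # duplicates are dropped, order is kept
                seen[rev] = 1
                result.append(rev)
    return result

# _revrange_sketch(['2:4', '3', '5:3'], tiprev=5) -> [2, 3, 4, 5]
# _revrange_sketch([':2'], tiprev=5)              -> [0, 1, 2]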
71 def make_filename(repo, pat, node,
80 def make_filename(repo, pat, node,
72 total=None, seqno=None, revwidth=None, pathname=None):
81 total=None, seqno=None, revwidth=None, pathname=None):
73 node_expander = {
82 node_expander = {
74 'H': lambda: hex(node),
83 'H': lambda: hex(node),
75 'R': lambda: str(repo.changelog.rev(node)),
84 'R': lambda: str(repo.changelog.rev(node)),
76 'h': lambda: short(node),
85 'h': lambda: short(node),
77 }
86 }
78 expander = {
87 expander = {
79 '%': lambda: '%',
88 '%': lambda: '%',
80 'b': lambda: os.path.basename(repo.root),
89 'b': lambda: os.path.basename(repo.root),
81 }
90 }
82
91
83 try:
92 try:
84 if node:
93 if node:
85 expander.update(node_expander)
94 expander.update(node_expander)
86 if node and revwidth is not None:
95 if node and revwidth is not None:
87 expander['r'] = (lambda:
96 expander['r'] = (lambda:
88 str(repo.changelog.rev(node)).zfill(revwidth))
97 str(repo.changelog.rev(node)).zfill(revwidth))
89 if total is not None:
98 if total is not None:
90 expander['N'] = lambda: str(total)
99 expander['N'] = lambda: str(total)
91 if seqno is not None:
100 if seqno is not None:
92 expander['n'] = lambda: str(seqno)
101 expander['n'] = lambda: str(seqno)
93 if total is not None and seqno is not None:
102 if total is not None and seqno is not None:
94 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
103 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
95 if pathname is not None:
104 if pathname is not None:
96 expander['s'] = lambda: os.path.basename(pathname)
105 expander['s'] = lambda: os.path.basename(pathname)
97 expander['d'] = lambda: os.path.dirname(pathname) or '.'
106 expander['d'] = lambda: os.path.dirname(pathname) or '.'
98 expander['p'] = lambda: pathname
107 expander['p'] = lambda: pathname
99
108
100 newname = []
109 newname = []
101 patlen = len(pat)
110 patlen = len(pat)
102 i = 0
111 i = 0
103 while i < patlen:
112 while i < patlen:
104 c = pat[i]
113 c = pat[i]
105 if c == '%':
114 if c == '%':
106 i += 1
115 i += 1
107 c = pat[i]
116 c = pat[i]
108 c = expander[c]()
117 c = expander[c]()
109 newname.append(c)
118 newname.append(c)
110 i += 1
119 i += 1
111 return ''.join(newname)
120 return ''.join(newname)
112 except KeyError, inst:
121 except KeyError, inst:
113 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
122 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
114 inst.args[0])
123 inst.args[0])
115
124
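# Summary of the format specifiers expanded by make_filename() above (the
# example values are hypothetical). Node-, sequence- and path-based
# specifiers are only available when the corresponding arguments are passed:
#
#   %%  literal "%"                    %b  basename of the repository root
#   %H  full changeset hash            %h  short changeset hash
#   %R  changeset revision number      %r  zero-padded revision number
#   %N  total number in the sequence   %n  zero-padded sequence number
#   %s  basename of the given path     %d  dirname of the given path, or "."
#   %p  the given path itself
#
# For example, with revision 42 and short hash 'abc123def456', a pattern like
# 'export-%R-%h.patch' would expand to 'export-42-abc123def456.patch'.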
116 def make_file(repo, pat, node=None,
125 def make_file(repo, pat, node=None,
117 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
126 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
118 if not pat or pat == '-':
127 if not pat or pat == '-':
119 return 'w' in mode and sys.stdout or sys.stdin
128 return 'w' in mode and sys.stdout or sys.stdin
120 if hasattr(pat, 'write') and 'w' in mode:
129 if hasattr(pat, 'write') and 'w' in mode:
121 return pat
130 return pat
122 if hasattr(pat, 'read') and 'r' in mode:
131 if hasattr(pat, 'read') and 'r' in mode:
123 return pat
132 return pat
124 return open(make_filename(repo, pat, node, total, seqno, revwidth,
133 return open(make_filename(repo, pat, node, total, seqno, revwidth,
125 pathname),
134 pathname),
126 mode)
135 mode)
127
136
128 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
137 def matchpats(repo, pats=[], opts={}, globbed=False, default=None):
129 cwd = repo.getcwd()
138 cwd = repo.getcwd()
130 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
139 return util.cmdmatcher(repo.root, cwd, pats or [], opts.get('include'),
131 opts.get('exclude'), globbed=globbed,
140 opts.get('exclude'), globbed=globbed,
132 default=default)
141 default=default)
133
142
134 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
143 def walk(repo, pats=[], opts={}, node=None, badmatch=None, globbed=False,
135 default=None):
144 default=None):
136 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
145 files, matchfn, anypats = matchpats(repo, pats, opts, globbed=globbed,
137 default=default)
146 default=default)
138 exact = dict.fromkeys(files)
147 exact = dict.fromkeys(files)
139 for src, fn in repo.walk(node=node, files=files, match=matchfn,
148 for src, fn in repo.walk(node=node, files=files, match=matchfn,
140 badmatch=badmatch):
149 badmatch=badmatch):
141 yield src, fn, util.pathto(repo.root, repo.getcwd(), fn), fn in exact
150 yield src, fn, util.pathto(repo.root, repo.getcwd(), fn), fn in exact
142
151
143 def findrenames(repo, added=None, removed=None, threshold=0.5):
152 def findrenames(repo, added=None, removed=None, threshold=0.5):
144 '''find renamed files -- yields (before, after, score) tuples'''
153 '''find renamed files -- yields (before, after, score) tuples'''
145 if added is None or removed is None:
154 if added is None or removed is None:
146 added, removed = repo.status()[1:3]
155 added, removed = repo.status()[1:3]
147 ctx = repo.changectx()
156 ctx = repo.changectx()
148 for a in added:
157 for a in added:
149 aa = repo.wread(a)
158 aa = repo.wread(a)
150 bestname, bestscore = None, threshold
159 bestname, bestscore = None, threshold
151 for r in removed:
160 for r in removed:
152 rr = ctx.filectx(r).data()
161 rr = ctx.filectx(r).data()
153
162
154 # bdiff.blocks() returns blocks of matching lines
163 # bdiff.blocks() returns blocks of matching lines
155 # count the number of bytes in each
164 # count the number of bytes in each
156 equal = 0
165 equal = 0
157 alines = mdiff.splitnewlines(aa)
166 alines = mdiff.splitnewlines(aa)
158 matches = bdiff.blocks(aa, rr)
167 matches = bdiff.blocks(aa, rr)
159 for x1,x2,y1,y2 in matches:
168 for x1,x2,y1,y2 in matches:
160 for line in alines[x1:x2]:
169 for line in alines[x1:x2]:
161 equal += len(line)
170 equal += len(line)
162
171
163 myscore = equal*2.0 / (len(aa)+len(rr))
172 lengths = len(aa) + len(rr)
173 if lengths:
174 myscore = equal*2.0 / lengths
164 if myscore >= bestscore:
175 if myscore >= bestscore:
165 bestname, bestscore = r, myscore
176 bestname, bestscore = r, myscore
166 if bestname:
177 if bestname:
167 yield bestname, a, bestscore
178 yield bestname, a, bestscore
168
179
169 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
180 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
170 similarity=None):
181 similarity=None):
171 if dry_run is None:
182 if dry_run is None:
172 dry_run = opts.get('dry_run')
183 dry_run = opts.get('dry_run')
173 if similarity is None:
184 if similarity is None:
174 similarity = float(opts.get('similarity') or 0)
185 similarity = float(opts.get('similarity') or 0)
175 add, remove = [], []
186 add, remove = [], []
176 mapping = {}
187 mapping = {}
177 for src, abs, rel, exact in walk(repo, pats, opts):
188 for src, abs, rel, exact in walk(repo, pats, opts):
178 if src == 'f' and repo.dirstate.state(abs) == '?':
189 if src == 'f' and repo.dirstate.state(abs) == '?':
179 add.append(abs)
190 add.append(abs)
180 mapping[abs] = rel, exact
191 mapping[abs] = rel, exact
181 if repo.ui.verbose or not exact:
192 if repo.ui.verbose or not exact:
182 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
193 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
183 islink = os.path.islink(rel)
194 islink = os.path.islink(rel)
184 if repo.dirstate.state(abs) != 'r' and not islink and not os.path.exists(rel):
195 if repo.dirstate.state(abs) != 'r' and not islink and not os.path.exists(rel):
185 remove.append(abs)
196 remove.append(abs)
186 mapping[abs] = rel, exact
197 mapping[abs] = rel, exact
187 if repo.ui.verbose or not exact:
198 if repo.ui.verbose or not exact:
188 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
199 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
189 if not dry_run:
200 if not dry_run:
190 repo.add(add, wlock=wlock)
201 repo.add(add, wlock=wlock)
191 repo.remove(remove, wlock=wlock)
202 repo.remove(remove, wlock=wlock)
192 if similarity > 0:
203 if similarity > 0:
193 for old, new, score in findrenames(repo, add, remove, similarity):
204 for old, new, score in findrenames(repo, add, remove, similarity):
194 oldrel, oldexact = mapping[old]
205 oldrel, oldexact = mapping[old]
195 newrel, newexact = mapping[new]
206 newrel, newexact = mapping[new]
196 if repo.ui.verbose or not oldexact or not newexact:
207 if repo.ui.verbose or not oldexact or not newexact:
197 repo.ui.status(_('recording removal of %s as rename to %s '
208 repo.ui.status(_('recording removal of %s as rename to %s '
198 '(%d%% similar)\n') %
209 '(%d%% similar)\n') %
199 (oldrel, newrel, score * 100))
210 (oldrel, newrel, score * 100))
200 if not dry_run:
211 if not dry_run:
201 repo.copy(old, new, wlock=wlock)
212 repo.copy(old, new, wlock=wlock)
202
213
203 def service(opts, parentfn=None, initfn=None, runfn=None):
214 def service(opts, parentfn=None, initfn=None, runfn=None):
204 '''Run a command as a service.'''
215 '''Run a command as a service.'''
205
216
206 if opts['daemon'] and not opts['daemon_pipefds']:
217 if opts['daemon'] and not opts['daemon_pipefds']:
207 rfd, wfd = os.pipe()
218 rfd, wfd = os.pipe()
208 args = sys.argv[:]
219 args = sys.argv[:]
209 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
220 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
210 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
221 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
211 args[0], args)
222 args[0], args)
212 os.close(wfd)
223 os.close(wfd)
213 os.read(rfd, 1)
224 os.read(rfd, 1)
214 if parentfn:
225 if parentfn:
215 return parentfn(pid)
226 return parentfn(pid)
216 else:
227 else:
217 os._exit(0)
228 os._exit(0)
218
229
219 if initfn:
230 if initfn:
220 initfn()
231 initfn()
221
232
222 if opts['pid_file']:
233 if opts['pid_file']:
223 fp = open(opts['pid_file'], 'w')
234 fp = open(opts['pid_file'], 'w')
224 fp.write(str(os.getpid()) + '\n')
235 fp.write(str(os.getpid()) + '\n')
225 fp.close()
236 fp.close()
226
237
227 if opts['daemon_pipefds']:
238 if opts['daemon_pipefds']:
228 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
239 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
229 os.close(rfd)
240 os.close(rfd)
230 try:
241 try:
231 os.setsid()
242 os.setsid()
232 except AttributeError:
243 except AttributeError:
233 pass
244 pass
234 os.write(wfd, 'y')
245 os.write(wfd, 'y')
235 os.close(wfd)
246 os.close(wfd)
236 sys.stdout.flush()
247 sys.stdout.flush()
237 sys.stderr.flush()
248 sys.stderr.flush()
238 fd = os.open(util.nulldev, os.O_RDWR)
249 fd = os.open(util.nulldev, os.O_RDWR)
239 if fd != 0: os.dup2(fd, 0)
250 if fd != 0: os.dup2(fd, 0)
240 if fd != 1: os.dup2(fd, 1)
251 if fd != 1: os.dup2(fd, 1)
241 if fd != 2: os.dup2(fd, 2)
252 if fd != 2: os.dup2(fd, 2)
242 if fd not in (0, 1, 2): os.close(fd)
253 if fd not in (0, 1, 2): os.close(fd)
243
254
244 if runfn:
255 if runfn:
245 return runfn()
256 return runfn()
246
257
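# Illustrative sketch (not part of this changeset): a typical caller of
# service() above passes the command options straight through and supplies
# callbacks.  The option keys are the ones service() reads; 'myserver' is a
# hypothetical server object standing in for e.g. an hgweb server.
def example_serve(opts, myserver):
    def init():
        # called once at startup; with --daemon this runs in the child,
        # before the pid file is written and the parent is released
        myserver.bind()
    def run():
        # blocks for the lifetime of the service
        myserver.serve_forever()
    service(opts, initfn=init, runfn=run)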
247 class changeset_printer(object):
258 class changeset_printer(object):
248 '''show changeset information when templating not requested.'''
259 '''show changeset information when templating not requested.'''
249
260
250 def __init__(self, ui, repo, patch, buffered):
261 def __init__(self, ui, repo, patch, buffered):
251 self.ui = ui
262 self.ui = ui
252 self.repo = repo
263 self.repo = repo
253 self.buffered = buffered
264 self.buffered = buffered
254 self.patch = patch
265 self.patch = patch
255 self.header = {}
266 self.header = {}
256 self.hunk = {}
267 self.hunk = {}
257 self.lastheader = None
268 self.lastheader = None
258
269
259 def flush(self, rev):
270 def flush(self, rev):
260 if rev in self.header:
271 if rev in self.header:
261 h = self.header[rev]
272 h = self.header[rev]
262 if h != self.lastheader:
273 if h != self.lastheader:
263 self.lastheader = h
274 self.lastheader = h
264 self.ui.write(h)
275 self.ui.write(h)
265 del self.header[rev]
276 del self.header[rev]
266 if rev in self.hunk:
277 if rev in self.hunk:
267 self.ui.write(self.hunk[rev])
278 self.ui.write(self.hunk[rev])
268 del self.hunk[rev]
279 del self.hunk[rev]
269 return 1
280 return 1
270 return 0
281 return 0
271
282
272 def show(self, rev=0, changenode=None, copies=(), **props):
283 def show(self, rev=0, changenode=None, copies=(), **props):
273 if self.buffered:
284 if self.buffered:
274 self.ui.pushbuffer()
285 self.ui.pushbuffer()
275 self._show(rev, changenode, copies, props)
286 self._show(rev, changenode, copies, props)
276 self.hunk[rev] = self.ui.popbuffer()
287 self.hunk[rev] = self.ui.popbuffer()
277 else:
288 else:
278 self._show(rev, changenode, copies, props)
289 self._show(rev, changenode, copies, props)
279
290
280 def _show(self, rev, changenode, copies, props):
291 def _show(self, rev, changenode, copies, props):
281 '''show a single changeset or file revision'''
292 '''show a single changeset or file revision'''
282 log = self.repo.changelog
293 log = self.repo.changelog
283 if changenode is None:
294 if changenode is None:
284 changenode = log.node(rev)
295 changenode = log.node(rev)
285 elif not rev:
296 elif not rev:
286 rev = log.rev(changenode)
297 rev = log.rev(changenode)
287
298
288 if self.ui.quiet:
299 if self.ui.quiet:
289 self.ui.write("%d:%s\n" % (rev, short(changenode)))
300 self.ui.write("%d:%s\n" % (rev, short(changenode)))
290 return
301 return
291
302
292 changes = log.read(changenode)
303 changes = log.read(changenode)
293 date = util.datestr(changes[2])
304 date = util.datestr(changes[2])
294 extra = changes[5]
305 extra = changes[5]
295 branch = extra.get("branch")
306 branch = extra.get("branch")
296
307
297 hexfunc = self.ui.debugflag and hex or short
308 hexfunc = self.ui.debugflag and hex or short
298
309
299 parents = log.parentrevs(rev)
310 parents = log.parentrevs(rev)
300 if not self.ui.debugflag:
311 if not self.ui.debugflag:
301 if parents[1] == nullrev:
312 if parents[1] == nullrev:
302 if parents[0] >= rev - 1:
313 if parents[0] >= rev - 1:
303 parents = []
314 parents = []
304 else:
315 else:
305 parents = [parents[0]]
316 parents = [parents[0]]
306 parents = [(p, hexfunc(log.node(p))) for p in parents]
317 parents = [(p, hexfunc(log.node(p))) for p in parents]
307
318
308 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
319 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
309
320
310 # don't show the default branch name
321 # don't show the default branch name
311 if branch != 'default':
322 if branch != 'default':
312 branch = util.tolocal(branch)
323 branch = util.tolocal(branch)
313 self.ui.write(_("branch: %s\n") % branch)
324 self.ui.write(_("branch: %s\n") % branch)
314 for tag in self.repo.nodetags(changenode):
325 for tag in self.repo.nodetags(changenode):
315 self.ui.write(_("tag: %s\n") % tag)
326 self.ui.write(_("tag: %s\n") % tag)
316 for parent in parents:
327 for parent in parents:
317 self.ui.write(_("parent: %d:%s\n") % parent)
328 self.ui.write(_("parent: %d:%s\n") % parent)
318
329
319 if self.ui.debugflag:
330 if self.ui.debugflag:
320 self.ui.write(_("manifest: %d:%s\n") %
331 self.ui.write(_("manifest: %d:%s\n") %
321 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
332 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
322 self.ui.write(_("user: %s\n") % changes[1])
333 self.ui.write(_("user: %s\n") % changes[1])
323 self.ui.write(_("date: %s\n") % date)
334 self.ui.write(_("date: %s\n") % date)
324
335
325 if self.ui.debugflag:
336 if self.ui.debugflag:
326 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
337 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
327 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
338 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
328 files):
339 files):
329 if value:
340 if value:
330 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
341 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
331 elif changes[3] and self.ui.verbose:
342 elif changes[3] and self.ui.verbose:
332 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
343 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
333 if copies and self.ui.verbose:
344 if copies and self.ui.verbose:
334 copies = ['%s (%s)' % c for c in copies]
345 copies = ['%s (%s)' % c for c in copies]
335 self.ui.write(_("copies: %s\n") % ' '.join(copies))
346 self.ui.write(_("copies: %s\n") % ' '.join(copies))
336
347
337 if extra and self.ui.debugflag:
348 if extra and self.ui.debugflag:
338 extraitems = extra.items()
349 extraitems = extra.items()
339 extraitems.sort()
350 extraitems.sort()
340 for key, value in extraitems:
351 for key, value in extraitems:
341 self.ui.write(_("extra: %s=%s\n")
352 self.ui.write(_("extra: %s=%s\n")
342 % (key, value.encode('string_escape')))
353 % (key, value.encode('string_escape')))
343
354
344 description = changes[4].strip()
355 description = changes[4].strip()
345 if description:
356 if description:
346 if self.ui.verbose:
357 if self.ui.verbose:
347 self.ui.write(_("description:\n"))
358 self.ui.write(_("description:\n"))
348 self.ui.write(description)
359 self.ui.write(description)
349 self.ui.write("\n\n")
360 self.ui.write("\n\n")
350 else:
361 else:
351 self.ui.write(_("summary: %s\n") %
362 self.ui.write(_("summary: %s\n") %
352 description.splitlines()[0])
363 description.splitlines()[0])
353 self.ui.write("\n")
364 self.ui.write("\n")
354
365
355 self.showpatch(changenode)
366 self.showpatch(changenode)
356
367
357 def showpatch(self, node):
368 def showpatch(self, node):
358 if self.patch:
369 if self.patch:
359 prev = self.repo.changelog.parents(node)[0]
370 prev = self.repo.changelog.parents(node)[0]
360 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
371 patch.diff(self.repo, prev, node, match=self.patch, fp=self.ui)
361 self.ui.write("\n")
372 self.ui.write("\n")
362
373
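# Illustrative sketch (not part of this changeset): how the buffered mode of
# changeset_printer above is driven.  show() renders each revision into
# self.hunk (and possibly self.header) without writing anything, and flush()
# later emits the saved output in whatever order the caller chooses.  The
# revision numbers are arbitrary examples.
def example_buffered_output(ui, repo):
    printer = changeset_printer(ui, repo, patch=False, buffered=True)
    for rev in [12, 10, 11]:
        printer.show(rev)        # buffered, nothing written yet
    for rev in [10, 11, 12]:
        printer.flush(rev)       # written now, in the order we ask for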
363 class changeset_templater(changeset_printer):
374 class changeset_templater(changeset_printer):
364 '''format changeset information.'''
375 '''format changeset information.'''
365
376
366 def __init__(self, ui, repo, patch, mapfile, buffered):
377 def __init__(self, ui, repo, patch, mapfile, buffered):
367 changeset_printer.__init__(self, ui, repo, patch, buffered)
378 changeset_printer.__init__(self, ui, repo, patch, buffered)
368 filters = templater.common_filters.copy()
379 filters = templater.common_filters.copy()
369 filters['formatnode'] = (ui.debugflag and (lambda x: x)
380 filters['formatnode'] = (ui.debugflag and (lambda x: x)
370 or (lambda x: x[:12]))
381 or (lambda x: x[:12]))
371 self.t = templater.templater(mapfile, filters,
382 self.t = templater.templater(mapfile, filters,
372 cache={
383 cache={
373 'parent': '{rev}:{node|formatnode} ',
384 'parent': '{rev}:{node|formatnode} ',
374 'manifest': '{rev}:{node|formatnode}',
385 'manifest': '{rev}:{node|formatnode}',
375 'filecopy': '{name} ({source})'})
386 'filecopy': '{name} ({source})'})
376
387
377 def use_template(self, t):
388 def use_template(self, t):
378 '''set template string to use'''
389 '''set template string to use'''
379 self.t.cache['changeset'] = t
390 self.t.cache['changeset'] = t
380
391
381 def _show(self, rev, changenode, copies, props):
392 def _show(self, rev, changenode, copies, props):
382 '''show a single changeset or file revision'''
393 '''show a single changeset or file revision'''
383 log = self.repo.changelog
394 log = self.repo.changelog
384 if changenode is None:
395 if changenode is None:
385 changenode = log.node(rev)
396 changenode = log.node(rev)
386 elif not rev:
397 elif not rev:
387 rev = log.rev(changenode)
398 rev = log.rev(changenode)
388
399
389 changes = log.read(changenode)
400 changes = log.read(changenode)
390
401
391 def showlist(name, values, plural=None, **args):
402 def showlist(name, values, plural=None, **args):
392 '''expand set of values.
403 '''expand set of values.
393 name is name of key in template map.
404 name is name of key in template map.
394 values is list of strings or dicts.
405 values is list of strings or dicts.
395 plural is plural of name, if not simply name + 's'.
406 plural is plural of name, if not simply name + 's'.
396
407
397 expansion works like this, given name 'foo'.
408 expansion works like this, given name 'foo'.
398
409
399 if values is empty, expand 'no_foos'.
410 if values is empty, expand 'no_foos'.
400
411
401 if 'foo' not in template map, return values as a string,
412 if 'foo' not in template map, return values as a string,
402 joined by space.
413 joined by space.
403
414
404 expand 'start_foos'.
415 expand 'start_foos'.
405
416
406 for each value, expand 'foo'. if 'last_foo' in template
417 for each value, expand 'foo'. if 'last_foo' in template
407 map, expand it instead of 'foo' for last key.
418 map, expand it instead of 'foo' for last key.
408
419
409 expand 'end_foos'.
420 expand 'end_foos'.
410 '''
421 '''
411 if plural: names = plural
422 if plural: names = plural
412 else: names = name + 's'
423 else: names = name + 's'
413 if not values:
424 if not values:
414 noname = 'no_' + names
425 noname = 'no_' + names
415 if noname in self.t:
426 if noname in self.t:
416 yield self.t(noname, **args)
427 yield self.t(noname, **args)
417 return
428 return
418 if name not in self.t:
429 if name not in self.t:
419 if isinstance(values[0], str):
430 if isinstance(values[0], str):
420 yield ' '.join(values)
431 yield ' '.join(values)
421 else:
432 else:
422 for v in values:
433 for v in values:
423 yield dict(v, **args)
434 yield dict(v, **args)
424 return
435 return
425 startname = 'start_' + names
436 startname = 'start_' + names
426 if startname in self.t:
437 if startname in self.t:
427 yield self.t(startname, **args)
438 yield self.t(startname, **args)
428 vargs = args.copy()
439 vargs = args.copy()
429 def one(v, tag=name):
440 def one(v, tag=name):
430 try:
441 try:
431 vargs.update(v)
442 vargs.update(v)
432 except (AttributeError, ValueError):
443 except (AttributeError, ValueError):
433 try:
444 try:
434 for a, b in v:
445 for a, b in v:
435 vargs[a] = b
446 vargs[a] = b
436 except ValueError:
447 except ValueError:
437 vargs[name] = v
448 vargs[name] = v
438 return self.t(tag, **vargs)
449 return self.t(tag, **vargs)
439 lastname = 'last_' + name
450 lastname = 'last_' + name
440 if lastname in self.t:
451 if lastname in self.t:
441 last = values.pop()
452 last = values.pop()
442 else:
453 else:
443 last = None
454 last = None
444 for v in values:
455 for v in values:
445 yield one(v)
456 yield one(v)
446 if last is not None:
457 if last is not None:
447 yield one(last, tag=lastname)
458 yield one(last, tag=lastname)
448 endname = 'end_' + names
459 endname = 'end_' + names
449 if endname in self.t:
460 if endname in self.t:
450 yield self.t(endname, **args)
461 yield self.t(endname, **args)
451
462
452 def showbranches(**args):
463 def showbranches(**args):
453 branch = changes[5].get("branch")
464 branch = changes[5].get("branch")
454 if branch != 'default':
465 if branch != 'default':
455 branch = util.tolocal(branch)
466 branch = util.tolocal(branch)
456 return showlist('branch', [branch], plural='branches', **args)
467 return showlist('branch', [branch], plural='branches', **args)
457
468
458 def showparents(**args):
469 def showparents(**args):
459 parents = [[('rev', log.rev(p)), ('node', hex(p))]
470 parents = [[('rev', log.rev(p)), ('node', hex(p))]
460 for p in log.parents(changenode)
471 for p in log.parents(changenode)
461 if self.ui.debugflag or p != nullid]
472 if self.ui.debugflag or p != nullid]
462 if (not self.ui.debugflag and len(parents) == 1 and
473 if (not self.ui.debugflag and len(parents) == 1 and
463 parents[0][0][1] == rev - 1):
474 parents[0][0][1] == rev - 1):
464 return
475 return
465 return showlist('parent', parents, **args)
476 return showlist('parent', parents, **args)
466
477
467 def showtags(**args):
478 def showtags(**args):
468 return showlist('tag', self.repo.nodetags(changenode), **args)
479 return showlist('tag', self.repo.nodetags(changenode), **args)
469
480
470 def showextras(**args):
481 def showextras(**args):
471 extras = changes[5].items()
482 extras = changes[5].items()
472 extras.sort()
483 extras.sort()
473 for key, value in extras:
484 for key, value in extras:
474 args = args.copy()
485 args = args.copy()
475 args.update(dict(key=key, value=value))
486 args.update(dict(key=key, value=value))
476 yield self.t('extra', **args)
487 yield self.t('extra', **args)
477
488
478 def showcopies(**args):
489 def showcopies(**args):
479 c = [{'name': x[0], 'source': x[1]} for x in copies]
490 c = [{'name': x[0], 'source': x[1]} for x in copies]
480 return showlist('file_copy', c, plural='file_copies', **args)
491 return showlist('file_copy', c, plural='file_copies', **args)
481
492
482 if self.ui.debugflag:
493 if self.ui.debugflag:
483 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
494 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
484 def showfiles(**args):
495 def showfiles(**args):
485 return showlist('file', files[0], **args)
496 return showlist('file', files[0], **args)
486 def showadds(**args):
497 def showadds(**args):
487 return showlist('file_add', files[1], **args)
498 return showlist('file_add', files[1], **args)
488 def showdels(**args):
499 def showdels(**args):
489 return showlist('file_del', files[2], **args)
500 return showlist('file_del', files[2], **args)
490 def showmanifest(**args):
501 def showmanifest(**args):
491 args = args.copy()
502 args = args.copy()
492 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
503 args.update(dict(rev=self.repo.manifest.rev(changes[0]),
493 node=hex(changes[0])))
504 node=hex(changes[0])))
494 return self.t('manifest', **args)
505 return self.t('manifest', **args)
495 else:
506 else:
496 def showfiles(**args):
507 def showfiles(**args):
497 return showlist('file', changes[3], **args)
508 return showlist('file', changes[3], **args)
498 showadds = ''
509 showadds = ''
499 showdels = ''
510 showdels = ''
500 showmanifest = ''
511 showmanifest = ''
501
512
502 defprops = {
513 defprops = {
503 'author': changes[1],
514 'author': changes[1],
504 'branches': showbranches,
515 'branches': showbranches,
505 'date': changes[2],
516 'date': changes[2],
506 'desc': changes[4],
517 'desc': changes[4],
507 'file_adds': showadds,
518 'file_adds': showadds,
508 'file_dels': showdels,
519 'file_dels': showdels,
509 'files': showfiles,
520 'files': showfiles,
510 'file_copies': showcopies,
521 'file_copies': showcopies,
511 'manifest': showmanifest,
522 'manifest': showmanifest,
512 'node': hex(changenode),
523 'node': hex(changenode),
513 'parents': showparents,
524 'parents': showparents,
514 'rev': rev,
525 'rev': rev,
515 'tags': showtags,
526 'tags': showtags,
516 'extras': showextras,
527 'extras': showextras,
517 }
528 }
518 props = props.copy()
529 props = props.copy()
519 props.update(defprops)
530 props.update(defprops)
520
531
521 try:
532 try:
522 if self.ui.debugflag and 'header_debug' in self.t:
533 if self.ui.debugflag and 'header_debug' in self.t:
523 key = 'header_debug'
534 key = 'header_debug'
524 elif self.ui.quiet and 'header_quiet' in self.t:
535 elif self.ui.quiet and 'header_quiet' in self.t:
525 key = 'header_quiet'
536 key = 'header_quiet'
526 elif self.ui.verbose and 'header_verbose' in self.t:
537 elif self.ui.verbose and 'header_verbose' in self.t:
527 key = 'header_verbose'
538 key = 'header_verbose'
528 elif 'header' in self.t:
539 elif 'header' in self.t:
529 key = 'header'
540 key = 'header'
530 else:
541 else:
531 key = ''
542 key = ''
532 if key:
543 if key:
533 h = templater.stringify(self.t(key, **props))
544 h = templater.stringify(self.t(key, **props))
534 if self.buffered:
545 if self.buffered:
535 self.header[rev] = h
546 self.header[rev] = h
536 else:
547 else:
537 self.ui.write(h)
548 self.ui.write(h)
538 if self.ui.debugflag and 'changeset_debug' in self.t:
549 if self.ui.debugflag and 'changeset_debug' in self.t:
539 key = 'changeset_debug'
550 key = 'changeset_debug'
540 elif self.ui.quiet and 'changeset_quiet' in self.t:
551 elif self.ui.quiet and 'changeset_quiet' in self.t:
541 key = 'changeset_quiet'
552 key = 'changeset_quiet'
542 elif self.ui.verbose and 'changeset_verbose' in self.t:
553 elif self.ui.verbose and 'changeset_verbose' in self.t:
543 key = 'changeset_verbose'
554 key = 'changeset_verbose'
544 else:
555 else:
545 key = 'changeset'
556 key = 'changeset'
546 self.ui.write(templater.stringify(self.t(key, **props)))
557 self.ui.write(templater.stringify(self.t(key, **props)))
547 self.showpatch(changenode)
558 self.showpatch(changenode)
548 except KeyError, inst:
559 except KeyError, inst:
549 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
560 raise util.Abort(_("%s: no key named '%s'") % (self.t.mapfile,
550 inst.args[0]))
561 inst.args[0]))
551 except SyntaxError, inst:
562 except SyntaxError, inst:
552 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
563 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
553
564
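# Illustrative sketch (not part of this changeset): the kind of entries a map
# for changeset_templater above could contain.  showlist() looks up
# 'no_<plural>', 'start_<plural>', '<name>', 'last_<name>' and 'end_<plural>'
# keys, and the header/changeset lookups in _show() fall back from
# '<key>_debug'/'<key>_quiet'/'<key>_verbose' to the plain key.  All entries
# below are made up for illustration.
example_map = {
    'changeset': 'changeset: {rev}:{node|formatnode}\n{desc}\n',
    'changeset_quiet': '{rev}:{node|formatnode}\n',
    'start_tags': 'tags: ',
    'tag': '{tag}, ',
    'last_tag': '{tag}\n',
    'no_tags': '',                      # used when the changeset has no tags
    'header_verbose': '== {rev} ==\n',  # preferred over 'header' with -v
}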
554 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
565 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
555 """show one changeset using template or regular display.
566 """show one changeset using template or regular display.
556
567
557 Display format will be the first non-empty hit of:
568 Display format will be the first non-empty hit of:
558 1. option 'template'
569 1. option 'template'
559 2. option 'style'
570 2. option 'style'
560 3. [ui] setting 'logtemplate'
571 3. [ui] setting 'logtemplate'
561 4. [ui] setting 'style'
572 4. [ui] setting 'style'
562 If all of these values are either unset or the empty string,
573 If all of these values are either unset or the empty string,
563 regular display via changeset_printer() is done.
574 regular display via changeset_printer() is done.
564 """
575 """
565 # options
576 # options
566 patch = False
577 patch = False
567 if opts.get('patch'):
578 if opts.get('patch'):
568 patch = matchfn or util.always
579 patch = matchfn or util.always
569
580
570 tmpl = opts.get('template')
581 tmpl = opts.get('template')
571 mapfile = None
582 mapfile = None
572 if tmpl:
583 if tmpl:
573 tmpl = templater.parsestring(tmpl, quoted=False)
584 tmpl = templater.parsestring(tmpl, quoted=False)
574 else:
585 else:
575 mapfile = opts.get('style')
586 mapfile = opts.get('style')
576 # ui settings
587 # ui settings
577 if not mapfile:
588 if not mapfile:
578 tmpl = ui.config('ui', 'logtemplate')
589 tmpl = ui.config('ui', 'logtemplate')
579 if tmpl:
590 if tmpl:
580 tmpl = templater.parsestring(tmpl)
591 tmpl = templater.parsestring(tmpl)
581 else:
592 else:
582 mapfile = ui.config('ui', 'style')
593 mapfile = ui.config('ui', 'style')
583
594
584 if tmpl or mapfile:
595 if tmpl or mapfile:
585 if mapfile:
596 if mapfile:
586 if not os.path.split(mapfile)[0]:
597 if not os.path.split(mapfile)[0]:
587 mapname = (templater.templatepath('map-cmdline.' + mapfile)
598 mapname = (templater.templatepath('map-cmdline.' + mapfile)
588 or templater.templatepath(mapfile))
599 or templater.templatepath(mapfile))
589 if mapname: mapfile = mapname
600 if mapname: mapfile = mapname
590 try:
601 try:
591 t = changeset_templater(ui, repo, patch, mapfile, buffered)
602 t = changeset_templater(ui, repo, patch, mapfile, buffered)
592 except SyntaxError, inst:
603 except SyntaxError, inst:
593 raise util.Abort(inst.args[0])
604 raise util.Abort(inst.args[0])
594 if tmpl: t.use_template(tmpl)
605 if tmpl: t.use_template(tmpl)
595 return t
606 return t
596 return changeset_printer(ui, repo, patch, buffered)
607 return changeset_printer(ui, repo, patch, buffered)
597
608
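# Illustrative sketch (not part of this changeset): exercising the precedence
# show_changeset() documents.  'compact' is assumed to name a shipped
# map-cmdline.* style; ui and repo are assumed to exist and to hold at least
# one revision.
def example_pick_displayer(ui, repo):
    d = show_changeset(ui, repo, {'template': '{rev}:{node|short}\n'})
    d.show(rev=0)                                   # 1. --template wins
    d = show_changeset(ui, repo, {'style': 'compact'})
    d.show(rev=0)                                   # 2. --style map file
    # with neither option set, [ui] logtemplate and then [ui] style are
    # tried; only if all are empty does changeset_printer take over
    d = show_changeset(ui, repo, {})
    d.show(rev=0)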
598 def finddate(ui, repo, date):
609 def finddate(ui, repo, date):
599 """Find the tipmost changeset that matches the given date spec"""
610 """Find the tipmost changeset that matches the given date spec"""
600 df = util.matchdate(date + " to " + date)
611 df = util.matchdate(date + " to " + date)
601 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
612 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
602 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
613 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
603 results = {}
614 results = {}
604 for st, rev, fns in changeiter:
615 for st, rev, fns in changeiter:
605 if st == 'add':
616 if st == 'add':
606 d = get(rev)[2]
617 d = get(rev)[2]
607 if df(d[0]):
618 if df(d[0]):
608 results[rev] = d
619 results[rev] = d
609 elif st == 'iter':
620 elif st == 'iter':
610 if rev in results:
621 if rev in results:
611 ui.status("Found revision %s from %s\n" %
622 ui.status("Found revision %s from %s\n" %
612 (rev, util.datestr(results[rev])))
623 (rev, util.datestr(results[rev])))
613 return str(rev)
624 return str(rev)
614
625
615 raise util.Abort(_("revision matching date not found"))
626 raise util.Abort(_("revision matching date not found"))
616
627
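# Illustrative sketch (not part of this changeset): what finddate() above
# builds internally.  matchdate() turns a "<spec> to <spec>" string into a
# predicate over unix timestamps, and the changeset tuple returned by
# changectx().changeset() carries the date pair at index 2.  The date string
# and function name are only examples.
def example_matches_day(repo, rev, day="2007-05-01"):
    df = util.matchdate(day + " to " + day)
    when = repo.changectx(rev).changeset()[2]    # (unixtime, offset)
    return df(when[0])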
617 def walkchangerevs(ui, repo, pats, change, opts):
628 def walkchangerevs(ui, repo, pats, change, opts):
618 '''Iterate over files and the revs they changed in.
629 '''Iterate over files and the revs they changed in.
619
630
620 Callers most commonly need to iterate backwards over the history
631 Callers most commonly need to iterate backwards over the history
621 they are interested in. Doing so has awful (quadratic-looking)
632 they are interested in. Doing so has awful (quadratic-looking)
622 performance, so we use iterators in a "windowed" way.
633 performance, so we use iterators in a "windowed" way.
623
634
624 We walk a window of revisions in the desired order. Within the
635 We walk a window of revisions in the desired order. Within the
625 window, we first walk forwards to gather data, then in the desired
636 window, we first walk forwards to gather data, then in the desired
626 order (usually backwards) to display it.
637 order (usually backwards) to display it.
627
638
628 This function returns an (iterator, matchfn) tuple. The iterator
639 This function returns an (iterator, matchfn) tuple. The iterator
629 yields 3-tuples. They will be of one of the following forms:
640 yields 3-tuples. They will be of one of the following forms:
630
641
631 "window", incrementing, lastrev: stepping through a window,
642 "window", incrementing, lastrev: stepping through a window,
632 positive if walking forwards through revs, last rev in the
643 positive if walking forwards through revs, last rev in the
633 sequence iterated over - use to reset state for the current window
644 sequence iterated over - use to reset state for the current window
634
645
635 "add", rev, fns: out-of-order traversal of the given file names
646 "add", rev, fns: out-of-order traversal of the given file names
636 fns, which changed during revision rev - use to gather data for
647 fns, which changed during revision rev - use to gather data for
637 possible display
648 possible display
638
649
639 "iter", rev, None: in-order traversal of the revs earlier iterated
650 "iter", rev, None: in-order traversal of the revs earlier iterated
640 over with "add" - use to display data'''
651 over with "add" - use to display data'''
641
652
642 def increasing_windows(start, end, windowsize=8, sizelimit=512):
653 def increasing_windows(start, end, windowsize=8, sizelimit=512):
643 if start < end:
654 if start < end:
644 while start < end:
655 while start < end:
645 yield start, min(windowsize, end-start)
656 yield start, min(windowsize, end-start)
646 start += windowsize
657 start += windowsize
647 if windowsize < sizelimit:
658 if windowsize < sizelimit:
648 windowsize *= 2
659 windowsize *= 2
649 else:
660 else:
650 while start > end:
661 while start > end:
651 yield start, min(windowsize, start-end-1)
662 yield start, min(windowsize, start-end-1)
652 start -= windowsize
663 start -= windowsize
653 if windowsize < sizelimit:
664 if windowsize < sizelimit:
654 windowsize *= 2
665 windowsize *= 2
655
666
656 files, matchfn, anypats = matchpats(repo, pats, opts)
667 files, matchfn, anypats = matchpats(repo, pats, opts)
657 follow = opts.get('follow') or opts.get('follow_first')
668 follow = opts.get('follow') or opts.get('follow_first')
658
669
659 if repo.changelog.count() == 0:
670 if repo.changelog.count() == 0:
660 return [], matchfn
671 return [], matchfn
661
672
662 if follow:
673 if follow:
663 defrange = '%s:0' % repo.changectx().rev()
674 defrange = '%s:0' % repo.changectx().rev()
664 else:
675 else:
665 defrange = 'tip:0'
676 defrange = 'tip:0'
666 revs = revrange(repo, opts['rev'] or [defrange])
677 revs = revrange(repo, opts['rev'] or [defrange])
667 wanted = {}
678 wanted = {}
668 slowpath = anypats or opts.get('removed')
679 slowpath = anypats or opts.get('removed')
669 fncache = {}
680 fncache = {}
670
681
671 if not slowpath and not files:
682 if not slowpath and not files:
672 # No files, no patterns. Display all revs.
683 # No files, no patterns. Display all revs.
673 wanted = dict.fromkeys(revs)
684 wanted = dict.fromkeys(revs)
674 copies = []
685 copies = []
675 if not slowpath:
686 if not slowpath:
676 # Only files, no patterns. Check the history of each file.
687 # Only files, no patterns. Check the history of each file.
677 def filerevgen(filelog, node):
688 def filerevgen(filelog, node):
678 cl_count = repo.changelog.count()
689 cl_count = repo.changelog.count()
679 if node is None:
690 if node is None:
680 last = filelog.count() - 1
691 last = filelog.count() - 1
681 else:
692 else:
682 last = filelog.rev(node)
693 last = filelog.rev(node)
683 for i, window in increasing_windows(last, nullrev):
694 for i, window in increasing_windows(last, nullrev):
684 revs = []
695 revs = []
685 for j in xrange(i - window, i + 1):
696 for j in xrange(i - window, i + 1):
686 n = filelog.node(j)
697 n = filelog.node(j)
687 revs.append((filelog.linkrev(n),
698 revs.append((filelog.linkrev(n),
688 follow and filelog.renamed(n)))
699 follow and filelog.renamed(n)))
689 revs.reverse()
700 revs.reverse()
690 for rev in revs:
701 for rev in revs:
691 # only yield revs for which we have the changelog entry; they can
702 # only yield revs for which we have the changelog entry; they can
692 # be missing while doing "hg log" during a pull or commit
703 # be missing while doing "hg log" during a pull or commit
693 if rev[0] < cl_count:
704 if rev[0] < cl_count:
694 yield rev
705 yield rev
695 def iterfiles():
706 def iterfiles():
696 for filename in files:
707 for filename in files:
697 yield filename, None
708 yield filename, None
698 for filename_node in copies:
709 for filename_node in copies:
699 yield filename_node
710 yield filename_node
700 minrev, maxrev = min(revs), max(revs)
711 minrev, maxrev = min(revs), max(revs)
701 for file_, node in iterfiles():
712 for file_, node in iterfiles():
702 filelog = repo.file(file_)
713 filelog = repo.file(file_)
703 # A zero count may be a directory or deleted file, so
714 # A zero count may be a directory or deleted file, so
704 # try to find matching entries on the slow path.
715 # try to find matching entries on the slow path.
705 if filelog.count() == 0:
716 if filelog.count() == 0:
706 slowpath = True
717 slowpath = True
707 break
718 break
708 for rev, copied in filerevgen(filelog, node):
719 for rev, copied in filerevgen(filelog, node):
709 if rev <= maxrev:
720 if rev <= maxrev:
710 if rev < minrev:
721 if rev < minrev:
711 break
722 break
712 fncache.setdefault(rev, [])
723 fncache.setdefault(rev, [])
713 fncache[rev].append(file_)
724 fncache[rev].append(file_)
714 wanted[rev] = 1
725 wanted[rev] = 1
715 if follow and copied:
726 if follow and copied:
716 copies.append(copied)
727 copies.append(copied)
717 if slowpath:
728 if slowpath:
718 if follow:
729 if follow:
719 raise util.Abort(_('can only follow copies/renames for explicit '
730 raise util.Abort(_('can only follow copies/renames for explicit '
720 'file names'))
731 'file names'))
721
732
722 # The slow path checks files modified in every changeset.
733 # The slow path checks files modified in every changeset.
723 def changerevgen():
734 def changerevgen():
724 for i, window in increasing_windows(repo.changelog.count()-1,
735 for i, window in increasing_windows(repo.changelog.count()-1,
725 nullrev):
736 nullrev):
726 for j in xrange(i - window, i + 1):
737 for j in xrange(i - window, i + 1):
727 yield j, change(j)[3]
738 yield j, change(j)[3]
728
739
729 for rev, changefiles in changerevgen():
740 for rev, changefiles in changerevgen():
730 matches = filter(matchfn, changefiles)
741 matches = filter(matchfn, changefiles)
731 if matches:
742 if matches:
732 fncache[rev] = matches
743 fncache[rev] = matches
733 wanted[rev] = 1
744 wanted[rev] = 1
734
745
735 class followfilter:
746 class followfilter:
736 def __init__(self, onlyfirst=False):
747 def __init__(self, onlyfirst=False):
737 self.startrev = nullrev
748 self.startrev = nullrev
738 self.roots = []
749 self.roots = []
739 self.onlyfirst = onlyfirst
750 self.onlyfirst = onlyfirst
740
751
741 def match(self, rev):
752 def match(self, rev):
742 def realparents(rev):
753 def realparents(rev):
743 if self.onlyfirst:
754 if self.onlyfirst:
744 return repo.changelog.parentrevs(rev)[0:1]
755 return repo.changelog.parentrevs(rev)[0:1]
745 else:
756 else:
746 return filter(lambda x: x != nullrev,
757 return filter(lambda x: x != nullrev,
747 repo.changelog.parentrevs(rev))
758 repo.changelog.parentrevs(rev))
748
759
749 if self.startrev == nullrev:
760 if self.startrev == nullrev:
750 self.startrev = rev
761 self.startrev = rev
751 return True
762 return True
752
763
753 if rev > self.startrev:
764 if rev > self.startrev:
754 # forward: all descendants
765 # forward: all descendants
755 if not self.roots:
766 if not self.roots:
756 self.roots.append(self.startrev)
767 self.roots.append(self.startrev)
757 for parent in realparents(rev):
768 for parent in realparents(rev):
758 if parent in self.roots:
769 if parent in self.roots:
759 self.roots.append(rev)
770 self.roots.append(rev)
760 return True
771 return True
761 else:
772 else:
762 # backwards: all parents
773 # backwards: all parents
763 if not self.roots:
774 if not self.roots:
764 self.roots.extend(realparents(self.startrev))
775 self.roots.extend(realparents(self.startrev))
765 if rev in self.roots:
776 if rev in self.roots:
766 self.roots.remove(rev)
777 self.roots.remove(rev)
767 self.roots.extend(realparents(rev))
778 self.roots.extend(realparents(rev))
768 return True
779 return True
769
780
770 return False
781 return False
771
782
772 # it might be worthwhile to do this in the iterator if the rev range
783 # it might be worthwhile to do this in the iterator if the rev range
773 # is descending and the prune args are all within that range
784 # is descending and the prune args are all within that range
774 for rev in opts.get('prune', ()):
785 for rev in opts.get('prune', ()):
775 rev = repo.changelog.rev(repo.lookup(rev))
786 rev = repo.changelog.rev(repo.lookup(rev))
776 ff = followfilter()
787 ff = followfilter()
777 stop = min(revs[0], revs[-1])
788 stop = min(revs[0], revs[-1])
778 for x in xrange(rev, stop-1, -1):
789 for x in xrange(rev, stop-1, -1):
779 if ff.match(x) and x in wanted:
790 if ff.match(x) and x in wanted:
780 del wanted[x]
791 del wanted[x]
781
792
782 def iterate():
793 def iterate():
783 if follow and not files:
794 if follow and not files:
784 ff = followfilter(onlyfirst=opts.get('follow_first'))
795 ff = followfilter(onlyfirst=opts.get('follow_first'))
785 def want(rev):
796 def want(rev):
786 if ff.match(rev) and rev in wanted:
797 if ff.match(rev) and rev in wanted:
787 return True
798 return True
788 return False
799 return False
789 else:
800 else:
790 def want(rev):
801 def want(rev):
791 return rev in wanted
802 return rev in wanted
792
803
793 for i, window in increasing_windows(0, len(revs)):
804 for i, window in increasing_windows(0, len(revs)):
794 yield 'window', revs[0] < revs[-1], revs[-1]
805 yield 'window', revs[0] < revs[-1], revs[-1]
795 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
806 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
796 srevs = list(nrevs)
807 srevs = list(nrevs)
797 srevs.sort()
808 srevs.sort()
798 for rev in srevs:
809 for rev in srevs:
799 fns = fncache.get(rev)
810 fns = fncache.get(rev)
800 if not fns:
811 if not fns:
801 def fns_generator():
812 def fns_generator():
802 for f in change(rev)[3]:
813 for f in change(rev)[3]:
803 if matchfn(f):
814 if matchfn(f):
804 yield f
815 yield f
805 fns = fns_generator()
816 fns = fns_generator()
806 yield 'add', rev, fns
817 yield 'add', rev, fns
807 for rev in nrevs:
818 for rev in nrevs:
808 yield 'iter', rev, None
819 yield 'iter', rev, None
809 return iterate(), matchfn
820 return iterate(), matchfn
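# Illustrative sketch (not part of this changeset): consuming the
# (iterator, matchfn) pair returned by walkchangerevs() above, in the same
# style as finddate().  'add' events arrive while walking forwards through a
# window and are used to gather data; 'iter' events then replay the same
# revisions in the requested display order.  The function name is made up;
# opts is assumed to carry the usual 'rev'/'follow' keys.
def example_count_files(ui, repo, pats, opts):
    get = util.cachefunc(lambda r: repo.changectx(r).changeset())
    changeiter, matchfn = walkchangerevs(ui, repo, pats, get, opts)
    gathered = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            gathered.clear()                 # reset per-window state
        elif st == 'add':
            gathered[rev] = len(list(fns))   # out of order, data only
        elif st == 'iter':
            ui.write("%d: %d file(s)\n" % (rev, gathered.get(rev, 0)))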
@@ -1,3394 +1,3409 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 import demandimport; demandimport.enable()
8 import demandimport; demandimport.enable()
9 from node import *
9 from node import *
10 from i18n import _
10 from i18n import _
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
11 import bisect, os, re, sys, signal, imp, urllib, pdb, shlex, stat
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
12 import fancyopts, ui, hg, util, lock, revlog, bundlerepo
13 import difflib, patch, time, help, mdiff, tempfile
13 import difflib, patch, time, help, mdiff, tempfile
14 import traceback, errno, version, atexit, socket
14 import traceback, errno, version, atexit, socket
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
15 import archival, changegroup, cmdutil, hgweb.server, sshserver
16
16
17 class UnknownCommand(Exception):
17 class UnknownCommand(Exception):
18 """Exception raised if command is not in the command table."""
18 """Exception raised if command is not in the command table."""
19 class AmbiguousCommand(Exception):
19 class AmbiguousCommand(Exception):
20 """Exception raised if command shortcut matches more than one command."""
20 """Exception raised if command shortcut matches more than one command."""
21
21
22 def bail_if_changed(repo):
22 def bail_if_changed(repo):
23 modified, added, removed, deleted = repo.status()[:4]
23 modified, added, removed, deleted = repo.status()[:4]
24 if modified or added or removed or deleted:
24 if modified or added or removed or deleted:
25 raise util.Abort(_("outstanding uncommitted changes"))
25 raise util.Abort(_("outstanding uncommitted changes"))
26
26
27 def logmessage(opts):
27 def logmessage(opts):
28 """ get the log message according to -m and -l option """
28 """ get the log message according to -m and -l option """
29 message = opts['message']
29 message = opts['message']
30 logfile = opts['logfile']
30 logfile = opts['logfile']
31
31
32 if message and logfile:
32 if message and logfile:
33 raise util.Abort(_('options --message and --logfile are mutually '
33 raise util.Abort(_('options --message and --logfile are mutually '
34 'exclusive'))
34 'exclusive'))
35 if not message and logfile:
35 if not message and logfile:
36 try:
36 try:
37 if logfile == '-':
37 if logfile == '-':
38 message = sys.stdin.read()
38 message = sys.stdin.read()
39 else:
39 else:
40 message = open(logfile).read()
40 message = open(logfile).read()
41 except IOError, inst:
41 except IOError, inst:
42 raise util.Abort(_("can't read commit message '%s': %s") %
42 raise util.Abort(_("can't read commit message '%s': %s") %
43 (logfile, inst.strerror))
43 (logfile, inst.strerror))
44 return message
44 return message
45
45
46 def setremoteconfig(ui, opts):
46 def setremoteconfig(ui, opts):
47 "copy remote options to ui tree"
47 "copy remote options to ui tree"
48 if opts.get('ssh'):
48 if opts.get('ssh'):
49 ui.setconfig("ui", "ssh", opts['ssh'])
49 ui.setconfig("ui", "ssh", opts['ssh'])
50 if opts.get('remotecmd'):
50 if opts.get('remotecmd'):
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
51 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
52
52
53 # Commands start here, listed alphabetically
53 # Commands start here, listed alphabetically
54
54
55 def add(ui, repo, *pats, **opts):
55 def add(ui, repo, *pats, **opts):
56 """add the specified files on the next commit
56 """add the specified files on the next commit
57
57
58 Schedule files to be version controlled and added to the repository.
58 Schedule files to be version controlled and added to the repository.
59
59
60 The files will be added to the repository at the next commit. To
60 The files will be added to the repository at the next commit. To
61 undo an add before that, see hg revert.
61 undo an add before that, see hg revert.
62
62
63 If no names are given, add all files in the repository.
63 If no names are given, add all files in the repository.
64 """
64 """
65
65
66 names = []
66 names = []
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
67 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
68 if exact:
68 if exact:
69 if ui.verbose:
69 if ui.verbose:
70 ui.status(_('adding %s\n') % rel)
70 ui.status(_('adding %s\n') % rel)
71 names.append(abs)
71 names.append(abs)
72 elif repo.dirstate.state(abs) == '?':
72 elif repo.dirstate.state(abs) == '?':
73 ui.status(_('adding %s\n') % rel)
73 ui.status(_('adding %s\n') % rel)
74 names.append(abs)
74 names.append(abs)
75 if not opts.get('dry_run'):
75 if not opts.get('dry_run'):
76 repo.add(names)
76 repo.add(names)
77
77
78 def addremove(ui, repo, *pats, **opts):
78 def addremove(ui, repo, *pats, **opts):
79 """add all new files, delete all missing files
79 """add all new files, delete all missing files
80
80
81 Add all new files and remove all missing files from the repository.
81 Add all new files and remove all missing files from the repository.
82
82
83 New files are ignored if they match any of the patterns in .hgignore. As
83 New files are ignored if they match any of the patterns in .hgignore. As
84 with add, these changes take effect at the next commit.
84 with add, these changes take effect at the next commit.
85
85
86 Use the -s option to detect renamed files. With a parameter > 0,
86 Use the -s option to detect renamed files. With a parameter > 0,
87 this compares every removed file with every added file and records
87 this compares every removed file with every added file and records
88 those similar enough as renames. This option takes a percentage
88 those similar enough as renames. This option takes a percentage
89 between 0 (disabled) and 100 (files must be identical) as its
89 between 0 (disabled) and 100 (files must be identical) as its
90 parameter. Detecting renamed files this way can be expensive.
90 parameter. Detecting renamed files this way can be expensive.
91 """
91 """
92 sim = float(opts.get('similarity') or 0)
92 sim = float(opts.get('similarity') or 0)
93 if sim < 0 or sim > 100:
93 if sim < 0 or sim > 100:
94 raise util.Abort(_('similarity must be between 0 and 100'))
94 raise util.Abort(_('similarity must be between 0 and 100'))
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
95 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
96
96
97 def annotate(ui, repo, *pats, **opts):
97 def annotate(ui, repo, *pats, **opts):
98 """show changeset information per file line
98 """show changeset information per file line
99
99
100 List changes in files, showing the revision id responsible for each line
100 List changes in files, showing the revision id responsible for each line
101
101
102 This command is useful to discover who did a change or when a change took
102 This command is useful to discover who did a change or when a change took
103 place.
103 place.
104
104
105 Without the -a option, annotate will avoid processing files it
105 Without the -a option, annotate will avoid processing files it
106 detects as binary. With -a, annotate will generate an annotation
106 detects as binary. With -a, annotate will generate an annotation
107 anyway, probably with undesirable results.
107 anyway, probably with undesirable results.
108 """
108 """
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
109 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
110
110
111 if not pats:
111 if not pats:
112 raise util.Abort(_('at least one file name or pattern required'))
112 raise util.Abort(_('at least one file name or pattern required'))
113
113
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
114 opmap = [['user', lambda x: ui.shortuser(x.user())],
115 ['number', lambda x: str(x.rev())],
115 ['number', lambda x: str(x.rev())],
116 ['changeset', lambda x: short(x.node())],
116 ['changeset', lambda x: short(x.node())],
117 ['date', getdate], ['follow', lambda x: x.path()]]
117 ['date', getdate], ['follow', lambda x: x.path()]]
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
118 if (not opts['user'] and not opts['changeset'] and not opts['date']
119 and not opts['follow']):
119 and not opts['follow']):
120 opts['number'] = 1
120 opts['number'] = 1
121
121
122 ctx = repo.changectx(opts['rev'])
122 ctx = repo.changectx(opts['rev'])
123
123
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
124 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
125 node=ctx.node()):
125 node=ctx.node()):
126 fctx = ctx.filectx(abs)
126 fctx = ctx.filectx(abs)
127 if not opts['text'] and util.binary(fctx.data()):
127 if not opts['text'] and util.binary(fctx.data()):
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
128 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
129 continue
129 continue
130
130
131 lines = fctx.annotate(follow=opts.get('follow'))
131 lines = fctx.annotate(follow=opts.get('follow'))
132 pieces = []
132 pieces = []
133
133
134 for o, f in opmap:
134 for o, f in opmap:
135 if opts[o]:
135 if opts[o]:
136 l = [f(n) for n, dummy in lines]
136 l = [f(n) for n, dummy in lines]
137 if l:
137 if l:
138 m = max(map(len, l))
138 m = max(map(len, l))
139 pieces.append(["%*s" % (m, x) for x in l])
139 pieces.append(["%*s" % (m, x) for x in l])
140
140
141 if pieces:
141 if pieces:
142 for p, l in zip(zip(*pieces), lines):
142 for p, l in zip(zip(*pieces), lines):
143 ui.write("%s: %s" % (" ".join(p), l[1]))
143 ui.write("%s: %s" % (" ".join(p), l[1]))
144
144
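# Illustrative sketch (not part of this changeset): the alignment trick used
# at the end of annotate() above.  Each requested column is padded to the
# width of its widest entry with "%*s", then zip(*pieces) regroups the
# per-column lists into per-line tuples.  The sample values are arbitrary.
def example_align():
    users = ['mpm', 'bos']
    revs = ['1510', '9']
    pieces = []
    for column in (users, revs):
        width = max(map(len, column))
        pieces.append(["%*s" % (width, value) for value in column])
    for per_line in zip(*pieces):
        print " ".join(per_line)   # "mpm 1510" then "bos    9"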
145 def archive(ui, repo, dest, **opts):
145 def archive(ui, repo, dest, **opts):
146 '''create unversioned archive of a repository revision
146 '''create unversioned archive of a repository revision
147
147
148 By default, the revision used is the parent of the working
148 By default, the revision used is the parent of the working
149 directory; use "-r" to specify a different revision.
149 directory; use "-r" to specify a different revision.
150
150
151 To specify the type of archive to create, use "-t". Valid
151 To specify the type of archive to create, use "-t". Valid
152 types are:
152 types are:
153
153
154 "files" (default): a directory full of files
154 "files" (default): a directory full of files
155 "tar": tar archive, uncompressed
155 "tar": tar archive, uncompressed
156 "tbz2": tar archive, compressed using bzip2
156 "tbz2": tar archive, compressed using bzip2
157 "tgz": tar archive, compressed using gzip
157 "tgz": tar archive, compressed using gzip
158 "uzip": zip archive, uncompressed
158 "uzip": zip archive, uncompressed
159 "zip": zip archive, compressed using deflate
159 "zip": zip archive, compressed using deflate
160
160
161 The exact name of the destination archive or directory is given
161 The exact name of the destination archive or directory is given
162 using a format string; see "hg help export" for details.
162 using a format string; see "hg help export" for details.
163
163
164 Each member added to an archive file has a directory prefix
164 Each member added to an archive file has a directory prefix
165 prepended. Use "-p" to specify a format string for the prefix.
165 prepended. Use "-p" to specify a format string for the prefix.
166 The default is the basename of the archive, with suffixes removed.
166 The default is the basename of the archive, with suffixes removed.
167 '''
167 '''
168
168
169 node = repo.changectx(opts['rev']).node()
169 node = repo.changectx(opts['rev']).node()
170 dest = cmdutil.make_filename(repo, dest, node)
170 dest = cmdutil.make_filename(repo, dest, node)
171 if os.path.realpath(dest) == repo.root:
171 if os.path.realpath(dest) == repo.root:
172 raise util.Abort(_('repository root cannot be destination'))
172 raise util.Abort(_('repository root cannot be destination'))
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
173 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
174 kind = opts.get('type') or 'files'
174 kind = opts.get('type') or 'files'
175 prefix = opts['prefix']
175 prefix = opts['prefix']
176 if dest == '-':
176 if dest == '-':
177 if kind == 'files':
177 if kind == 'files':
178 raise util.Abort(_('cannot archive plain files to stdout'))
178 raise util.Abort(_('cannot archive plain files to stdout'))
179 dest = sys.stdout
179 dest = sys.stdout
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
180 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
181 prefix = cmdutil.make_filename(repo, prefix, node)
181 prefix = cmdutil.make_filename(repo, prefix, node)
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
182 archival.archive(repo, dest, node, kind, not opts['no_decode'],
183 matchfn, prefix)
183 matchfn, prefix)
184
184
185 def backout(ui, repo, node=None, rev=None, **opts):
185 def backout(ui, repo, node=None, rev=None, **opts):
186 '''reverse effect of earlier changeset
186 '''reverse effect of earlier changeset
187
187
188 Commit the backed out changes as a new changeset. The new
188 Commit the backed out changes as a new changeset. The new
189 changeset is a child of the backed out changeset.
189 changeset is a child of the backed out changeset.
190
190
191 If you back out a changeset other than the tip, a new head is
191 If you back out a changeset other than the tip, a new head is
192 created. This head is the parent of the working directory. If
192 created. This head is the parent of the working directory. If
193 you back out an old changeset, your working directory will appear
193 you back out an old changeset, your working directory will appear
194 old after the backout. You should merge the backout changeset
194 old after the backout. You should merge the backout changeset
195 with another head.
195 with another head.
196
196
197 The --merge option remembers the parent of the working directory
197 The --merge option remembers the parent of the working directory
198 before starting the backout, then merges the new head with that
198 before starting the backout, then merges the new head with that
199 changeset afterwards. This saves you from doing the merge by
199 changeset afterwards. This saves you from doing the merge by
200 hand. The result of this merge is not committed, as for a normal
200 hand. The result of this merge is not committed, as for a normal
201 merge.'''
201 merge.'''
202 if rev and node:
202 if rev and node:
203 raise util.Abort(_("please specify just one revision"))
203 raise util.Abort(_("please specify just one revision"))
204
204
205 if not rev:
205 if not rev:
206 rev = node
206 rev = node
207
207
208 bail_if_changed(repo)
208 bail_if_changed(repo)
209 op1, op2 = repo.dirstate.parents()
209 op1, op2 = repo.dirstate.parents()
210 if op2 != nullid:
210 if op2 != nullid:
211 raise util.Abort(_('outstanding uncommitted merge'))
211 raise util.Abort(_('outstanding uncommitted merge'))
212 node = repo.lookup(rev)
212 node = repo.lookup(rev)
213 p1, p2 = repo.changelog.parents(node)
213 p1, p2 = repo.changelog.parents(node)
214 if p1 == nullid:
214 if p1 == nullid:
215 raise util.Abort(_('cannot back out a change with no parents'))
215 raise util.Abort(_('cannot back out a change with no parents'))
216 if p2 != nullid:
216 if p2 != nullid:
217 if not opts['parent']:
217 if not opts['parent']:
218 raise util.Abort(_('cannot back out a merge changeset without '
218 raise util.Abort(_('cannot back out a merge changeset without '
219 '--parent'))
219 '--parent'))
220 p = repo.lookup(opts['parent'])
220 p = repo.lookup(opts['parent'])
221 if p not in (p1, p2):
221 if p not in (p1, p2):
222 raise util.Abort(_('%s is not a parent of %s') %
222 raise util.Abort(_('%s is not a parent of %s') %
223 (short(p), short(node)))
223 (short(p), short(node)))
224 parent = p
224 parent = p
225 else:
225 else:
226 if opts['parent']:
226 if opts['parent']:
227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
227 raise util.Abort(_('cannot use --parent on non-merge changeset'))
228 parent = p1
228 parent = p1
229 hg.clean(repo, node, show_stats=False)
229 hg.clean(repo, node, show_stats=False)
230 revert_opts = opts.copy()
230 revert_opts = opts.copy()
231 revert_opts['date'] = None
231 revert_opts['date'] = None
232 revert_opts['all'] = True
232 revert_opts['all'] = True
233 revert_opts['rev'] = hex(parent)
233 revert_opts['rev'] = hex(parent)
234 revert(ui, repo, **revert_opts)
234 revert(ui, repo, **revert_opts)
235 commit_opts = opts.copy()
235 commit_opts = opts.copy()
236 commit_opts['addremove'] = False
236 commit_opts['addremove'] = False
237 if not commit_opts['message'] and not commit_opts['logfile']:
237 if not commit_opts['message'] and not commit_opts['logfile']:
238 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
238 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
239 commit_opts['force_editor'] = True
239 commit_opts['force_editor'] = True
240 commit(ui, repo, **commit_opts)
240 commit(ui, repo, **commit_opts)
241 def nice(node):
241 def nice(node):
242 return '%d:%s' % (repo.changelog.rev(node), short(node))
242 return '%d:%s' % (repo.changelog.rev(node), short(node))
243 ui.status(_('changeset %s backs out changeset %s\n') %
243 ui.status(_('changeset %s backs out changeset %s\n') %
244 (nice(repo.changelog.tip()), nice(node)))
244 (nice(repo.changelog.tip()), nice(node)))
245 if op1 != node:
245 if op1 != node:
246 if opts['merge']:
246 if opts['merge']:
247 ui.status(_('merging with changeset %s\n') % nice(op1))
247 ui.status(_('merging with changeset %s\n') % nice(op1))
248 hg.merge(repo, hex(op1))
248 hg.merge(repo, hex(op1))
249 else:
249 else:
250 ui.status(_('the backout changeset is a new head - '
250 ui.status(_('the backout changeset is a new head - '
251 'do not forget to merge\n'))
251 'do not forget to merge\n'))
252 ui.status(_('(use "backout --merge" '
252 ui.status(_('(use "backout --merge" '
253 'if you want to auto-merge)\n'))
253 'if you want to auto-merge)\n'))
254
254
255 def branch(ui, repo, label=None, **opts):
255 def branch(ui, repo, label=None, **opts):
256 """set or show the current branch name
256 """set or show the current branch name
257
257
258 With <name>, set the current branch name. Otherwise, show the
258 With <name>, set the current branch name. Otherwise, show the
259 current branch name.
259 current branch name.
260
260
261 Unless --force is specified, branch will not let you set a
261 Unless --force is specified, branch will not let you set a
262 branch name that shadows an existing branch.
262 branch name that shadows an existing branch.
263 """
263 """
264
264
265 if label:
265 if label:
266 if not opts.get('force') and label in repo.branchtags():
266 if not opts.get('force') and label in repo.branchtags():
267 if label not in [p.branch() for p in repo.workingctx().parents()]:
267 if label not in [p.branch() for p in repo.workingctx().parents()]:
268 raise util.Abort(_('a branch of the same name already exists'
268 raise util.Abort(_('a branch of the same name already exists'
269 ' (use --force to override)'))
269 ' (use --force to override)'))
270 repo.dirstate.setbranch(util.fromlocal(label))
270 repo.dirstate.setbranch(util.fromlocal(label))
271 else:
271 else:
272 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
272 ui.write("%s\n" % util.tolocal(repo.dirstate.branch()))
273
273
274 def branches(ui, repo):
274 def branches(ui, repo):
275 """list repository named branches
275 """list repository named branches
276
276
277 List the repository's named branches.
277 List the repository's named branches.
278 """
278 """
279 b = repo.branchtags()
279 b = repo.branchtags()
280 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
280 l = [(-repo.changelog.rev(n), n, t) for t, n in b.items()]
281 l.sort()
281 l.sort()
282 for r, n, t in l:
282 for r, n, t in l:
283 hexfunc = ui.debugflag and hex or short
283 hexfunc = ui.debugflag and hex or short
284 if ui.quiet:
284 if ui.quiet:
285 ui.write("%s\n" % t)
285 ui.write("%s\n" % t)
286 else:
286 else:
287 spaces = " " * (30 - util.locallen(t))
287 spaces = " " * (30 - util.locallen(t))
288 ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
288 ui.write("%s%s %s:%s\n" % (t, spaces, -r, hexfunc(n)))
289
289
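# Illustrative sketch (not part of this changeset): why branches() above
# sorts on (-rev, node, tag).  Negating the revision number makes a plain
# ascending sort list the most recently changed branch heads first.  The
# sample data is made up.
def example_branch_order():
    heads = [('default', 120), ('stable', 118), ('exp', 119)]
    l = [(-rev, name) for name, rev in heads]
    l.sort()
    return [name for neg, name in l]    # ['default', 'exp', 'stable']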
290 def bundle(ui, repo, fname, dest=None, **opts):
290 def bundle(ui, repo, fname, dest=None, **opts):
291 """create a changegroup file
291 """create a changegroup file
292
292
293 Generate a compressed changegroup file collecting changesets not
293 Generate a compressed changegroup file collecting changesets not
294 found in the other repository.
294 found in the other repository.
295
295
296 If no destination repository is specified, the destination is assumed
296 If no destination repository is specified, the destination is assumed
297 to have all the nodes specified by one or more --base parameters.
297 to have all the nodes specified by one or more --base parameters.
298
298
299 The bundle file can then be transferred using conventional means and
299 The bundle file can then be transferred using conventional means and
300 applied to another repository with the unbundle or pull command.
300 applied to another repository with the unbundle or pull command.
301 This is useful when direct push and pull are not available or when
301 This is useful when direct push and pull are not available or when
302 exporting an entire repository is undesirable.
302 exporting an entire repository is undesirable.
303
303
304 Applying bundles preserves all changeset contents including
304 Applying bundles preserves all changeset contents including
305 permissions, copy/rename information, and revision history.
305 permissions, copy/rename information, and revision history.
306 """
306 """
307 revs = opts.get('rev') or None
307 revs = opts.get('rev') or None
308 if revs:
308 if revs:
309 revs = [repo.lookup(rev) for rev in revs]
309 revs = [repo.lookup(rev) for rev in revs]
310 base = opts.get('base')
310 base = opts.get('base')
311 if base:
311 if base:
312 if dest:
312 if dest:
313 raise util.Abort(_("--base is incompatible with specifiying "
313 raise util.Abort(_("--base is incompatible with specifiying "
314 "a destination"))
314 "a destination"))
315 base = [repo.lookup(rev) for rev in base]
315 base = [repo.lookup(rev) for rev in base]
316 # create the right base
316 # create the right base
317 # XXX: nodesbetween / changegroup* should be "fixed" instead
317 # XXX: nodesbetween / changegroup* should be "fixed" instead
318 o = []
318 o = []
319 has = {nullid: None}
319 has = {nullid: None}
320 for n in base:
320 for n in base:
321 has.update(repo.changelog.reachable(n))
321 has.update(repo.changelog.reachable(n))
322 if revs:
322 if revs:
323 visit = list(revs)
323 visit = list(revs)
324 else:
324 else:
325 visit = repo.changelog.heads()
325 visit = repo.changelog.heads()
326 seen = {}
326 seen = {}
327 while visit:
327 while visit:
328 n = visit.pop(0)
328 n = visit.pop(0)
329 parents = [p for p in repo.changelog.parents(n) if p not in has]
329 parents = [p for p in repo.changelog.parents(n) if p not in has]
330 if len(parents) == 0:
330 if len(parents) == 0:
331 o.insert(0, n)
331 o.insert(0, n)
332 else:
332 else:
333 for p in parents:
333 for p in parents:
334 if p not in seen:
334 if p not in seen:
335 seen[p] = 1
335 seen[p] = 1
336 visit.append(p)
336 visit.append(p)
337 else:
337 else:
338 setremoteconfig(ui, opts)
338 setremoteconfig(ui, opts)
339 dest = ui.expandpath(dest or 'default-push', dest or 'default')
339 dest, revs = cmdutil.parseurl(
340 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
340 other = hg.repository(ui, dest)
341 other = hg.repository(ui, dest)
341 o = repo.findoutgoing(other, force=opts['force'])
342 o = repo.findoutgoing(other, force=opts['force'])
342
343
343 if revs:
344 if revs:
344 cg = repo.changegroupsubset(o, revs, 'bundle')
345 cg = repo.changegroupsubset(o, revs, 'bundle')
345 else:
346 else:
346 cg = repo.changegroup(o, 'bundle')
347 cg = repo.changegroup(o, 'bundle')
347 changegroup.writebundle(cg, fname, "HG10BZ")
348 changegroup.writebundle(cg, fname, "HG10BZ")
348
349
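# --- Editor's illustrative sketch (not part of this changeset) ---
# The --base handling above walks from the requested heads toward the
# root, stops at nodes already reachable from a --base node, and keeps
# the boundary nodes. A minimal stdlib-only rendition of that walk,
# using a plain parent dict instead of Mercurial's changelog API
# (all names here are hypothetical):
def _sketch_outgoing_roots(parents, heads, has):
    # parents: node -> tuple of parent nodes; has: set of base-reachable nodes
    out, seen, visit = [], set(), list(heads)
    while visit:
        n = visit.pop(0)
        missing = [p for p in parents.get(n, ()) if p not in has]
        if not missing:
            out.insert(0, n)            # boundary: every parent is known
        else:
            for p in missing:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return out
# _sketch_outgoing_roots({'b': ('a',), 'c': ('b',)}, ['c'], {'a'}) -> ['b']
# --- end sketch ---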
349 def cat(ui, repo, file1, *pats, **opts):
350 def cat(ui, repo, file1, *pats, **opts):
350 """output the current or given revision of files
351 """output the current or given revision of files
351
352
352 Print the specified files as they were at the given revision.
353 Print the specified files as they were at the given revision.
353 If no revision is given, the parent of the working directory is used,
354 If no revision is given, the parent of the working directory is used,
354 or tip if no revision is checked out.
355 or tip if no revision is checked out.
355
356
356 Output may be to a file, in which case the name of the file is
357 Output may be to a file, in which case the name of the file is
357 given using a format string. The formatting rules are the same as
358 given using a format string. The formatting rules are the same as
358 for the export command, with the following additions:
359 for the export command, with the following additions:
359
360
360 %s basename of file being printed
361 %s basename of file being printed
361 %d dirname of file being printed, or '.' if in repo root
362 %d dirname of file being printed, or '.' if in repo root
362 %p root-relative path name of file being printed
363 %p root-relative path name of file being printed
363 """
364 """
364 ctx = repo.changectx(opts['rev'])
365 ctx = repo.changectx(opts['rev'])
365 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
366 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
366 ctx.node()):
367 ctx.node()):
367 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
368 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
368 fp.write(ctx.filectx(abs).data())
369 fp.write(ctx.filectx(abs).data())
369
370
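# --- Editor's illustrative note (not part of this changeset) ---
# Example use of the output format string described above, with a
# hypothetical file and tag:
#
#   hg cat -r 1.0 -o '%p.%R' src/foo.c
#
# writes the revision-1.0 contents of src/foo.c to "src/foo.c.<rev>",
# combining %p (root-relative path) with %R (changeset revision number).
# --- end note ---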
370 def clone(ui, source, dest=None, **opts):
371 def clone(ui, source, dest=None, **opts):
371 """make a copy of an existing repository
372 """make a copy of an existing repository
372
373
373 Create a copy of an existing repository in a new directory.
374 Create a copy of an existing repository in a new directory.
374
375
375 If no destination directory name is specified, it defaults to the
376 If no destination directory name is specified, it defaults to the
376 basename of the source.
377 basename of the source.
377
378
378 The location of the source is added to the new repository's
379 The location of the source is added to the new repository's
379 .hg/hgrc file, as the default to be used for future pulls.
380 .hg/hgrc file, as the default to be used for future pulls.
380
381
381 For efficiency, hardlinks are used for cloning whenever the source
382 For efficiency, hardlinks are used for cloning whenever the source
382 and destination are on the same filesystem (note this applies only
383 and destination are on the same filesystem (note this applies only
383 to the repository data, not to the checked out files). Some
384 to the repository data, not to the checked out files). Some
384 filesystems, such as AFS, implement hardlinking incorrectly, but
385 filesystems, such as AFS, implement hardlinking incorrectly, but
385 do not report errors. In these cases, use the --pull option to
386 do not report errors. In these cases, use the --pull option to
386 avoid hardlinking.
387 avoid hardlinking.
387
388
388 You can safely clone repositories and checked out files using full
389 You can safely clone repositories and checked out files using full
389 hardlinks with
390 hardlinks with
390
391
391 $ cp -al REPO REPOCLONE
392 $ cp -al REPO REPOCLONE
392
393
393 which is the fastest way to clone. However, the operation is not
394 which is the fastest way to clone. However, the operation is not
394 atomic (making sure REPO is not modified during the operation is
395 atomic (making sure REPO is not modified during the operation is
395 up to you) and you have to make sure your editor breaks hardlinks
396 up to you) and you have to make sure your editor breaks hardlinks
396 (Emacs and most Linux kernel tools do so).
397 (Emacs and most Linux kernel tools do so).
397
398
398 If you use the -r option to clone up to a specific revision, no
399 If you use the -r option to clone up to a specific revision, no
399 subsequent revisions will be present in the cloned repository.
400 subsequent revisions will be present in the cloned repository.
400 This option implies --pull, even on local repositories.
401 This option implies --pull, even on local repositories.
401
402
402 See pull for valid source format details.
403 See pull for valid source format details.
403
404
404 It is possible to specify an ssh:// URL as the destination, but no
405 It is possible to specify an ssh:// URL as the destination, but no
405 .hg/hgrc and working directory will be created on the remote side.
406 .hg/hgrc and working directory will be created on the remote side.
406 Look at the help text for the pull command for important details
407 Look at the help text for the pull command for important details
407 about ssh:// URLs.
408 about ssh:// URLs.
408 """
409 """
409 setremoteconfig(ui, opts)
410 setremoteconfig(ui, opts)
410 hg.clone(ui, ui.expandpath(source), dest,
411 hg.clone(ui, source, dest,
411 pull=opts['pull'],
412 pull=opts['pull'],
412 stream=opts['uncompressed'],
413 stream=opts['uncompressed'],
413 rev=opts['rev'],
414 rev=opts['rev'],
414 update=not opts['noupdate'])
415 update=not opts['noupdate'])
415
416
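# --- Editor's illustrative note (not part of this changeset) ---
# Typical clone invocations, with hypothetical paths:
#
#   hg clone http://example.com/repo local-copy   # full clone
#   hg clone -r 1.0 local-repo partial-copy       # up to rev/tag 1.0 only;
#                                                 # implies --pull
# --- end note ---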
416 def commit(ui, repo, *pats, **opts):
417 def commit(ui, repo, *pats, **opts):
417 """commit the specified files or all outstanding changes
418 """commit the specified files or all outstanding changes
418
419
419 Commit changes to the given files into the repository.
420 Commit changes to the given files into the repository.
420
421
421 If a list of files is omitted, all changes reported by "hg status"
422 If a list of files is omitted, all changes reported by "hg status"
422 will be committed.
423 will be committed.
423
424
424 If no commit message is specified, the editor configured in your hgrc
425 If no commit message is specified, the editor configured in your hgrc
425 or in the EDITOR environment variable is started to enter a message.
426 or in the EDITOR environment variable is started to enter a message.
426 """
427 """
427 message = logmessage(opts)
428 message = logmessage(opts)
428
429
429 if opts['addremove']:
430 if opts['addremove']:
430 cmdutil.addremove(repo, pats, opts)
431 cmdutil.addremove(repo, pats, opts)
431 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
432 if pats:
433 if pats:
433 status = repo.status(files=fns, match=match)
434 status = repo.status(files=fns, match=match)
434 modified, added, removed, deleted, unknown = status[:5]
435 modified, added, removed, deleted, unknown = status[:5]
435 files = modified + added + removed
436 files = modified + added + removed
436 slist = None
437 slist = None
437 for f in fns:
438 for f in fns:
438 if f == '.':
439 if f == '.':
439 continue
440 continue
440 if f not in files:
441 if f not in files:
441 rf = repo.wjoin(f)
442 rf = repo.wjoin(f)
442 if f in unknown:
443 if f in unknown:
443 raise util.Abort(_("file %s not tracked!") % rf)
444 raise util.Abort(_("file %s not tracked!") % rf)
444 try:
445 try:
445 mode = os.lstat(rf)[stat.ST_MODE]
446 mode = os.lstat(rf)[stat.ST_MODE]
446 except OSError:
447 except OSError:
447 raise util.Abort(_("file %s not found!") % rf)
448 raise util.Abort(_("file %s not found!") % rf)
448 if stat.S_ISDIR(mode):
449 if stat.S_ISDIR(mode):
449 name = f + '/'
450 name = f + '/'
450 if slist is None:
451 if slist is None:
451 slist = list(files)
452 slist = list(files)
452 slist.sort()
453 slist.sort()
453 i = bisect.bisect(slist, name)
454 i = bisect.bisect(slist, name)
454 if i >= len(slist) or not slist[i].startswith(name):
455 if i >= len(slist) or not slist[i].startswith(name):
455 raise util.Abort(_("no match under directory %s!")
456 raise util.Abort(_("no match under directory %s!")
456 % rf)
457 % rf)
457 elif not stat.S_ISREG(mode):
458 elif not stat.S_ISREG(mode):
458 raise util.Abort(_("can't commit %s: "
459 raise util.Abort(_("can't commit %s: "
459 "unsupported file type!") % rf)
460 "unsupported file type!") % rf)
460 else:
461 else:
461 files = []
462 files = []
462 try:
463 try:
463 repo.commit(files, message, opts['user'], opts['date'], match,
464 repo.commit(files, message, opts['user'], opts['date'], match,
464 force_editor=opts.get('force_editor'))
465 force_editor=opts.get('force_editor'))
465 except ValueError, inst:
466 except ValueError, inst:
466 raise util.Abort(str(inst))
467 raise util.Abort(str(inst))
467
468
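# --- Editor's illustrative sketch (not part of this changeset) ---
# The directory check above leans on bisect: in a sorted list of file
# names, any file under directory "d" sorts immediately after the
# prefix "d/". A stdlib-only rendition with a hypothetical file list:
import bisect
def _sketch_dir_has_match(sorted_files, dirname):
    name = dirname.rstrip('/') + '/'
    i = bisect.bisect(sorted_files, name)
    return i < len(sorted_files) and sorted_files[i].startswith(name)
# _sketch_dir_has_match(['a/x.c', 'b/y.c'], 'a') -> True
# _sketch_dir_has_match(['a/x.c', 'b/y.c'], 'c') -> False
# --- end sketch ---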
468 def docopy(ui, repo, pats, opts, wlock):
469 def docopy(ui, repo, pats, opts, wlock):
469 # called with the repo lock held
470 # called with the repo lock held
470 #
471 #
471 # hgsep => pathname that uses "/" to separate directories
472 # hgsep => pathname that uses "/" to separate directories
472 # ossep => pathname that uses os.sep to separate directories
473 # ossep => pathname that uses os.sep to separate directories
473 cwd = repo.getcwd()
474 cwd = repo.getcwd()
474 errors = 0
475 errors = 0
475 copied = []
476 copied = []
476 targets = {}
477 targets = {}
477
478
478 # abs: hgsep
479 # abs: hgsep
479 # rel: ossep
480 # rel: ossep
480 # return: hgsep
481 # return: hgsep
481 def okaytocopy(abs, rel, exact):
482 def okaytocopy(abs, rel, exact):
482 reasons = {'?': _('is not managed'),
483 reasons = {'?': _('is not managed'),
483 'a': _('has been marked for add'),
484 'a': _('has been marked for add'),
484 'r': _('has been marked for remove')}
485 'r': _('has been marked for remove')}
485 state = repo.dirstate.state(abs)
486 state = repo.dirstate.state(abs)
486 reason = reasons.get(state)
487 reason = reasons.get(state)
487 if reason:
488 if reason:
488 if state == 'a':
489 if state == 'a':
489 origsrc = repo.dirstate.copied(abs)
490 origsrc = repo.dirstate.copied(abs)
490 if origsrc is not None:
491 if origsrc is not None:
491 return origsrc
492 return origsrc
492 if exact:
493 if exact:
493 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
494 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
494 else:
495 else:
495 return abs
496 return abs
496
497
497 # origsrc: hgsep
498 # origsrc: hgsep
498 # abssrc: hgsep
499 # abssrc: hgsep
499 # relsrc: ossep
500 # relsrc: ossep
500 # target: ossep
501 # target: ossep
501 def copy(origsrc, abssrc, relsrc, target, exact):
502 def copy(origsrc, abssrc, relsrc, target, exact):
502 abstarget = util.canonpath(repo.root, cwd, target)
503 abstarget = util.canonpath(repo.root, cwd, target)
503 reltarget = util.pathto(repo.root, cwd, abstarget)
504 reltarget = util.pathto(repo.root, cwd, abstarget)
504 prevsrc = targets.get(abstarget)
505 prevsrc = targets.get(abstarget)
505 if prevsrc is not None:
506 if prevsrc is not None:
506 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
507 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
507 (reltarget, util.localpath(abssrc),
508 (reltarget, util.localpath(abssrc),
508 util.localpath(prevsrc)))
509 util.localpath(prevsrc)))
509 return
510 return
510 if (not opts['after'] and os.path.exists(reltarget) or
511 if (not opts['after'] and os.path.exists(reltarget) or
511 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
512 opts['after'] and repo.dirstate.state(abstarget) not in '?ar'):
512 if not opts['force']:
513 if not opts['force']:
513 ui.warn(_('%s: not overwriting - file exists\n') %
514 ui.warn(_('%s: not overwriting - file exists\n') %
514 reltarget)
515 reltarget)
515 return
516 return
516 if not opts['after'] and not opts.get('dry_run'):
517 if not opts['after'] and not opts.get('dry_run'):
517 os.unlink(reltarget)
518 os.unlink(reltarget)
518 if opts['after']:
519 if opts['after']:
519 if not os.path.exists(reltarget):
520 if not os.path.exists(reltarget):
520 return
521 return
521 else:
522 else:
522 targetdir = os.path.dirname(reltarget) or '.'
523 targetdir = os.path.dirname(reltarget) or '.'
523 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
524 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
524 os.makedirs(targetdir)
525 os.makedirs(targetdir)
525 try:
526 try:
526 restore = repo.dirstate.state(abstarget) == 'r'
527 restore = repo.dirstate.state(abstarget) == 'r'
527 if restore and not opts.get('dry_run'):
528 if restore and not opts.get('dry_run'):
528 repo.undelete([abstarget], wlock)
529 repo.undelete([abstarget], wlock)
529 try:
530 try:
530 if not opts.get('dry_run'):
531 if not opts.get('dry_run'):
531 util.copyfile(relsrc, reltarget)
532 util.copyfile(relsrc, reltarget)
532 restore = False
533 restore = False
533 finally:
534 finally:
534 if restore:
535 if restore:
535 repo.remove([abstarget], wlock=wlock)
536 repo.remove([abstarget], wlock=wlock)
536 except IOError, inst:
537 except IOError, inst:
537 if inst.errno == errno.ENOENT:
538 if inst.errno == errno.ENOENT:
538 ui.warn(_('%s: deleted in working copy\n') % relsrc)
539 ui.warn(_('%s: deleted in working copy\n') % relsrc)
539 else:
540 else:
540 ui.warn(_('%s: cannot copy - %s\n') %
541 ui.warn(_('%s: cannot copy - %s\n') %
541 (relsrc, inst.strerror))
542 (relsrc, inst.strerror))
542 errors += 1
543 errors += 1
543 return
544 return
544 if ui.verbose or not exact:
545 if ui.verbose or not exact:
545 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
546 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
546 targets[abstarget] = abssrc
547 targets[abstarget] = abssrc
547 if abstarget != origsrc and not opts.get('dry_run'):
548 if abstarget != origsrc and not opts.get('dry_run'):
548 repo.copy(origsrc, abstarget, wlock)
549 repo.copy(origsrc, abstarget, wlock)
549 copied.append((abssrc, relsrc, exact))
550 copied.append((abssrc, relsrc, exact))
550
551
551 # pat: ossep
552 # pat: ossep
552 # dest ossep
553 # dest ossep
553 # srcs: list of (hgsep, hgsep, ossep, bool)
554 # srcs: list of (hgsep, hgsep, ossep, bool)
554 # return: function that takes hgsep and returns ossep
555 # return: function that takes hgsep and returns ossep
555 def targetpathfn(pat, dest, srcs):
556 def targetpathfn(pat, dest, srcs):
556 if os.path.isdir(pat):
557 if os.path.isdir(pat):
557 abspfx = util.canonpath(repo.root, cwd, pat)
558 abspfx = util.canonpath(repo.root, cwd, pat)
558 abspfx = util.localpath(abspfx)
559 abspfx = util.localpath(abspfx)
559 if destdirexists:
560 if destdirexists:
560 striplen = len(os.path.split(abspfx)[0])
561 striplen = len(os.path.split(abspfx)[0])
561 else:
562 else:
562 striplen = len(abspfx)
563 striplen = len(abspfx)
563 if striplen:
564 if striplen:
564 striplen += len(os.sep)
565 striplen += len(os.sep)
565 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
566 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
566 elif destdirexists:
567 elif destdirexists:
567 res = lambda p: os.path.join(dest,
568 res = lambda p: os.path.join(dest,
568 os.path.basename(util.localpath(p)))
569 os.path.basename(util.localpath(p)))
569 else:
570 else:
570 res = lambda p: dest
571 res = lambda p: dest
571 return res
572 return res
572
573
573 # pat: ossep
574 # pat: ossep
574 # dest ossep
575 # dest ossep
575 # srcs: list of (hgsep, hgsep, ossep, bool)
576 # srcs: list of (hgsep, hgsep, ossep, bool)
576 # return: function that takes hgsep and returns ossep
577 # return: function that takes hgsep and returns ossep
577 def targetpathafterfn(pat, dest, srcs):
578 def targetpathafterfn(pat, dest, srcs):
578 if util.patkind(pat, None)[0]:
579 if util.patkind(pat, None)[0]:
579 # a mercurial pattern
580 # a mercurial pattern
580 res = lambda p: os.path.join(dest,
581 res = lambda p: os.path.join(dest,
581 os.path.basename(util.localpath(p)))
582 os.path.basename(util.localpath(p)))
582 else:
583 else:
583 abspfx = util.canonpath(repo.root, cwd, pat)
584 abspfx = util.canonpath(repo.root, cwd, pat)
584 if len(abspfx) < len(srcs[0][0]):
585 if len(abspfx) < len(srcs[0][0]):
585 # A directory. Either the target path contains the last
586 # A directory. Either the target path contains the last
586 # component of the source path or it does not.
587 # component of the source path or it does not.
587 def evalpath(striplen):
588 def evalpath(striplen):
588 score = 0
589 score = 0
589 for s in srcs:
590 for s in srcs:
590 t = os.path.join(dest, util.localpath(s[0])[striplen:])
591 t = os.path.join(dest, util.localpath(s[0])[striplen:])
591 if os.path.exists(t):
592 if os.path.exists(t):
592 score += 1
593 score += 1
593 return score
594 return score
594
595
595 abspfx = util.localpath(abspfx)
596 abspfx = util.localpath(abspfx)
596 striplen = len(abspfx)
597 striplen = len(abspfx)
597 if striplen:
598 if striplen:
598 striplen += len(os.sep)
599 striplen += len(os.sep)
599 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
600 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
600 score = evalpath(striplen)
601 score = evalpath(striplen)
601 striplen1 = len(os.path.split(abspfx)[0])
602 striplen1 = len(os.path.split(abspfx)[0])
602 if striplen1:
603 if striplen1:
603 striplen1 += len(os.sep)
604 striplen1 += len(os.sep)
604 if evalpath(striplen1) > score:
605 if evalpath(striplen1) > score:
605 striplen = striplen1
606 striplen = striplen1
606 res = lambda p: os.path.join(dest,
607 res = lambda p: os.path.join(dest,
607 util.localpath(p)[striplen:])
608 util.localpath(p)[striplen:])
608 else:
609 else:
609 # a file
610 # a file
610 if destdirexists:
611 if destdirexists:
611 res = lambda p: os.path.join(dest,
612 res = lambda p: os.path.join(dest,
612 os.path.basename(util.localpath(p)))
613 os.path.basename(util.localpath(p)))
613 else:
614 else:
614 res = lambda p: dest
615 res = lambda p: dest
615 return res
616 return res
616
617
617
618
618 pats = util.expand_glob(pats)
619 pats = util.expand_glob(pats)
619 if not pats:
620 if not pats:
620 raise util.Abort(_('no source or destination specified'))
621 raise util.Abort(_('no source or destination specified'))
621 if len(pats) == 1:
622 if len(pats) == 1:
622 raise util.Abort(_('no destination specified'))
623 raise util.Abort(_('no destination specified'))
623 dest = pats.pop()
624 dest = pats.pop()
624 destdirexists = os.path.isdir(dest)
625 destdirexists = os.path.isdir(dest)
625 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
626 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
626 raise util.Abort(_('with multiple sources, destination must be an '
627 raise util.Abort(_('with multiple sources, destination must be an '
627 'existing directory'))
628 'existing directory'))
628 if opts['after']:
629 if opts['after']:
629 tfn = targetpathafterfn
630 tfn = targetpathafterfn
630 else:
631 else:
631 tfn = targetpathfn
632 tfn = targetpathfn
632 copylist = []
633 copylist = []
633 for pat in pats:
634 for pat in pats:
634 srcs = []
635 srcs = []
635 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
636 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts,
636 globbed=True):
637 globbed=True):
637 origsrc = okaytocopy(abssrc, relsrc, exact)
638 origsrc = okaytocopy(abssrc, relsrc, exact)
638 if origsrc:
639 if origsrc:
639 srcs.append((origsrc, abssrc, relsrc, exact))
640 srcs.append((origsrc, abssrc, relsrc, exact))
640 if not srcs:
641 if not srcs:
641 continue
642 continue
642 copylist.append((tfn(pat, dest, srcs), srcs))
643 copylist.append((tfn(pat, dest, srcs), srcs))
643 if not copylist:
644 if not copylist:
644 raise util.Abort(_('no files to copy'))
645 raise util.Abort(_('no files to copy'))
645
646
646 for targetpath, srcs in copylist:
647 for targetpath, srcs in copylist:
647 for origsrc, abssrc, relsrc, exact in srcs:
648 for origsrc, abssrc, relsrc, exact in srcs:
648 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
649 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
649
650
650 if errors:
651 if errors:
651 ui.warn(_('(consider using --after)\n'))
652 ui.warn(_('(consider using --after)\n'))
652 return errors, copied
653 return errors, copied
653
654
654 def copy(ui, repo, *pats, **opts):
655 def copy(ui, repo, *pats, **opts):
655 """mark files as copied for the next commit
656 """mark files as copied for the next commit
656
657
657 Mark dest as having copies of source files. If dest is a
658 Mark dest as having copies of source files. If dest is a
658 directory, copies are put in that directory. If dest is a file,
659 directory, copies are put in that directory. If dest is a file,
659 there can only be one source.
660 there can only be one source.
660
661
661 By default, this command copies the contents of files as they
662 By default, this command copies the contents of files as they
662 stand in the working directory. If invoked with --after, the
663 stand in the working directory. If invoked with --after, the
663 operation is recorded, but no copying is performed.
664 operation is recorded, but no copying is performed.
664
665
665 This command takes effect in the next commit. To undo a copy
666 This command takes effect in the next commit. To undo a copy
666 before that, see hg revert.
667 before that, see hg revert.
667 """
668 """
668 wlock = repo.wlock(0)
669 wlock = repo.wlock(0)
669 errs, copied = docopy(ui, repo, pats, opts, wlock)
670 errs, copied = docopy(ui, repo, pats, opts, wlock)
670 return errs
671 return errs
671
672
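# --- Editor's illustrative note (not part of this changeset) ---
# Typical copy invocations, with hypothetical file names:
#
#   hg copy foo.c bar.c            # copy the file and record the copy
#   hg copy --after foo.c bar.c    # record a copy that was already made
#
# Either form takes effect at the next commit; "hg revert" undoes it.
# --- end note ---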
672 def debugancestor(ui, index, rev1, rev2):
673 def debugancestor(ui, index, rev1, rev2):
673 """find the ancestor revision of two revisions in a given index"""
674 """find the ancestor revision of two revisions in a given index"""
674 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
675 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
675 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
676 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
676 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
677 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
677
678
678 def debugcomplete(ui, cmd='', **opts):
679 def debugcomplete(ui, cmd='', **opts):
679 """returns the completion list associated with the given command"""
680 """returns the completion list associated with the given command"""
680
681
681 if opts['options']:
682 if opts['options']:
682 options = []
683 options = []
683 otables = [globalopts]
684 otables = [globalopts]
684 if cmd:
685 if cmd:
685 aliases, entry = findcmd(ui, cmd)
686 aliases, entry = findcmd(ui, cmd)
686 otables.append(entry[1])
687 otables.append(entry[1])
687 for t in otables:
688 for t in otables:
688 for o in t:
689 for o in t:
689 if o[0]:
690 if o[0]:
690 options.append('-%s' % o[0])
691 options.append('-%s' % o[0])
691 options.append('--%s' % o[1])
692 options.append('--%s' % o[1])
692 ui.write("%s\n" % "\n".join(options))
693 ui.write("%s\n" % "\n".join(options))
693 return
694 return
694
695
695 clist = findpossible(ui, cmd).keys()
696 clist = findpossible(ui, cmd).keys()
696 clist.sort()
697 clist.sort()
697 ui.write("%s\n" % "\n".join(clist))
698 ui.write("%s\n" % "\n".join(clist))
698
699
699 def debugrebuildstate(ui, repo, rev=""):
700 def debugrebuildstate(ui, repo, rev=""):
700 """rebuild the dirstate as it would look like for the given revision"""
701 """rebuild the dirstate as it would look like for the given revision"""
701 if rev == "":
702 if rev == "":
702 rev = repo.changelog.tip()
703 rev = repo.changelog.tip()
703 ctx = repo.changectx(rev)
704 ctx = repo.changectx(rev)
704 files = ctx.manifest()
705 files = ctx.manifest()
705 wlock = repo.wlock()
706 wlock = repo.wlock()
706 repo.dirstate.rebuild(rev, files)
707 repo.dirstate.rebuild(rev, files)
707
708
708 def debugcheckstate(ui, repo):
709 def debugcheckstate(ui, repo):
709 """validate the correctness of the current dirstate"""
710 """validate the correctness of the current dirstate"""
710 parent1, parent2 = repo.dirstate.parents()
711 parent1, parent2 = repo.dirstate.parents()
711 repo.dirstate.read()
712 repo.dirstate.read()
712 dc = repo.dirstate.map
713 dc = repo.dirstate.map
713 keys = dc.keys()
714 keys = dc.keys()
714 keys.sort()
715 keys.sort()
715 m1 = repo.changectx(parent1).manifest()
716 m1 = repo.changectx(parent1).manifest()
716 m2 = repo.changectx(parent2).manifest()
717 m2 = repo.changectx(parent2).manifest()
717 errors = 0
718 errors = 0
718 for f in dc:
719 for f in dc:
719 state = repo.dirstate.state(f)
720 state = repo.dirstate.state(f)
720 if state in "nr" and f not in m1:
721 if state in "nr" and f not in m1:
721 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
722 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
722 errors += 1
723 errors += 1
723 if state in "a" and f in m1:
724 if state in "a" and f in m1:
724 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
725 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
725 errors += 1
726 errors += 1
726 if state in "m" and f not in m1 and f not in m2:
727 if state in "m" and f not in m1 and f not in m2:
727 ui.warn(_("%s in state %s, but not in either manifest\n") %
728 ui.warn(_("%s in state %s, but not in either manifest\n") %
728 (f, state))
729 (f, state))
729 errors += 1
730 errors += 1
730 for f in m1:
731 for f in m1:
731 state = repo.dirstate.state(f)
732 state = repo.dirstate.state(f)
732 if state not in "nrm":
733 if state not in "nrm":
733 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
734 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
734 errors += 1
735 errors += 1
735 if errors:
736 if errors:
736 error = _(".hg/dirstate inconsistent with current parent's manifest")
737 error = _(".hg/dirstate inconsistent with current parent's manifest")
737 raise util.Abort(error)
738 raise util.Abort(error)
738
739
739 def showconfig(ui, repo, *values, **opts):
740 def showconfig(ui, repo, *values, **opts):
740 """show combined config settings from all hgrc files
741 """show combined config settings from all hgrc files
741
742
742 With no args, print names and values of all config items.
743 With no args, print names and values of all config items.
743
744
744 With one arg of the form section.name, print just the value of
745 With one arg of the form section.name, print just the value of
745 that config item.
746 that config item.
746
747
747 With multiple args, print names and values of all config items
748 With multiple args, print names and values of all config items
748 with matching section names."""
749 with matching section names."""
749
750
750 untrusted = bool(opts.get('untrusted'))
751 untrusted = bool(opts.get('untrusted'))
751 if values:
752 if values:
752 if len([v for v in values if '.' in v]) > 1:
753 if len([v for v in values if '.' in v]) > 1:
753 raise util.Abort(_('only one config item permitted'))
754 raise util.Abort(_('only one config item permitted'))
754 for section, name, value in ui.walkconfig(untrusted=untrusted):
755 for section, name, value in ui.walkconfig(untrusted=untrusted):
755 sectname = section + '.' + name
756 sectname = section + '.' + name
756 if values:
757 if values:
757 for v in values:
758 for v in values:
758 if v == section:
759 if v == section:
759 ui.write('%s=%s\n' % (sectname, value))
760 ui.write('%s=%s\n' % (sectname, value))
760 elif v == sectname:
761 elif v == sectname:
761 ui.write(value, '\n')
762 ui.write(value, '\n')
762 else:
763 else:
763 ui.write('%s=%s\n' % (sectname, value))
764 ui.write('%s=%s\n' % (sectname, value))
764
765
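# --- Editor's illustrative note (not part of this changeset) ---
# Example showconfig invocations, with hypothetical config contents:
#
#   hg showconfig                  # every section.name=value pair
#   hg showconfig ui.username      # just the value of ui.username
#   hg showconfig ui paths         # all items in the ui and paths sections
# --- end note ---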
765 def debugsetparents(ui, repo, rev1, rev2=None):
766 def debugsetparents(ui, repo, rev1, rev2=None):
766 """manually set the parents of the current working directory
767 """manually set the parents of the current working directory
767
768
768 This is useful for writing repository conversion tools, but should
769 This is useful for writing repository conversion tools, but should
769 be used with care.
770 be used with care.
770 """
771 """
771
772
772 if not rev2:
773 if not rev2:
773 rev2 = hex(nullid)
774 rev2 = hex(nullid)
774
775
775 wlock = repo.wlock()
776 wlock = repo.wlock()
776 try:
777 try:
777 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
778 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
778 finally:
779 finally:
779 wlock.release()
780 wlock.release()
780
781
781 def debugstate(ui, repo):
782 def debugstate(ui, repo):
782 """show the contents of the current dirstate"""
783 """show the contents of the current dirstate"""
783 repo.dirstate.read()
784 repo.dirstate.read()
784 dc = repo.dirstate.map
785 dc = repo.dirstate.map
785 keys = dc.keys()
786 keys = dc.keys()
786 keys.sort()
787 keys.sort()
787 for file_ in keys:
788 for file_ in keys:
788 if dc[file_][3] == -1:
789 if dc[file_][3] == -1:
789 # Pad or slice to locale representation
790 # Pad or slice to locale representation
790 locale_len = len(time.strftime("%x %X", time.localtime(0)))
791 locale_len = len(time.strftime("%x %X", time.localtime(0)))
791 timestr = 'unset'
792 timestr = 'unset'
792 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
793 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
793 else:
794 else:
794 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
795 timestr = time.strftime("%x %X", time.localtime(dc[file_][3]))
795 ui.write("%c %3o %10d %s %s\n"
796 ui.write("%c %3o %10d %s %s\n"
796 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
797 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
797 timestr, file_))
798 timestr, file_))
798 for f in repo.dirstate.copies():
799 for f in repo.dirstate.copies():
799 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
800 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
800
801
801 def debugdata(ui, file_, rev):
802 def debugdata(ui, file_, rev):
802 """dump the contents of a data file revision"""
803 """dump the contents of a data file revision"""
803 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
804 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
804 try:
805 try:
805 ui.write(r.revision(r.lookup(rev)))
806 ui.write(r.revision(r.lookup(rev)))
806 except KeyError:
807 except KeyError:
807 raise util.Abort(_('invalid revision identifier %s') % rev)
808 raise util.Abort(_('invalid revision identifier %s') % rev)
808
809
809 def debugdate(ui, date, range=None, **opts):
810 def debugdate(ui, date, range=None, **opts):
810 """parse and display a date"""
811 """parse and display a date"""
811 if opts["extended"]:
812 if opts["extended"]:
812 d = util.parsedate(date, util.extendeddateformats)
813 d = util.parsedate(date, util.extendeddateformats)
813 else:
814 else:
814 d = util.parsedate(date)
815 d = util.parsedate(date)
815 ui.write("internal: %s %s\n" % d)
816 ui.write("internal: %s %s\n" % d)
816 ui.write("standard: %s\n" % util.datestr(d))
817 ui.write("standard: %s\n" % util.datestr(d))
817 if range:
818 if range:
818 m = util.matchdate(range)
819 m = util.matchdate(range)
819 ui.write("match: %s\n" % m(d[0]))
820 ui.write("match: %s\n" % m(d[0]))
820
821
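# --- Editor's illustrative note (not part of this changeset) ---
# Example debugdate invocation:
#
#   hg debugdate "2006-12-13 18:29:30"
#
# prints the parsed value twice: first as Mercurial's internal
# (unixtime, offset) pair, then re-rendered through util.datestr();
# with a RANGE argument it also reports whether the date matches.
# --- end note ---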
821 def debugindex(ui, file_):
822 def debugindex(ui, file_):
822 """dump the contents of an index file"""
823 """dump the contents of an index file"""
823 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
824 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
824 ui.write(" rev offset length base linkrev" +
825 ui.write(" rev offset length base linkrev" +
825 " nodeid p1 p2\n")
826 " nodeid p1 p2\n")
826 for i in xrange(r.count()):
827 for i in xrange(r.count()):
827 node = r.node(i)
828 node = r.node(i)
828 pp = r.parents(node)
829 pp = r.parents(node)
829 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
830 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
830 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
831 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
831 short(node), short(pp[0]), short(pp[1])))
832 short(node), short(pp[0]), short(pp[1])))
832
833
833 def debugindexdot(ui, file_):
834 def debugindexdot(ui, file_):
834 """dump an index DAG as a .dot file"""
835 """dump an index DAG as a .dot file"""
835 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
836 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
836 ui.write("digraph G {\n")
837 ui.write("digraph G {\n")
837 for i in xrange(r.count()):
838 for i in xrange(r.count()):
838 node = r.node(i)
839 node = r.node(i)
839 pp = r.parents(node)
840 pp = r.parents(node)
840 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
841 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
841 if pp[1] != nullid:
842 if pp[1] != nullid:
842 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
843 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
843 ui.write("}\n")
844 ui.write("}\n")
844
845
845 def debuginstall(ui):
846 def debuginstall(ui):
846 '''test Mercurial installation'''
847 '''test Mercurial installation'''
847
848
848 def writetemp(contents):
849 def writetemp(contents):
849 (fd, name) = tempfile.mkstemp()
850 (fd, name) = tempfile.mkstemp()
850 f = os.fdopen(fd, "wb")
851 f = os.fdopen(fd, "wb")
851 f.write(contents)
852 f.write(contents)
852 f.close()
853 f.close()
853 return name
854 return name
854
855
855 problems = 0
856 problems = 0
856
857
857 # encoding
858 # encoding
858 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
859 ui.status(_("Checking encoding (%s)...\n") % util._encoding)
859 try:
860 try:
860 util.fromlocal("test")
861 util.fromlocal("test")
861 except util.Abort, inst:
862 except util.Abort, inst:
862 ui.write(" %s\n" % inst)
863 ui.write(" %s\n" % inst)
863 ui.write(_(" (check that your locale is properly set)\n"))
864 ui.write(_(" (check that your locale is properly set)\n"))
864 problems += 1
865 problems += 1
865
866
866 # compiled modules
867 # compiled modules
867 ui.status(_("Checking extensions...\n"))
868 ui.status(_("Checking extensions...\n"))
868 try:
869 try:
869 import bdiff, mpatch, base85
870 import bdiff, mpatch, base85
870 except Exception, inst:
871 except Exception, inst:
871 ui.write(" %s\n" % inst)
872 ui.write(" %s\n" % inst)
872 ui.write(_(" One or more extensions could not be found"))
873 ui.write(_(" One or more extensions could not be found"))
873 ui.write(_(" (check that you compiled the extensions)\n"))
874 ui.write(_(" (check that you compiled the extensions)\n"))
874 problems += 1
875 problems += 1
875
876
876 # templates
877 # templates
877 ui.status(_("Checking templates...\n"))
878 ui.status(_("Checking templates...\n"))
878 try:
879 try:
879 import templater
880 import templater
880 t = templater.templater(templater.templatepath("map-cmdline.default"))
881 t = templater.templater(templater.templatepath("map-cmdline.default"))
881 except Exception, inst:
882 except Exception, inst:
882 ui.write(" %s\n" % inst)
883 ui.write(" %s\n" % inst)
883 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
884 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
884 problems += 1
885 problems += 1
885
886
886 # patch
887 # patch
887 ui.status(_("Checking patch...\n"))
888 ui.status(_("Checking patch...\n"))
888 patcher = ui.config('ui', 'patch')
889 patcher = ui.config('ui', 'patch')
889 patcher = ((patcher and util.find_exe(patcher)) or
890 patcher = ((patcher and util.find_exe(patcher)) or
890 util.find_exe('gpatch') or
891 util.find_exe('gpatch') or
891 util.find_exe('patch'))
892 util.find_exe('patch'))
892 if not patcher:
893 if not patcher:
893 ui.write(_(" Can't find patch or gpatch in PATH\n"))
894 ui.write(_(" Can't find patch or gpatch in PATH\n"))
894 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
895 ui.write(_(" (specify a patch utility in your .hgrc file)\n"))
895 problems += 1
896 problems += 1
896 else:
897 else:
897 # actually attempt a patch here
898 # actually attempt a patch here
898 a = "1\n2\n3\n4\n"
899 a = "1\n2\n3\n4\n"
899 b = "1\n2\n3\ninsert\n4\n"
900 b = "1\n2\n3\ninsert\n4\n"
900 fa = writetemp(a)
901 fa = writetemp(a)
901 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
902 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa))
902 fd = writetemp(d)
903 fd = writetemp(d)
903
904
904 files = {}
905 files = {}
905 try:
906 try:
906 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
907 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
907 except util.Abort, e:
908 except util.Abort, e:
908 ui.write(_(" patch call failed:\n"))
909 ui.write(_(" patch call failed:\n"))
909 ui.write(" " + str(e) + "\n")
910 ui.write(" " + str(e) + "\n")
910 problems += 1
911 problems += 1
911 else:
912 else:
912 if list(files) != [os.path.basename(fa)]:
913 if list(files) != [os.path.basename(fa)]:
913 ui.write(_(" unexpected patch output!"))
914 ui.write(_(" unexpected patch output!"))
914 ui.write(_(" (you may have an incompatible version of patch)\n"))
915 ui.write(_(" (you may have an incompatible version of patch)\n"))
915 problems += 1
916 problems += 1
916 a = file(fa).read()
917 a = file(fa).read()
917 if a != b:
918 if a != b:
918 ui.write(_(" patch test failed!"))
919 ui.write(_(" patch test failed!"))
919 ui.write(_(" (you may have an incompatible version of patch)\n"))
920 ui.write(_(" (you may have an incompatible version of patch)\n"))
920 problems += 1
921 problems += 1
921
922
922 os.unlink(fa)
923 os.unlink(fa)
923 os.unlink(fd)
924 os.unlink(fd)
924
925
925 # merge helper
926 # merge helper
926 ui.status(_("Checking merge helper...\n"))
927 ui.status(_("Checking merge helper...\n"))
927 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
928 cmd = (os.environ.get("HGMERGE") or ui.config("ui", "merge")
928 or "hgmerge")
929 or "hgmerge")
929 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
930 cmdpath = util.find_exe(cmd) or util.find_exe(cmd.split()[0])
930 if not cmdpath:
931 if not cmdpath:
931 if cmd == 'hgmerge':
932 if cmd == 'hgmerge':
932 ui.write(_(" No merge helper set and can't find default"
933 ui.write(_(" No merge helper set and can't find default"
933 " hgmerge script in PATH\n"))
934 " hgmerge script in PATH\n"))
934 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
935 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
935 else:
936 else:
936 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
937 ui.write(_(" Can't find merge helper '%s' in PATH\n") % cmd)
937 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
938 ui.write(_(" (specify a merge helper in your .hgrc file)\n"))
938 problems += 1
939 problems += 1
939 else:
940 else:
940 # actually attempt a patch here
941 # actually attempt a patch here
941 fa = writetemp("1\n2\n3\n4\n")
942 fa = writetemp("1\n2\n3\n4\n")
942 fl = writetemp("1\n2\n3\ninsert\n4\n")
943 fl = writetemp("1\n2\n3\ninsert\n4\n")
943 fr = writetemp("begin\n1\n2\n3\n4\n")
944 fr = writetemp("begin\n1\n2\n3\n4\n")
944 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
945 r = os.system('%s %s %s %s' % (cmd, fl, fa, fr))
945 if r:
946 if r:
946 ui.write(_(" got unexpected merge error %d!") % r)
947 ui.write(_(" got unexpected merge error %d!") % r)
947 problems += 1
948 problems += 1
948 m = file(fl).read()
949 m = file(fl).read()
949 if m != "begin\n1\n2\n3\ninsert\n4\n":
950 if m != "begin\n1\n2\n3\ninsert\n4\n":
950 ui.write(_(" got unexpected merge results!") % r)
951 ui.write(_(" got unexpected merge results!") % r)
951 ui.write(_(" (your merge helper may have the"
952 ui.write(_(" (your merge helper may have the"
952 " wrong argument order)\n"))
953 " wrong argument order)\n"))
953 ui.write(m)
954 ui.write(m)
954 os.unlink(fa)
955 os.unlink(fa)
955 os.unlink(fl)
956 os.unlink(fl)
956 os.unlink(fr)
957 os.unlink(fr)
957
958
958 # editor
959 # editor
959 ui.status(_("Checking commit editor...\n"))
960 ui.status(_("Checking commit editor...\n"))
960 editor = (os.environ.get("HGEDITOR") or
961 editor = (os.environ.get("HGEDITOR") or
961 ui.config("ui", "editor") or
962 ui.config("ui", "editor") or
962 os.environ.get("EDITOR", "vi"))
963 os.environ.get("EDITOR", "vi"))
963 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
964 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
964 if not cmdpath:
965 if not cmdpath:
965 if editor == 'vi':
966 if editor == 'vi':
966 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
967 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
967 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
968 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
968 else:
969 else:
969 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
970 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
970 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
971 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
971 problems += 1
972 problems += 1
972
973
973 # check username
974 # check username
974 ui.status(_("Checking username...\n"))
975 ui.status(_("Checking username...\n"))
975 user = os.environ.get("HGUSER")
976 user = os.environ.get("HGUSER")
976 if user is None:
977 if user is None:
977 user = ui.config("ui", "username")
978 user = ui.config("ui", "username")
978 if user is None:
979 if user is None:
979 user = os.environ.get("EMAIL")
980 user = os.environ.get("EMAIL")
980 if not user:
981 if not user:
981 ui.warn(" ")
982 ui.warn(" ")
982 ui.username()
983 ui.username()
983 ui.write(_(" (specify a username in your .hgrc file)\n"))
984 ui.write(_(" (specify a username in your .hgrc file)\n"))
984
985
985 if not problems:
986 if not problems:
986 ui.status(_("No problems detected\n"))
987 ui.status(_("No problems detected\n"))
987 else:
988 else:
988 ui.write(_("%s problems detected,"
989 ui.write(_("%s problems detected,"
989 " please check your install!\n") % problems)
990 " please check your install!\n") % problems)
990
991
991 return problems
992 return problems
992
993
993 def debugrename(ui, repo, file1, *pats, **opts):
994 def debugrename(ui, repo, file1, *pats, **opts):
994 """dump rename information"""
995 """dump rename information"""
995
996
996 ctx = repo.changectx(opts.get('rev', 'tip'))
997 ctx = repo.changectx(opts.get('rev', 'tip'))
997 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
998 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
998 ctx.node()):
999 ctx.node()):
999 m = ctx.filectx(abs).renamed()
1000 m = ctx.filectx(abs).renamed()
1000 if m:
1001 if m:
1001 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1002 ui.write(_("%s renamed from %s:%s\n") % (rel, m[0], hex(m[1])))
1002 else:
1003 else:
1003 ui.write(_("%s not renamed\n") % rel)
1004 ui.write(_("%s not renamed\n") % rel)
1004
1005
1005 def debugwalk(ui, repo, *pats, **opts):
1006 def debugwalk(ui, repo, *pats, **opts):
1006 """show how files match on given patterns"""
1007 """show how files match on given patterns"""
1007 items = list(cmdutil.walk(repo, pats, opts))
1008 items = list(cmdutil.walk(repo, pats, opts))
1008 if not items:
1009 if not items:
1009 return
1010 return
1010 fmt = '%%s %%-%ds %%-%ds %%s' % (
1011 fmt = '%%s %%-%ds %%-%ds %%s' % (
1011 max([len(abs) for (src, abs, rel, exact) in items]),
1012 max([len(abs) for (src, abs, rel, exact) in items]),
1012 max([len(rel) for (src, abs, rel, exact) in items]))
1013 max([len(rel) for (src, abs, rel, exact) in items]))
1013 for src, abs, rel, exact in items:
1014 for src, abs, rel, exact in items:
1014 line = fmt % (src, abs, rel, exact and 'exact' or '')
1015 line = fmt % (src, abs, rel, exact and 'exact' or '')
1015 ui.write("%s\n" % line.rstrip())
1016 ui.write("%s\n" % line.rstrip())
1016
1017
1017 def diff(ui, repo, *pats, **opts):
1018 def diff(ui, repo, *pats, **opts):
1018 """diff repository (or selected files)
1019 """diff repository (or selected files)
1019
1020
1020 Show differences between revisions for the specified files.
1021 Show differences between revisions for the specified files.
1021
1022
1022 Differences between files are shown using the unified diff format.
1023 Differences between files are shown using the unified diff format.
1023
1024
1024 NOTE: diff may generate unexpected results for merges, as it will
1025 NOTE: diff may generate unexpected results for merges, as it will
1025 default to comparing against the working directory's first parent
1026 default to comparing against the working directory's first parent
1026 changeset if no revisions are specified.
1027 changeset if no revisions are specified.
1027
1028
1028 When two revision arguments are given, then changes are shown
1029 When two revision arguments are given, then changes are shown
1029 between those revisions. If only one revision is specified then
1030 between those revisions. If only one revision is specified then
1030 that revision is compared to the working directory, and, when no
1031 that revision is compared to the working directory, and, when no
1031 revisions are specified, the working directory files are compared
1032 revisions are specified, the working directory files are compared
1032 to its parent.
1033 to its parent.
1033
1034
1034 Without the -a option, diff will avoid generating diffs of files
1035 Without the -a option, diff will avoid generating diffs of files
1035 it detects as binary. With -a, diff will generate a diff anyway,
1036 it detects as binary. With -a, diff will generate a diff anyway,
1036 probably with undesirable results.
1037 probably with undesirable results.
1037 """
1038 """
1038 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1039 node1, node2 = cmdutil.revpair(repo, opts['rev'])
1039
1040
1040 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1041 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1041
1042
1042 patch.diff(repo, node1, node2, fns, match=matchfn,
1043 patch.diff(repo, node1, node2, fns, match=matchfn,
1043 opts=patch.diffopts(ui, opts))
1044 opts=patch.diffopts(ui, opts))
1044
1045
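# --- Editor's illustrative note (not part of this changeset) ---
# Typical diff invocations, with hypothetical revisions and files:
#
#   hg diff                        # working directory vs. its parent
#   hg diff -r 1.0 -r 1.1          # changes between two revisions
#   hg diff -r 1.0 Makefile        # one file, revision 1.0 vs. working dir
# --- end note ---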
1045 def export(ui, repo, *changesets, **opts):
1046 def export(ui, repo, *changesets, **opts):
1046 """dump the header and diffs for one or more changesets
1047 """dump the header and diffs for one or more changesets
1047
1048
1048 Print the changeset header and diffs for one or more revisions.
1049 Print the changeset header and diffs for one or more revisions.
1049
1050
1050 The information shown in the changeset header is: author,
1051 The information shown in the changeset header is: author,
1051 changeset hash, parent(s) and commit comment.
1052 changeset hash, parent(s) and commit comment.
1052
1053
1053 NOTE: export may generate unexpected diff output for merge changesets,
1054 NOTE: export may generate unexpected diff output for merge changesets,
1054 as it will compare the merge changeset against its first parent only.
1055 as it will compare the merge changeset against its first parent only.
1055
1056
1056 Output may be to a file, in which case the name of the file is
1057 Output may be to a file, in which case the name of the file is
1057 given using a format string. The formatting rules are as follows:
1058 given using a format string. The formatting rules are as follows:
1058
1059
1059 %% literal "%" character
1060 %% literal "%" character
1060 %H changeset hash (40 bytes of hexadecimal)
1061 %H changeset hash (40 bytes of hexadecimal)
1061 %N number of patches being generated
1062 %N number of patches being generated
1062 %R changeset revision number
1063 %R changeset revision number
1063 %b basename of the exporting repository
1064 %b basename of the exporting repository
1064 %h short-form changeset hash (12 bytes of hexadecimal)
1065 %h short-form changeset hash (12 bytes of hexadecimal)
1065 %n zero-padded sequence number, starting at 1
1066 %n zero-padded sequence number, starting at 1
1066 %r zero-padded changeset revision number
1067 %r zero-padded changeset revision number
1067
1068
1068 Without the -a option, export will avoid generating diffs of files
1069 Without the -a option, export will avoid generating diffs of files
1069 it detects as binary. With -a, export will generate a diff anyway,
1070 it detects as binary. With -a, export will generate a diff anyway,
1070 probably with undesirable results.
1071 probably with undesirable results.
1071
1072
1072 With the --switch-parent option, the diff will be against the second
1073 With the --switch-parent option, the diff will be against the second
1073 parent. This can be useful for reviewing a merge.
1074 parent. This can be useful for reviewing a merge.
1074 """
1075 """
1075 if not changesets:
1076 if not changesets:
1076 raise util.Abort(_("export requires at least one changeset"))
1077 raise util.Abort(_("export requires at least one changeset"))
1077 revs = cmdutil.revrange(repo, changesets)
1078 revs = cmdutil.revrange(repo, changesets)
1078 if len(revs) > 1:
1079 if len(revs) > 1:
1079 ui.note(_('exporting patches:\n'))
1080 ui.note(_('exporting patches:\n'))
1080 else:
1081 else:
1081 ui.note(_('exporting patch:\n'))
1082 ui.note(_('exporting patch:\n'))
1082 patch.export(repo, revs, template=opts['output'],
1083 patch.export(repo, revs, template=opts['output'],
1083 switch_parent=opts['switch_parent'],
1084 switch_parent=opts['switch_parent'],
1084 opts=patch.diffopts(ui, opts))
1085 opts=patch.diffopts(ui, opts))
1085
1086
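# --- Editor's illustrative note (not part of this changeset) ---
# Example use of the export format string, with hypothetical revisions:
#
#   hg export -o "%R-%h.patch" 12 13 14
#
# writes one patch file per revision, named like "12-<shorthash>.patch",
# combining %R (revision number) with %h (short changeset hash).
# --- end note ---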
1086 def grep(ui, repo, pattern, *pats, **opts):
1087 def grep(ui, repo, pattern, *pats, **opts):
1087 """search for a pattern in specified files and revisions
1088 """search for a pattern in specified files and revisions
1088
1089
1089 Search revisions of files for a regular expression.
1090 Search revisions of files for a regular expression.
1090
1091
1091 This command behaves differently than Unix grep. It only accepts
1092 This command behaves differently than Unix grep. It only accepts
1092 Python/Perl regexps. It searches repository history, not the
1093 Python/Perl regexps. It searches repository history, not the
1093 working directory. It always prints the revision number in which
1094 working directory. It always prints the revision number in which
1094 a match appears.
1095 a match appears.
1095
1096
1096 By default, grep only prints output for the first revision of a
1097 By default, grep only prints output for the first revision of a
1097 file in which it finds a match. To get it to print every revision
1098 file in which it finds a match. To get it to print every revision
1098 that contains a change in match status ("-" for a match that
1099 that contains a change in match status ("-" for a match that
1099 becomes a non-match, or "+" for a non-match that becomes a match),
1100 becomes a non-match, or "+" for a non-match that becomes a match),
1100 use the --all flag.
1101 use the --all flag.
1101 """
1102 """
1102 reflags = 0
1103 reflags = 0
1103 if opts['ignore_case']:
1104 if opts['ignore_case']:
1104 reflags |= re.I
1105 reflags |= re.I
1105 regexp = re.compile(pattern, reflags)
1106 regexp = re.compile(pattern, reflags)
1106 sep, eol = ':', '\n'
1107 sep, eol = ':', '\n'
1107 if opts['print0']:
1108 if opts['print0']:
1108 sep = eol = '\0'
1109 sep = eol = '\0'
1109
1110
1110 fcache = {}
1111 fcache = {}
1111 def getfile(fn):
1112 def getfile(fn):
1112 if fn not in fcache:
1113 if fn not in fcache:
1113 fcache[fn] = repo.file(fn)
1114 fcache[fn] = repo.file(fn)
1114 return fcache[fn]
1115 return fcache[fn]
1115
1116
1116 def matchlines(body):
1117 def matchlines(body):
1117 begin = 0
1118 begin = 0
1118 linenum = 0
1119 linenum = 0
1119 while True:
1120 while True:
1120 match = regexp.search(body, begin)
1121 match = regexp.search(body, begin)
1121 if not match:
1122 if not match:
1122 break
1123 break
1123 mstart, mend = match.span()
1124 mstart, mend = match.span()
1124 linenum += body.count('\n', begin, mstart) + 1
1125 linenum += body.count('\n', begin, mstart) + 1
1125 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1126 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1126 lend = body.find('\n', mend)
1127 lend = body.find('\n', mend)
1127 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1128 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1128 begin = lend + 1
1129 begin = lend + 1
1129
1130
1130 class linestate(object):
1131 class linestate(object):
1131 def __init__(self, line, linenum, colstart, colend):
1132 def __init__(self, line, linenum, colstart, colend):
1132 self.line = line
1133 self.line = line
1133 self.linenum = linenum
1134 self.linenum = linenum
1134 self.colstart = colstart
1135 self.colstart = colstart
1135 self.colend = colend
1136 self.colend = colend
1136
1137
1137 def __eq__(self, other):
1138 def __eq__(self, other):
1138 return self.line == other.line
1139 return self.line == other.line
1139
1140
1140 matches = {}
1141 matches = {}
1141 copies = {}
1142 copies = {}
1142 def grepbody(fn, rev, body):
1143 def grepbody(fn, rev, body):
1143 matches[rev].setdefault(fn, [])
1144 matches[rev].setdefault(fn, [])
1144 m = matches[rev][fn]
1145 m = matches[rev][fn]
1145 for lnum, cstart, cend, line in matchlines(body):
1146 for lnum, cstart, cend, line in matchlines(body):
1146 s = linestate(line, lnum, cstart, cend)
1147 s = linestate(line, lnum, cstart, cend)
1147 m.append(s)
1148 m.append(s)
1148
1149
1149 def difflinestates(a, b):
1150 def difflinestates(a, b):
1150 sm = difflib.SequenceMatcher(None, a, b)
1151 sm = difflib.SequenceMatcher(None, a, b)
1151 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1152 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1152 if tag == 'insert':
1153 if tag == 'insert':
1153 for i in xrange(blo, bhi):
1154 for i in xrange(blo, bhi):
1154 yield ('+', b[i])
1155 yield ('+', b[i])
1155 elif tag == 'delete':
1156 elif tag == 'delete':
1156 for i in xrange(alo, ahi):
1157 for i in xrange(alo, ahi):
1157 yield ('-', a[i])
1158 yield ('-', a[i])
1158 elif tag == 'replace':
1159 elif tag == 'replace':
1159 for i in xrange(alo, ahi):
1160 for i in xrange(alo, ahi):
1160 yield ('-', a[i])
1161 yield ('-', a[i])
1161 for i in xrange(blo, bhi):
1162 for i in xrange(blo, bhi):
1162 yield ('+', b[i])
1163 yield ('+', b[i])
1163
1164
1164 prev = {}
1165 prev = {}
1165 def display(fn, rev, states, prevstates):
1166 def display(fn, rev, states, prevstates):
1166 found = False
1167 found = False
1167 filerevmatches = {}
1168 filerevmatches = {}
1168 r = prev.get(fn, -1)
1169 r = prev.get(fn, -1)
1169 if opts['all']:
1170 if opts['all']:
1170 iter = difflinestates(states, prevstates)
1171 iter = difflinestates(states, prevstates)
1171 else:
1172 else:
1172 iter = [('', l) for l in prevstates]
1173 iter = [('', l) for l in prevstates]
1173 for change, l in iter:
1174 for change, l in iter:
1174 cols = [fn, str(r)]
1175 cols = [fn, str(r)]
1175 if opts['line_number']:
1176 if opts['line_number']:
1176 cols.append(str(l.linenum))
1177 cols.append(str(l.linenum))
1177 if opts['all']:
1178 if opts['all']:
1178 cols.append(change)
1179 cols.append(change)
1179 if opts['user']:
1180 if opts['user']:
1180 cols.append(ui.shortuser(get(r)[1]))
1181 cols.append(ui.shortuser(get(r)[1]))
1181 if opts['files_with_matches']:
1182 if opts['files_with_matches']:
1182 c = (fn, r)
1183 c = (fn, r)
1183 if c in filerevmatches:
1184 if c in filerevmatches:
1184 continue
1185 continue
1185 filerevmatches[c] = 1
1186 filerevmatches[c] = 1
1186 else:
1187 else:
1187 cols.append(l.line)
1188 cols.append(l.line)
1188 ui.write(sep.join(cols), eol)
1189 ui.write(sep.join(cols), eol)
1189 found = True
1190 found = True
1190 return found
1191 return found
1191
1192
1192 fstate = {}
1193 fstate = {}
1193 skip = {}
1194 skip = {}
1194 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1195 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1195 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1196 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1196 found = False
1197 found = False
1197 follow = opts.get('follow')
1198 follow = opts.get('follow')
1198 for st, rev, fns in changeiter:
1199 for st, rev, fns in changeiter:
1199 if st == 'window':
1200 if st == 'window':
1200 matches.clear()
1201 matches.clear()
1201 elif st == 'add':
1202 elif st == 'add':
1202 mf = repo.changectx(rev).manifest()
1203 mf = repo.changectx(rev).manifest()
1203 matches[rev] = {}
1204 matches[rev] = {}
1204 for fn in fns:
1205 for fn in fns:
1205 if fn in skip:
1206 if fn in skip:
1206 continue
1207 continue
1207 fstate.setdefault(fn, {})
1208 fstate.setdefault(fn, {})
1208 try:
1209 try:
1209 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1210 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1210 if follow:
1211 if follow:
1211 copied = getfile(fn).renamed(mf[fn])
1212 copied = getfile(fn).renamed(mf[fn])
1212 if copied:
1213 if copied:
1213 copies.setdefault(rev, {})[fn] = copied[0]
1214 copies.setdefault(rev, {})[fn] = copied[0]
1214 except KeyError:
1215 except KeyError:
1215 pass
1216 pass
1216 elif st == 'iter':
1217 elif st == 'iter':
1217 states = matches[rev].items()
1218 states = matches[rev].items()
1218 states.sort()
1219 states.sort()
1219 for fn, m in states:
1220 for fn, m in states:
1220 copy = copies.get(rev, {}).get(fn)
1221 copy = copies.get(rev, {}).get(fn)
1221 if fn in skip:
1222 if fn in skip:
1222 if copy:
1223 if copy:
1223 skip[copy] = True
1224 skip[copy] = True
1224 continue
1225 continue
1225 if fn in prev or fstate[fn]:
1226 if fn in prev or fstate[fn]:
1226 r = display(fn, rev, m, fstate[fn])
1227 r = display(fn, rev, m, fstate[fn])
1227 found = found or r
1228 found = found or r
1228 if r and not opts['all']:
1229 if r and not opts['all']:
1229 skip[fn] = True
1230 skip[fn] = True
1230 if copy:
1231 if copy:
1231 skip[copy] = True
1232 skip[copy] = True
1232 fstate[fn] = m
1233 fstate[fn] = m
1233 if copy:
1234 if copy:
1234 fstate[copy] = m
1235 fstate[copy] = m
1235 prev[fn] = rev
1236 prev[fn] = rev
1236
1237
1237 fstate = fstate.items()
1238 fstate = fstate.items()
1238 fstate.sort()
1239 fstate.sort()
1239 for fn, state in fstate:
1240 for fn, state in fstate:
1240 if fn in skip:
1241 if fn in skip:
1241 continue
1242 continue
1242 if fn not in copies.get(prev[fn], {}):
1243 if fn not in copies.get(prev[fn], {}):
1243 found = display(fn, rev, {}, state) or found
1244 found = display(fn, rev, {}, state) or found
1244 return (not found and 1) or 0
1245 return (not found and 1) or 0
1245
1246
1246 def heads(ui, repo, **opts):
1247 def heads(ui, repo, **opts):
1247 """show current repository heads
1248 """show current repository heads
1248
1249
1249 Show all repository head changesets.
1250 Show all repository head changesets.
1250
1251
1251 Repository "heads" are changesets that don't have child
1252 Repository "heads" are changesets that don't have child
1252 changesets. They are where development generally takes place and
1253 changesets. They are where development generally takes place and
1253 are the usual targets for update and merge operations.
1254 are the usual targets for update and merge operations.
1254 """
1255 """
1255 if opts['rev']:
1256 if opts['rev']:
1256 heads = repo.heads(repo.lookup(opts['rev']))
1257 heads = repo.heads(repo.lookup(opts['rev']))
1257 else:
1258 else:
1258 heads = repo.heads()
1259 heads = repo.heads()
1259 displayer = cmdutil.show_changeset(ui, repo, opts)
1260 displayer = cmdutil.show_changeset(ui, repo, opts)
1260 for n in heads:
1261 for n in heads:
1261 displayer.show(changenode=n)
1262 displayer.show(changenode=n)
1262
1263
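The docstring above defines a head as a changeset with no child changesets. A small self-contained sketch of that definition follows, assuming nothing beyond a parent-pointer mapping; find_heads and the toy graph are illustrative, not the repository's actual data structures.

# Toy illustration of "heads are changesets without children".
def find_heads(parents_of):
    """parents_of maps node -> list of parent nodes; heads are never a parent."""
    has_child = set()
    for node, parents in parents_of.items():
        has_child.update(parents)
    return [node for node in parents_of if node not in has_child]

if __name__ == '__main__':
    graph = {
        'r0': [],
        'r1': ['r0'],
        'r2': ['r1'],      # one line of development
        'r3': ['r1'],      # a second head branched off r1
    }
    print(sorted(find_heads(graph)))   # ['r2', 'r3']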
1263 def help_(ui, name=None, with_version=False):
1264 def help_(ui, name=None, with_version=False):
1264 """show help for a command, extension, or list of commands
1265 """show help for a command, extension, or list of commands
1265
1266
1266 With no arguments, print a list of commands and short help.
1267 With no arguments, print a list of commands and short help.
1267
1268
1268 Given a command name, print help for that command.
1269 Given a command name, print help for that command.
1269
1270
1270 Given an extension name, print help for that extension, and the
1271 Given an extension name, print help for that extension, and the
1271 commands it provides."""
1272 commands it provides."""
1272 option_lists = []
1273 option_lists = []
1273
1274
1274 def addglobalopts(aliases):
1275 def addglobalopts(aliases):
1275 if ui.verbose:
1276 if ui.verbose:
1276 option_lists.append((_("global options:"), globalopts))
1277 option_lists.append((_("global options:"), globalopts))
1277 if name == 'shortlist':
1278 if name == 'shortlist':
1278 option_lists.append((_('use "hg help" for the full list '
1279 option_lists.append((_('use "hg help" for the full list '
1279 'of commands'), ()))
1280 'of commands'), ()))
1280 else:
1281 else:
1281 if name == 'shortlist':
1282 if name == 'shortlist':
1282 msg = _('use "hg help" for the full list of commands '
1283 msg = _('use "hg help" for the full list of commands '
1283 'or "hg -v" for details')
1284 'or "hg -v" for details')
1284 elif aliases:
1285 elif aliases:
1285 msg = _('use "hg -v help%s" to show aliases and '
1286 msg = _('use "hg -v help%s" to show aliases and '
1286 'global options') % (name and " " + name or "")
1287 'global options') % (name and " " + name or "")
1287 else:
1288 else:
1288 msg = _('use "hg -v help %s" to show global options') % name
1289 msg = _('use "hg -v help %s" to show global options') % name
1289 option_lists.append((msg, ()))
1290 option_lists.append((msg, ()))
1290
1291
1291 def helpcmd(name):
1292 def helpcmd(name):
1292 if with_version:
1293 if with_version:
1293 version_(ui)
1294 version_(ui)
1294 ui.write('\n')
1295 ui.write('\n')
1295 aliases, i = findcmd(ui, name)
1296 aliases, i = findcmd(ui, name)
1296 # synopsis
1297 # synopsis
1297 ui.write("%s\n\n" % i[2])
1298 ui.write("%s\n\n" % i[2])
1298
1299
1299 # description
1300 # description
1300 doc = i[0].__doc__
1301 doc = i[0].__doc__
1301 if not doc:
1302 if not doc:
1302 doc = _("(No help text available)")
1303 doc = _("(No help text available)")
1303 if ui.quiet:
1304 if ui.quiet:
1304 doc = doc.splitlines(0)[0]
1305 doc = doc.splitlines(0)[0]
1305 ui.write("%s\n" % doc.rstrip())
1306 ui.write("%s\n" % doc.rstrip())
1306
1307
1307 if not ui.quiet:
1308 if not ui.quiet:
1308 # aliases
1309 # aliases
1309 if len(aliases) > 1:
1310 if len(aliases) > 1:
1310 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1311 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1311
1312
1312 # options
1313 # options
1313 if i[1]:
1314 if i[1]:
1314 option_lists.append((_("options:\n"), i[1]))
1315 option_lists.append((_("options:\n"), i[1]))
1315
1316
1316 addglobalopts(False)
1317 addglobalopts(False)
1317
1318
1318 def helplist(select=None):
1319 def helplist(select=None):
1319 h = {}
1320 h = {}
1320 cmds = {}
1321 cmds = {}
1321 for c, e in table.items():
1322 for c, e in table.items():
1322 f = c.split("|", 1)[0]
1323 f = c.split("|", 1)[0]
1323 if select and not select(f):
1324 if select and not select(f):
1324 continue
1325 continue
1325 if name == "shortlist" and not f.startswith("^"):
1326 if name == "shortlist" and not f.startswith("^"):
1326 continue
1327 continue
1327 f = f.lstrip("^")
1328 f = f.lstrip("^")
1328 if not ui.debugflag and f.startswith("debug"):
1329 if not ui.debugflag and f.startswith("debug"):
1329 continue
1330 continue
1330 doc = e[0].__doc__
1331 doc = e[0].__doc__
1331 if not doc:
1332 if not doc:
1332 doc = _("(No help text available)")
1333 doc = _("(No help text available)")
1333 h[f] = doc.splitlines(0)[0].rstrip()
1334 h[f] = doc.splitlines(0)[0].rstrip()
1334 cmds[f] = c.lstrip("^")
1335 cmds[f] = c.lstrip("^")
1335
1336
1336 fns = h.keys()
1337 fns = h.keys()
1337 fns.sort()
1338 fns.sort()
1338 m = max(map(len, fns))
1339 m = max(map(len, fns))
1339 for f in fns:
1340 for f in fns:
1340 if ui.verbose:
1341 if ui.verbose:
1341 commands = cmds[f].replace("|",", ")
1342 commands = cmds[f].replace("|",", ")
1342 ui.write(" %s:\n %s\n"%(commands, h[f]))
1343 ui.write(" %s:\n %s\n"%(commands, h[f]))
1343 else:
1344 else:
1344 ui.write(' %-*s %s\n' % (m, f, h[f]))
1345 ui.write(' %-*s %s\n' % (m, f, h[f]))
1345
1346
1346 if not ui.quiet:
1347 if not ui.quiet:
1347 addglobalopts(True)
1348 addglobalopts(True)
1348
1349
1349 def helptopic(name):
1350 def helptopic(name):
1350 v = None
1351 v = None
1351 for i in help.helptable:
1352 for i in help.helptable:
1352 l = i.split('|')
1353 l = i.split('|')
1353 if name in l:
1354 if name in l:
1354 v = i
1355 v = i
1355 header = l[-1]
1356 header = l[-1]
1356 if not v:
1357 if not v:
1357 raise UnknownCommand(name)
1358 raise UnknownCommand(name)
1358
1359
1359 # description
1360 # description
1360 doc = help.helptable[v]
1361 doc = help.helptable[v]
1361 if not doc:
1362 if not doc:
1362 doc = _("(No help text available)")
1363 doc = _("(No help text available)")
1363 if callable(doc):
1364 if callable(doc):
1364 doc = doc()
1365 doc = doc()
1365
1366
1366 ui.write("%s\n" % header)
1367 ui.write("%s\n" % header)
1367 ui.write("%s\n" % doc.rstrip())
1368 ui.write("%s\n" % doc.rstrip())
1368
1369
1369 def helpext(name):
1370 def helpext(name):
1370 try:
1371 try:
1371 mod = findext(name)
1372 mod = findext(name)
1372 except KeyError:
1373 except KeyError:
1373 raise UnknownCommand(name)
1374 raise UnknownCommand(name)
1374
1375
1375 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1376 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
1376 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1377 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1377 for d in doc[1:]:
1378 for d in doc[1:]:
1378 ui.write(d, '\n')
1379 ui.write(d, '\n')
1379
1380
1380 ui.status('\n')
1381 ui.status('\n')
1381
1382
1382 try:
1383 try:
1383 ct = mod.cmdtable
1384 ct = mod.cmdtable
1384 except AttributeError:
1385 except AttributeError:
1385 ui.status(_('no commands defined\n'))
1386 ui.status(_('no commands defined\n'))
1386 return
1387 return
1387
1388
1388 ui.status(_('list of commands:\n\n'))
1389 ui.status(_('list of commands:\n\n'))
1389 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1390 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in ct])
1390 helplist(modcmds.has_key)
1391 helplist(modcmds.has_key)
1391
1392
1392 if name and name != 'shortlist':
1393 if name and name != 'shortlist':
1393 i = None
1394 i = None
1394 for f in (helpcmd, helptopic, helpext):
1395 for f in (helpcmd, helptopic, helpext):
1395 try:
1396 try:
1396 f(name)
1397 f(name)
1397 i = None
1398 i = None
1398 break
1399 break
1399 except UnknownCommand, inst:
1400 except UnknownCommand, inst:
1400 i = inst
1401 i = inst
1401 if i:
1402 if i:
1402 raise i
1403 raise i
1403
1404
1404 else:
1405 else:
1405 # program name
1406 # program name
1406 if ui.verbose or with_version:
1407 if ui.verbose or with_version:
1407 version_(ui)
1408 version_(ui)
1408 else:
1409 else:
1409 ui.status(_("Mercurial Distributed SCM\n"))
1410 ui.status(_("Mercurial Distributed SCM\n"))
1410 ui.status('\n')
1411 ui.status('\n')
1411
1412
1412 # list of commands
1413 # list of commands
1413 if name == "shortlist":
1414 if name == "shortlist":
1414 ui.status(_('basic commands:\n\n'))
1415 ui.status(_('basic commands:\n\n'))
1415 else:
1416 else:
1416 ui.status(_('list of commands:\n\n'))
1417 ui.status(_('list of commands:\n\n'))
1417
1418
1418 helplist()
1419 helplist()
1419
1420
1420 # list all option lists
1421 # list all option lists
1421 opt_output = []
1422 opt_output = []
1422 for title, options in option_lists:
1423 for title, options in option_lists:
1423 opt_output.append(("\n%s" % title, None))
1424 opt_output.append(("\n%s" % title, None))
1424 for shortopt, longopt, default, desc in options:
1425 for shortopt, longopt, default, desc in options:
1425 if "DEPRECATED" in desc and not ui.verbose: continue
1426 if "DEPRECATED" in desc and not ui.verbose: continue
1426 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1427 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1427 longopt and " --%s" % longopt),
1428 longopt and " --%s" % longopt),
1428 "%s%s" % (desc,
1429 "%s%s" % (desc,
1429 default
1430 default
1430 and _(" (default: %s)") % default
1431 and _(" (default: %s)") % default
1431 or "")))
1432 or "")))
1432
1433
1433 if opt_output:
1434 if opt_output:
1434 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1435 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1435 for first, second in opt_output:
1436 for first, second in opt_output:
1436 if second:
1437 if second:
1437 ui.write(" %-*s %s\n" % (opts_len, first, second))
1438 ui.write(" %-*s %s\n" % (opts_len, first, second))
1438 else:
1439 else:
1439 ui.write("%s\n" % first)
1440 ui.write("%s\n" % first)
1440
1441
1441 def identify(ui, repo):
1442 def identify(ui, repo):
1442 """print information about the working copy
1443 """print information about the working copy
1443
1444
1444 Print a short summary of the current state of the repo.
1445 Print a short summary of the current state of the repo.
1445
1446
1446 This summary identifies the repository state using one or two parent
1447 This summary identifies the repository state using one or two parent
1447 hash identifiers, followed by a "+" if there are uncommitted changes
1448 hash identifiers, followed by a "+" if there are uncommitted changes
1448 in the working directory, followed by a list of tags for this revision.
1449 in the working directory, followed by a list of tags for this revision.
1449 """
1450 """
1450 parents = [p for p in repo.dirstate.parents() if p != nullid]
1451 parents = [p for p in repo.dirstate.parents() if p != nullid]
1451 if not parents:
1452 if not parents:
1452 ui.write(_("unknown\n"))
1453 ui.write(_("unknown\n"))
1453 return
1454 return
1454
1455
1455 hexfunc = ui.debugflag and hex or short
1456 hexfunc = ui.debugflag and hex or short
1456 modified, added, removed, deleted = repo.status()[:4]
1457 modified, added, removed, deleted = repo.status()[:4]
1457 output = ["%s%s" %
1458 output = ["%s%s" %
1458 ('+'.join([hexfunc(parent) for parent in parents]),
1459 ('+'.join([hexfunc(parent) for parent in parents]),
1459 (modified or added or removed or deleted) and "+" or "")]
1460 (modified or added or removed or deleted) and "+" or "")]
1460
1461
1461 if not ui.quiet:
1462 if not ui.quiet:
1462
1463
1463 branch = util.tolocal(repo.workingctx().branch())
1464 branch = util.tolocal(repo.workingctx().branch())
1464 if branch != 'default':
1465 if branch != 'default':
1465 output.append("(%s)" % branch)
1466 output.append("(%s)" % branch)
1466
1467
1467 # multiple tags for a single parent separated by '/'
1468 # multiple tags for a single parent separated by '/'
1468 parenttags = ['/'.join(tags)
1469 parenttags = ['/'.join(tags)
1469 for tags in map(repo.nodetags, parents) if tags]
1470 for tags in map(repo.nodetags, parents) if tags]
1470 # tags for multiple parents separated by ' + '
1471 # tags for multiple parents separated by ' + '
1471 if parenttags:
1472 if parenttags:
1472 output.append(' + '.join(parenttags))
1473 output.append(' + '.join(parenttags))
1473
1474
1474 ui.write("%s\n" % ' '.join(output))
1475 ui.write("%s\n" % ' '.join(output))
1475
1476
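The identify docstring above promises the parent hashes joined by '+', a trailing '+' for local modifications, then branch and tags. The sketch below restates that formatting on invented values, including the `cond and A or B` idiom this file uses in place of a conditional expression; summarize and its inputs are hypothetical.

# Sketch of the summary string described in the identify docstring.
# short_ids, dirty and tags are invented inputs, not Mercurial API values.
def summarize(short_ids, dirty, branch='default', tags=()):
    parts = ['%s%s' % ('+'.join(short_ids), dirty and '+' or '')]
    if branch != 'default':
        parts.append('(%s)' % branch)   # non-default branch shown in parentheses
    if tags:
        parts.append('/'.join(tags))    # multiple tags for one parent joined by '/'
    return ' '.join(parts)

if __name__ == '__main__':
    # a merge in progress with local changes, at tag "tip" (values made up)
    print(summarize(['672b7b29b3cd', 'd9a3ba4df174'], dirty=True, tags=['tip']))
    # -> 672b7b29b3cd+d9a3ba4df174+ tip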
1476 def import_(ui, repo, patch1, *patches, **opts):
1477 def import_(ui, repo, patch1, *patches, **opts):
1477 """import an ordered set of patches
1478 """import an ordered set of patches
1478
1479
1479 Import a list of patches and commit them individually.
1480 Import a list of patches and commit them individually.
1480
1481
1481 If there are outstanding changes in the working directory, import
1482 If there are outstanding changes in the working directory, import
1482 will abort unless given the -f flag.
1483 will abort unless given the -f flag.
1483
1484
1484 You can import a patch straight from a mail message. Even patches
1485 You can import a patch straight from a mail message. Even patches
1485 as attachments work (body part must be type text/plain or
1486 as attachments work (body part must be type text/plain or
1486 text/x-patch to be used). From and Subject headers of the email
1487 text/x-patch to be used). From and Subject headers of the email
1487 message are used as the default committer and commit message. All
1488 message are used as the default committer and commit message. All
1488 text/plain body parts before the first diff are added to the commit
1489 text/plain body parts before the first diff are added to the commit
1489 message.
1490 message.
1490
1491
1491 If the imported patch was generated by hg export, user and description
1492 If the imported patch was generated by hg export, user and description
1492 from the patch override values from the message headers and body. Values
1493 from the patch override values from the message headers and body. Values
1493 given on the command line with -m and -u override these.
1494 given on the command line with -m and -u override these.
1494
1495
1495 If --exact is specified, import will set the working directory
1496 If --exact is specified, import will set the working directory
1496 to the parent of each patch before applying it, and will abort
1497 to the parent of each patch before applying it, and will abort
1497 if the resulting changeset has a different ID than the one
1498 if the resulting changeset has a different ID than the one
1498 recorded in the patch. This may happen due to character set
1499 recorded in the patch. This may happen due to character set
1499 problems or other deficiencies in the text patch format.
1500 problems or other deficiencies in the text patch format.
1500
1501
1501 To read a patch from standard input, use patch name "-".
1502 To read a patch from standard input, use patch name "-".
1502 """
1503 """
1503 patches = (patch1,) + patches
1504 patches = (patch1,) + patches
1504
1505
1505 if opts.get('exact') or not opts['force']:
1506 if opts.get('exact') or not opts['force']:
1506 bail_if_changed(repo)
1507 bail_if_changed(repo)
1507
1508
1508 d = opts["base"]
1509 d = opts["base"]
1509 strip = opts["strip"]
1510 strip = opts["strip"]
1510
1511
1511 wlock = repo.wlock()
1512 wlock = repo.wlock()
1512 lock = repo.lock()
1513 lock = repo.lock()
1513
1514
1514 for p in patches:
1515 for p in patches:
1515 pf = os.path.join(d, p)
1516 pf = os.path.join(d, p)
1516
1517
1517 if pf == '-':
1518 if pf == '-':
1518 ui.status(_("applying patch from stdin\n"))
1519 ui.status(_("applying patch from stdin\n"))
1519 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1520 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, sys.stdin)
1520 else:
1521 else:
1521 ui.status(_("applying %s\n") % p)
1522 ui.status(_("applying %s\n") % p)
1522 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf))
1523 tmpname, message, user, date, branch, nodeid, p1, p2 = patch.extract(ui, file(pf))
1523
1524
1524 if tmpname is None:
1525 if tmpname is None:
1525 raise util.Abort(_('no diffs found'))
1526 raise util.Abort(_('no diffs found'))
1526
1527
1527 try:
1528 try:
1528 cmdline_message = logmessage(opts)
1529 cmdline_message = logmessage(opts)
1529 if cmdline_message:
1530 if cmdline_message:
1530 # pickup the cmdline msg
1531 # pickup the cmdline msg
1531 message = cmdline_message
1532 message = cmdline_message
1532 elif message:
1533 elif message:
1533 # pickup the patch msg
1534 # pickup the patch msg
1534 message = message.strip()
1535 message = message.strip()
1535 else:
1536 else:
1536 # launch the editor
1537 # launch the editor
1537 message = None
1538 message = None
1538 ui.debug(_('message:\n%s\n') % message)
1539 ui.debug(_('message:\n%s\n') % message)
1539
1540
1540 wp = repo.workingctx().parents()
1541 wp = repo.workingctx().parents()
1541 if opts.get('exact'):
1542 if opts.get('exact'):
1542 if not nodeid or not p1:
1543 if not nodeid or not p1:
1543 raise util.Abort(_('not a mercurial patch'))
1544 raise util.Abort(_('not a mercurial patch'))
1544 p1 = repo.lookup(p1)
1545 p1 = repo.lookup(p1)
1545 p2 = repo.lookup(p2 or hex(nullid))
1546 p2 = repo.lookup(p2 or hex(nullid))
1546
1547
1547 if p1 != wp[0].node():
1548 if p1 != wp[0].node():
1548 hg.clean(repo, p1, wlock=wlock)
1549 hg.clean(repo, p1, wlock=wlock)
1549 repo.dirstate.setparents(p1, p2)
1550 repo.dirstate.setparents(p1, p2)
1550 repo.dirstate.setbranch(branch or 'default')
1551 repo.dirstate.setbranch(branch or 'default')
1551 elif p2:
1552 elif p2:
1552 try:
1553 try:
1553 p1 = repo.lookup(p1)
1554 p1 = repo.lookup(p1)
1554 p2 = repo.lookup(p2)
1555 p2 = repo.lookup(p2)
1555 if p1 == wp[0].node():
1556 if p1 == wp[0].node():
1556 repo.dirstate.setparents(p1, p2)
1557 repo.dirstate.setparents(p1, p2)
1557 except hg.RepoError:
1558 except hg.RepoError:
1558 pass
1559 pass
1559
1560
1560 files = {}
1561 files = {}
1561 try:
1562 try:
1562 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1563 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1563 files=files)
1564 files=files)
1564 finally:
1565 finally:
1565 files = patch.updatedir(ui, repo, files, wlock=wlock)
1566 files = patch.updatedir(ui, repo, files, wlock=wlock)
1566 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1567 n = repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1567 if opts.get('exact'):
1568 if opts.get('exact'):
1568 if hex(n) != nodeid:
1569 if hex(n) != nodeid:
1569 repo.rollback(wlock=wlock, lock=lock)
1570 repo.rollback(wlock=wlock, lock=lock)
1570 raise util.Abort(_('patch is damaged or loses information'))
1571 raise util.Abort(_('patch is damaged or loses information'))
1571 finally:
1572 finally:
1572 os.unlink(tmpname)
1573 os.unlink(tmpname)
1573
1574
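The import docstring above says a patch mail's From and Subject headers supply the default committer and commit message, with text/plain content before the first diff folded into the message. The standalone sketch below shows that kind of extraction with the standard email module only; it approximates the described behaviour and is not patch.extract.

# Rough, standalone illustration of pulling committer/message defaults out of
# a patch e-mail; the sample message is invented.
import email

RAW = b"""From: Jane Doe <jane@example.com>
Subject: fix off-by-one in matchlines

The loop started one line too late.

diff -r aaaa -r bbbb commands.py
--- a/commands.py
+++ b/commands.py
"""

def defaults_from_mail(raw_bytes):
    msg = email.message_from_bytes(raw_bytes)
    user = msg['From']                       # default committer
    body = msg.get_payload(decode=True).decode()
    # everything before the first diff line joins the Subject as the message
    lead = body.split('\ndiff ', 1)[0].strip()
    message = (msg['Subject'] or '') + ('\n\n' + lead if lead else '')
    return user, message

if __name__ == '__main__':
    user, message = defaults_from_mail(RAW)
    print(user)      # Jane Doe <jane@example.com>
    print(message)   # subject line, blank line, then the explanatory paragraph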
1574 def incoming(ui, repo, source="default", **opts):
1575 def incoming(ui, repo, source="default", **opts):
1575 """show new changesets found in source
1576 """show new changesets found in source
1576
1577
1577 Show new changesets found in the specified path/URL or the default
1578 Show new changesets found in the specified path/URL or the default
1578 pull location. These are the changesets that would be pulled if a pull
1579 pull location. These are the changesets that would be pulled if a pull
1579 was requested.
1580 was requested.
1580
1581
1581 For a remote repository, using --bundle avoids downloading the changesets
1582 For a remote repository, using --bundle avoids downloading the changesets
1582 twice if the incoming is followed by a pull.
1583 twice if the incoming is followed by a pull.
1583
1584
1584 See pull for valid source format details.
1585 See pull for valid source format details.
1585 """
1586 """
1586 source = ui.expandpath(source)
1587 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1587 setremoteconfig(ui, opts)
1588 setremoteconfig(ui, opts)
1588
1589
1589 other = hg.repository(ui, source)
1590 other = hg.repository(ui, source)
1590 ui.status(_('comparing with %s\n') % source)
1591 ui.status(_('comparing with %s\n') % source)
1591 incoming = repo.findincoming(other, force=opts["force"])
1592 if revs:
1593 if 'lookup' in other.capabilities:
1594 revs = [other.lookup(rev) for rev in revs]
1595 else:
1596 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
1597 raise util.Abort(error)
1598 incoming = repo.findincoming(other, heads=revs, force=opts["force"])
1592 if not incoming:
1599 if not incoming:
1593 try:
1600 try:
1594 os.unlink(opts["bundle"])
1601 os.unlink(opts["bundle"])
1595 except:
1602 except:
1596 pass
1603 pass
1597 ui.status(_("no changes found\n"))
1604 ui.status(_("no changes found\n"))
1598 return 1
1605 return 1
1599
1606
1600 cleanup = None
1607 cleanup = None
1601 try:
1608 try:
1602 fname = opts["bundle"]
1609 fname = opts["bundle"]
1603 if fname or not other.local():
1610 if fname or not other.local():
1604 # create a bundle (uncompressed if other repo is not local)
1611 # create a bundle (uncompressed if other repo is not local)
1612 if revs is None:
1605 cg = other.changegroup(incoming, "incoming")
1613 cg = other.changegroup(incoming, "incoming")
1614 else:
1615 if 'changegroupsubset' not in other.capabilities:
1616 raise util.Abort(_("Partial incoming cannot be done because other repository doesn't support changegroupsubset."))
1617 cg = other.changegroupsubset(incoming, revs, 'incoming')
1606 bundletype = other.local() and "HG10BZ" or "HG10UN"
1618 bundletype = other.local() and "HG10BZ" or "HG10UN"
1607 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1619 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1608 # keep written bundle?
1620 # keep written bundle?
1609 if opts["bundle"]:
1621 if opts["bundle"]:
1610 cleanup = None
1622 cleanup = None
1611 if not other.local():
1623 if not other.local():
1612 # use the created uncompressed bundlerepo
1624 # use the created uncompressed bundlerepo
1613 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1625 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1614
1626
1615 revs = None
1616 if opts['rev']:
1617 revs = [other.lookup(rev) for rev in opts['rev']]
1618 o = other.changelog.nodesbetween(incoming, revs)[0]
1627 o = other.changelog.nodesbetween(incoming, revs)[0]
1619 if opts['newest_first']:
1628 if opts['newest_first']:
1620 o.reverse()
1629 o.reverse()
1621 displayer = cmdutil.show_changeset(ui, other, opts)
1630 displayer = cmdutil.show_changeset(ui, other, opts)
1622 for n in o:
1631 for n in o:
1623 parents = [p for p in other.changelog.parents(n) if p != nullid]
1632 parents = [p for p in other.changelog.parents(n) if p != nullid]
1624 if opts['no_merges'] and len(parents) == 2:
1633 if opts['no_merges'] and len(parents) == 2:
1625 continue
1634 continue
1626 displayer.show(changenode=n)
1635 displayer.show(changenode=n)
1627 finally:
1636 finally:
1628 if hasattr(other, 'close'):
1637 if hasattr(other, 'close'):
1629 other.close()
1638 other.close()
1630 if cleanup:
1639 if cleanup:
1631 os.unlink(cleanup)
1640 os.unlink(cleanup)
1632
1641
1633 def init(ui, dest=".", **opts):
1642 def init(ui, dest=".", **opts):
1634 """create a new repository in the given directory
1643 """create a new repository in the given directory
1635
1644
1636 Initialize a new repository in the given directory. If the given
1645 Initialize a new repository in the given directory. If the given
1637 directory does not exist, it is created.
1646 directory does not exist, it is created.
1638
1647
1639 If no directory is given, the current directory is used.
1648 If no directory is given, the current directory is used.
1640
1649
1641 It is possible to specify an ssh:// URL as the destination.
1650 It is possible to specify an ssh:// URL as the destination.
1642 Look at the help text for the pull command for important details
1651 Look at the help text for the pull command for important details
1643 about ssh:// URLs.
1652 about ssh:// URLs.
1644 """
1653 """
1645 setremoteconfig(ui, opts)
1654 setremoteconfig(ui, opts)
1646 hg.repository(ui, dest, create=1)
1655 hg.repository(ui, dest, create=1)
1647
1656
1648 def locate(ui, repo, *pats, **opts):
1657 def locate(ui, repo, *pats, **opts):
1649 """locate files matching specific patterns
1658 """locate files matching specific patterns
1650
1659
1651 Print all files under Mercurial control whose names match the
1660 Print all files under Mercurial control whose names match the
1652 given patterns.
1661 given patterns.
1653
1662
1654 This command searches the entire repository by default. To search
1663 This command searches the entire repository by default. To search
1655 just the current directory and its subdirectories, use "--include .".
1664 just the current directory and its subdirectories, use "--include .".
1656
1665
1657 If no patterns are given to match, this command prints all file
1666 If no patterns are given to match, this command prints all file
1658 names.
1667 names.
1659
1668
1660 If you want to feed the output of this command into the "xargs"
1669 If you want to feed the output of this command into the "xargs"
1661 command, use the "-0" option to both this command and "xargs".
1670 command, use the "-0" option to both this command and "xargs".
1662 This will avoid the problem of "xargs" treating single filenames
1671 This will avoid the problem of "xargs" treating single filenames
1663 that contain white space as multiple filenames.
1672 that contain white space as multiple filenames.
1664 """
1673 """
1665 end = opts['print0'] and '\0' or '\n'
1674 end = opts['print0'] and '\0' or '\n'
1666 rev = opts['rev']
1675 rev = opts['rev']
1667 if rev:
1676 if rev:
1668 node = repo.lookup(rev)
1677 node = repo.lookup(rev)
1669 else:
1678 else:
1670 node = None
1679 node = None
1671
1680
1672 ret = 1
1681 ret = 1
1673 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1682 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1674 badmatch=util.always,
1683 badmatch=util.always,
1675 default='relglob'):
1684 default='relglob'):
1676 if src == 'b':
1685 if src == 'b':
1677 continue
1686 continue
1678 if not node and repo.dirstate.state(abs) == '?':
1687 if not node and repo.dirstate.state(abs) == '?':
1679 continue
1688 continue
1680 if opts['fullpath']:
1689 if opts['fullpath']:
1681 ui.write(os.path.join(repo.root, abs), end)
1690 ui.write(os.path.join(repo.root, abs), end)
1682 else:
1691 else:
1683 ui.write(((pats and rel) or abs), end)
1692 ui.write(((pats and rel) or abs), end)
1684 ret = 0
1693 ret = 0
1685
1694
1686 return ret
1695 return ret
1687
1696
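The locate docstring above recommends pairing -0 with xargs so filenames containing spaces survive the pipe. The short sketch below shows the NUL-versus-newline choice that the `end = opts['print0'] and '\0' or '\n'` line implements; emit and the sample names are invented.

# Why "-0" helps: newline-separated output breaks on names with spaces,
# NUL-separated output does not.
import sys

def emit(names, print0=False):
    end = print0 and '\0' or '\n'       # same truthy and/or idiom as the listing
    for name in names:
        sys.stdout.write(name + end)

if __name__ == '__main__':
    emit(['notes.txt', 'My Project/old plan.txt'], print0=True)
    # pipe this into: xargs -0 ls -l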
1688 def log(ui, repo, *pats, **opts):
1697 def log(ui, repo, *pats, **opts):
1689 """show revision history of entire repository or files
1698 """show revision history of entire repository or files
1690
1699
1691 Print the revision history of the specified files or the entire
1700 Print the revision history of the specified files or the entire
1692 project.
1701 project.
1693
1702
1694 File history is shown without following rename or copy history of
1703 File history is shown without following rename or copy history of
1695 files. Use -f/--follow with a file name to follow history across
1704 files. Use -f/--follow with a file name to follow history across
1696 renames and copies. --follow without a file name will only show
1705 renames and copies. --follow without a file name will only show
1697 ancestors or descendants of the starting revision. --follow-first
1706 ancestors or descendants of the starting revision. --follow-first
1698 only follows the first parent of merge revisions.
1707 only follows the first parent of merge revisions.
1699
1708
1700 If no revision range is specified, the default is tip:0 unless
1709 If no revision range is specified, the default is tip:0 unless
1701 --follow is set, in which case the working directory parent is
1710 --follow is set, in which case the working directory parent is
1702 used as the starting revision.
1711 used as the starting revision.
1703
1712
1704 By default this command outputs: changeset id and hash, tags,
1713 By default this command outputs: changeset id and hash, tags,
1705 non-trivial parents, user, date and time, and a summary for each
1714 non-trivial parents, user, date and time, and a summary for each
1706 commit. When the -v/--verbose switch is used, the list of changed
1715 commit. When the -v/--verbose switch is used, the list of changed
1707 files and the full commit message are shown.
1716 files and the full commit message are shown.
1708
1717
1709 NOTE: log -p may generate unexpected diff output for merge
1718 NOTE: log -p may generate unexpected diff output for merge
1710 changesets, as it will compare the merge changeset against its
1719 changesets, as it will compare the merge changeset against its
1711 first parent only. Also, the files: list will only reflect files
1720 first parent only. Also, the files: list will only reflect files
1712 that are different from BOTH parents.
1721 that are different from BOTH parents.
1713
1722
1714 """
1723 """
1715
1724
1716 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1725 get = util.cachefunc(lambda r: repo.changectx(r).changeset())
1717 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1726 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1718
1727
1719 if opts['limit']:
1728 if opts['limit']:
1720 try:
1729 try:
1721 limit = int(opts['limit'])
1730 limit = int(opts['limit'])
1722 except ValueError:
1731 except ValueError:
1723 raise util.Abort(_('limit must be a positive integer'))
1732 raise util.Abort(_('limit must be a positive integer'))
1724 if limit <= 0: raise util.Abort(_('limit must be positive'))
1733 if limit <= 0: raise util.Abort(_('limit must be positive'))
1725 else:
1734 else:
1726 limit = sys.maxint
1735 limit = sys.maxint
1727 count = 0
1736 count = 0
1728
1737
1729 if opts['copies'] and opts['rev']:
1738 if opts['copies'] and opts['rev']:
1730 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1739 endrev = max(cmdutil.revrange(repo, opts['rev'])) + 1
1731 else:
1740 else:
1732 endrev = repo.changelog.count()
1741 endrev = repo.changelog.count()
1733 rcache = {}
1742 rcache = {}
1734 ncache = {}
1743 ncache = {}
1735 dcache = []
1744 dcache = []
1736 def getrenamed(fn, rev, man):
1745 def getrenamed(fn, rev, man):
1737 '''looks up all renames for a file (up to endrev) the first
1746 '''looks up all renames for a file (up to endrev) the first
1738 time the file is given. It indexes on the changerev and only
1747 time the file is given. It indexes on the changerev and only
1739 parses the manifest if linkrev != changerev.
1748 parses the manifest if linkrev != changerev.
1740 Returns rename info for fn at changerev rev.'''
1749 Returns rename info for fn at changerev rev.'''
1741 if fn not in rcache:
1750 if fn not in rcache:
1742 rcache[fn] = {}
1751 rcache[fn] = {}
1743 ncache[fn] = {}
1752 ncache[fn] = {}
1744 fl = repo.file(fn)
1753 fl = repo.file(fn)
1745 for i in xrange(fl.count()):
1754 for i in xrange(fl.count()):
1746 node = fl.node(i)
1755 node = fl.node(i)
1747 lr = fl.linkrev(node)
1756 lr = fl.linkrev(node)
1748 renamed = fl.renamed(node)
1757 renamed = fl.renamed(node)
1749 rcache[fn][lr] = renamed
1758 rcache[fn][lr] = renamed
1750 if renamed:
1759 if renamed:
1751 ncache[fn][node] = renamed
1760 ncache[fn][node] = renamed
1752 if lr >= endrev:
1761 if lr >= endrev:
1753 break
1762 break
1754 if rev in rcache[fn]:
1763 if rev in rcache[fn]:
1755 return rcache[fn][rev]
1764 return rcache[fn][rev]
1756 mr = repo.manifest.rev(man)
1765 mr = repo.manifest.rev(man)
1757 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1766 if repo.manifest.parentrevs(mr) != (mr - 1, nullrev):
1758 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1767 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1759 if not dcache or dcache[0] != man:
1768 if not dcache or dcache[0] != man:
1760 dcache[:] = [man, repo.manifest.readdelta(man)]
1769 dcache[:] = [man, repo.manifest.readdelta(man)]
1761 if fn in dcache[1]:
1770 if fn in dcache[1]:
1762 return ncache[fn].get(dcache[1][fn])
1771 return ncache[fn].get(dcache[1][fn])
1763 return None
1772 return None
1764
1773
1765 df = False
1774 df = False
1766 if opts["date"]:
1775 if opts["date"]:
1767 df = util.matchdate(opts["date"])
1776 df = util.matchdate(opts["date"])
1768
1777
1769 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1778 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1770 for st, rev, fns in changeiter:
1779 for st, rev, fns in changeiter:
1771 if st == 'add':
1780 if st == 'add':
1772 changenode = repo.changelog.node(rev)
1781 changenode = repo.changelog.node(rev)
1773 parents = [p for p in repo.changelog.parentrevs(rev)
1782 parents = [p for p in repo.changelog.parentrevs(rev)
1774 if p != nullrev]
1783 if p != nullrev]
1775 if opts['no_merges'] and len(parents) == 2:
1784 if opts['no_merges'] and len(parents) == 2:
1776 continue
1785 continue
1777 if opts['only_merges'] and len(parents) != 2:
1786 if opts['only_merges'] and len(parents) != 2:
1778 continue
1787 continue
1779
1788
1780 if df:
1789 if df:
1781 changes = get(rev)
1790 changes = get(rev)
1782 if not df(changes[2][0]):
1791 if not df(changes[2][0]):
1783 continue
1792 continue
1784
1793
1785 if opts['keyword']:
1794 if opts['keyword']:
1786 changes = get(rev)
1795 changes = get(rev)
1787 miss = 0
1796 miss = 0
1788 for k in [kw.lower() for kw in opts['keyword']]:
1797 for k in [kw.lower() for kw in opts['keyword']]:
1789 if not (k in changes[1].lower() or
1798 if not (k in changes[1].lower() or
1790 k in changes[4].lower() or
1799 k in changes[4].lower() or
1791 k in " ".join(changes[3]).lower()):
1800 k in " ".join(changes[3]).lower()):
1792 miss = 1
1801 miss = 1
1793 break
1802 break
1794 if miss:
1803 if miss:
1795 continue
1804 continue
1796
1805
1797 copies = []
1806 copies = []
1798 if opts.get('copies') and rev:
1807 if opts.get('copies') and rev:
1799 mf = get(rev)[0]
1808 mf = get(rev)[0]
1800 for fn in get(rev)[3]:
1809 for fn in get(rev)[3]:
1801 rename = getrenamed(fn, rev, mf)
1810 rename = getrenamed(fn, rev, mf)
1802 if rename:
1811 if rename:
1803 copies.append((fn, rename[0]))
1812 copies.append((fn, rename[0]))
1804 displayer.show(rev, changenode, copies=copies)
1813 displayer.show(rev, changenode, copies=copies)
1805 elif st == 'iter':
1814 elif st == 'iter':
1806 if count == limit: break
1815 if count == limit: break
1807 if displayer.flush(rev):
1816 if displayer.flush(rev):
1808 count += 1
1817 count += 1
1809
1818
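log() above wraps its changeset lookup in util.cachefunc, and getrenamed keeps rcache/ncache so each filelog is walked at most once. util.cachefunc is not shown in this hunk, so the memoising wrapper below is an assumption inferred from its name and one-argument use, not its real implementation.

# Minimal memoising wrapper in the spirit of how util.cachefunc is used above.
def cachefunc(func):
    cache = {}
    def cached(arg):
        if arg not in cache:          # compute once per distinct argument
            cache[arg] = func(arg)
        return cache[arg]
    return cached

if __name__ == '__main__':
    calls = []
    @cachefunc
    def expensive(rev):
        calls.append(rev)             # record real invocations
        return 'changeset data for %s' % rev

    expensive(42), expensive(42), expensive(7)
    print(calls)                      # [42, 7] - the repeated call hit the cache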
1810 def manifest(ui, repo, rev=None):
1819 def manifest(ui, repo, rev=None):
1811 """output the current or given revision of the project manifest
1820 """output the current or given revision of the project manifest
1812
1821
1813 Print a list of version controlled files for the given revision.
1822 Print a list of version controlled files for the given revision.
1814 If no revision is given, the parent of the working directory is used,
1823 If no revision is given, the parent of the working directory is used,
1815 or tip if no revision is checked out.
1824 or tip if no revision is checked out.
1816
1825
1817 The manifest is the list of files being version controlled. If no revision
1826 The manifest is the list of files being version controlled. If no revision
1818 is given then the first parent of the working directory is used.
1827 is given then the first parent of the working directory is used.
1819
1828
1820 With -v flag, print file permissions. With --debug flag, print
1829 With -v flag, print file permissions. With --debug flag, print
1821 file revision hashes.
1830 file revision hashes.
1822 """
1831 """
1823
1832
1824 m = repo.changectx(rev).manifest()
1833 m = repo.changectx(rev).manifest()
1825 files = m.keys()
1834 files = m.keys()
1826 files.sort()
1835 files.sort()
1827
1836
1828 for f in files:
1837 for f in files:
1829 if ui.debugflag:
1838 if ui.debugflag:
1830 ui.write("%40s " % hex(m[f]))
1839 ui.write("%40s " % hex(m[f]))
1831 if ui.verbose:
1840 if ui.verbose:
1832 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1841 ui.write("%3s " % (m.execf(f) and "755" or "644"))
1833 ui.write("%s\n" % f)
1842 ui.write("%s\n" % f)
1834
1843
1835 def merge(ui, repo, node=None, force=None, rev=None):
1844 def merge(ui, repo, node=None, force=None, rev=None):
1836 """merge working directory with another revision
1845 """merge working directory with another revision
1837
1846
1838 Merge the contents of the current working directory and the
1847 Merge the contents of the current working directory and the
1839 requested revision. Files that changed between either parent are
1848 requested revision. Files that changed between either parent are
1840 marked as changed for the next commit and a commit must be
1849 marked as changed for the next commit and a commit must be
1841 performed before any further updates are allowed.
1850 performed before any further updates are allowed.
1842
1851
1843 If no revision is specified, the working directory's parent is a
1852 If no revision is specified, the working directory's parent is a
1844 head revision, and the repository contains exactly one other head,
1853 head revision, and the repository contains exactly one other head,
1845 the other head is merged by default. Otherwise, an explicit
1854 the other head is merged by default. Otherwise, an explicit
1846 revision to merge with must be provided.
1855 revision to merge with must be provided.
1847 """
1856 """
1848
1857
1849 if rev and node:
1858 if rev and node:
1850 raise util.Abort(_("please specify just one revision"))
1859 raise util.Abort(_("please specify just one revision"))
1851
1860
1852 if not node:
1861 if not node:
1853 node = rev
1862 node = rev
1854
1863
1855 if not node:
1864 if not node:
1856 heads = repo.heads()
1865 heads = repo.heads()
1857 if len(heads) > 2:
1866 if len(heads) > 2:
1858 raise util.Abort(_('repo has %d heads - '
1867 raise util.Abort(_('repo has %d heads - '
1859 'please merge with an explicit rev') %
1868 'please merge with an explicit rev') %
1860 len(heads))
1869 len(heads))
1861 if len(heads) == 1:
1870 if len(heads) == 1:
1862 raise util.Abort(_('there is nothing to merge - '
1871 raise util.Abort(_('there is nothing to merge - '
1863 'use "hg update" instead'))
1872 'use "hg update" instead'))
1864 parent = repo.dirstate.parents()[0]
1873 parent = repo.dirstate.parents()[0]
1865 if parent not in heads:
1874 if parent not in heads:
1866 raise util.Abort(_('working dir not at a head rev - '
1875 raise util.Abort(_('working dir not at a head rev - '
1867 'use "hg update" or merge with an explicit rev'))
1876 'use "hg update" or merge with an explicit rev'))
1868 node = parent == heads[0] and heads[-1] or heads[0]
1877 node = parent == heads[0] and heads[-1] or heads[0]
1869 return hg.merge(repo, node, force=force)
1878 return hg.merge(repo, node, force=force)
1870
1879
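With no revision given, merge() above requires exactly two heads and picks "the other one" via `parent == heads[0] and heads[-1] or heads[0]`. The sketch below isolates that selection on invented head names.

# The "merge with the other head" pick, isolated from the function above.
def other_head(parent, heads):
    if len(heads) != 2:
        raise ValueError('need exactly two heads to choose automatically')
    # same and/or idiom as the listing: if we sit on heads[0], take the last
    # head, otherwise take heads[0]
    return parent == heads[0] and heads[-1] or heads[0]

if __name__ == '__main__':
    heads = ['head-a', 'head-b']
    print(other_head('head-a', heads))   # head-b
    print(other_head('head-b', heads))   # head-a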
1871 def outgoing(ui, repo, dest=None, **opts):
1880 def outgoing(ui, repo, dest=None, **opts):
1872 """show changesets not found in destination
1881 """show changesets not found in destination
1873
1882
1874 Show changesets not found in the specified destination repository or
1883 Show changesets not found in the specified destination repository or
1875 the default push location. These are the changesets that would be pushed
1884 the default push location. These are the changesets that would be pushed
1876 if a push was requested.
1885 if a push was requested.
1877
1886
1878 See pull for valid destination format details.
1887 See pull for valid destination format details.
1879 """
1888 """
1880 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1889 dest, revs = cmdutil.parseurl(
1890 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
1881 setremoteconfig(ui, opts)
1891 setremoteconfig(ui, opts)
1882 revs = None
1892 if revs:
1883 if opts['rev']:
1893 revs = [repo.lookup(rev) for rev in revs]
1884 revs = [repo.lookup(rev) for rev in opts['rev']]
1885
1894
1886 other = hg.repository(ui, dest)
1895 other = hg.repository(ui, dest)
1887 ui.status(_('comparing with %s\n') % dest)
1896 ui.status(_('comparing with %s\n') % dest)
1888 o = repo.findoutgoing(other, force=opts['force'])
1897 o = repo.findoutgoing(other, force=opts['force'])
1889 if not o:
1898 if not o:
1890 ui.status(_("no changes found\n"))
1899 ui.status(_("no changes found\n"))
1891 return 1
1900 return 1
1892 o = repo.changelog.nodesbetween(o, revs)[0]
1901 o = repo.changelog.nodesbetween(o, revs)[0]
1893 if opts['newest_first']:
1902 if opts['newest_first']:
1894 o.reverse()
1903 o.reverse()
1895 displayer = cmdutil.show_changeset(ui, repo, opts)
1904 displayer = cmdutil.show_changeset(ui, repo, opts)
1896 for n in o:
1905 for n in o:
1897 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1906 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1898 if opts['no_merges'] and len(parents) == 2:
1907 if opts['no_merges'] and len(parents) == 2:
1899 continue
1908 continue
1900 displayer.show(changenode=n)
1909 displayer.show(changenode=n)
1901
1910
1902 def parents(ui, repo, file_=None, **opts):
1911 def parents(ui, repo, file_=None, **opts):
1903 """show the parents of the working dir or revision
1912 """show the parents of the working dir or revision
1904
1913
1905 Print the working directory's parent revisions.
1914 Print the working directory's parent revisions.
1906 """
1915 """
1907 rev = opts.get('rev')
1916 rev = opts.get('rev')
1908 if rev:
1917 if rev:
1909 if file_:
1918 if file_:
1910 ctx = repo.filectx(file_, changeid=rev)
1919 ctx = repo.filectx(file_, changeid=rev)
1911 else:
1920 else:
1912 ctx = repo.changectx(rev)
1921 ctx = repo.changectx(rev)
1913 p = [cp.node() for cp in ctx.parents()]
1922 p = [cp.node() for cp in ctx.parents()]
1914 else:
1923 else:
1915 p = repo.dirstate.parents()
1924 p = repo.dirstate.parents()
1916
1925
1917 displayer = cmdutil.show_changeset(ui, repo, opts)
1926 displayer = cmdutil.show_changeset(ui, repo, opts)
1918 for n in p:
1927 for n in p:
1919 if n != nullid:
1928 if n != nullid:
1920 displayer.show(changenode=n)
1929 displayer.show(changenode=n)
1921
1930
1922 def paths(ui, repo, search=None):
1931 def paths(ui, repo, search=None):
1923 """show definition of symbolic path names
1932 """show definition of symbolic path names
1924
1933
1925 Show definition of symbolic path name NAME. If no name is given, show
1934 Show definition of symbolic path name NAME. If no name is given, show
1926 definition of available names.
1935 definition of available names.
1927
1936
1928 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1937 Path names are defined in the [paths] section of /etc/mercurial/hgrc
1929 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1938 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
1930 """
1939 """
1931 if search:
1940 if search:
1932 for name, path in ui.configitems("paths"):
1941 for name, path in ui.configitems("paths"):
1933 if name == search:
1942 if name == search:
1934 ui.write("%s\n" % path)
1943 ui.write("%s\n" % path)
1935 return
1944 return
1936 ui.warn(_("not found!\n"))
1945 ui.warn(_("not found!\n"))
1937 return 1
1946 return 1
1938 else:
1947 else:
1939 for name, path in ui.configitems("paths"):
1948 for name, path in ui.configitems("paths"):
1940 ui.write("%s = %s\n" % (name, path))
1949 ui.write("%s = %s\n" % (name, path))
1941
1950
1942 def postincoming(ui, repo, modheads, optupdate):
1951 def postincoming(ui, repo, modheads, optupdate):
1943 if modheads == 0:
1952 if modheads == 0:
1944 return
1953 return
1945 if optupdate:
1954 if optupdate:
1946 if modheads == 1:
1955 if modheads == 1:
1947 return hg.update(repo, repo.changelog.tip()) # update
1956 return hg.update(repo, repo.changelog.tip()) # update
1948 else:
1957 else:
1949 ui.status(_("not updating, since new heads added\n"))
1958 ui.status(_("not updating, since new heads added\n"))
1950 if modheads > 1:
1959 if modheads > 1:
1951 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1960 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
1952 else:
1961 else:
1953 ui.status(_("(run 'hg update' to get a working copy)\n"))
1962 ui.status(_("(run 'hg update' to get a working copy)\n"))
1954
1963
1955 def pull(ui, repo, source="default", **opts):
1964 def pull(ui, repo, source="default", **opts):
1956 """pull changes from the specified source
1965 """pull changes from the specified source
1957
1966
1958 Pull changes from a remote repository to a local one.
1967 Pull changes from a remote repository to a local one.
1959
1968
1960 This finds all changes from the repository at the specified path
1969 This finds all changes from the repository at the specified path
1961 or URL and adds them to the local repository. By default, this
1970 or URL and adds them to the local repository. By default, this
1962 does not update the copy of the project in the working directory.
1971 does not update the copy of the project in the working directory.
1963
1972
1964 Valid URLs are of the form:
1973 Valid URLs are of the form:
1965
1974
1966 local/filesystem/path (or file://local/filesystem/path)
1975 local/filesystem/path (or file://local/filesystem/path)
1967 http://[user@]host[:port]/[path]
1976 http://[user@]host[:port]/[path]
1968 https://[user@]host[:port]/[path]
1977 https://[user@]host[:port]/[path]
1969 ssh://[user@]host[:port]/[path]
1978 ssh://[user@]host[:port]/[path]
1970 static-http://host[:port]/[path]
1979 static-http://host[:port]/[path]
1971
1980
1972 Paths in the local filesystem can either point to Mercurial
1981 Paths in the local filesystem can either point to Mercurial
1973 repositories or to bundle files (as created by 'hg bundle' or
1982 repositories or to bundle files (as created by 'hg bundle' or
1974 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1983 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
1975 allows access to a Mercurial repository where you simply use a web
1984 allows access to a Mercurial repository where you simply use a web
1976 server to publish the .hg directory as static content.
1985 server to publish the .hg directory as static content.
1977
1986
1987 An optional identifier after # indicates a particular branch, tag,
1988 or changeset to pull.
1989
1978 Some notes about using SSH with Mercurial:
1990 Some notes about using SSH with Mercurial:
1979 - SSH requires an accessible shell account on the destination machine
1991 - SSH requires an accessible shell account on the destination machine
1980 and a copy of hg in the remote path or specified as remotecmd.
1992 and a copy of hg in the remote path or specified as remotecmd.
1981 - path is relative to the remote user's home directory by default.
1993 - path is relative to the remote user's home directory by default.
1982 Use an extra slash at the start of a path to specify an absolute path:
1994 Use an extra slash at the start of a path to specify an absolute path:
1983 ssh://example.com//tmp/repository
1995 ssh://example.com//tmp/repository
1984 - Mercurial doesn't use its own compression via SSH; the right thing
1996 - Mercurial doesn't use its own compression via SSH; the right thing
1985 to do is to configure it in your ~/.ssh/config, e.g.:
1997 to do is to configure it in your ~/.ssh/config, e.g.:
1986 Host *.mylocalnetwork.example.com
1998 Host *.mylocalnetwork.example.com
1987 Compression no
1999 Compression no
1988 Host *
2000 Host *
1989 Compression yes
2001 Compression yes
1990 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2002 Alternatively specify "ssh -C" as your ssh command in your hgrc or
1991 with the --ssh command line option.
2003 with the --ssh command line option.
1992 """
2004 """
1993 source = ui.expandpath(source)
2005 source, revs = cmdutil.parseurl(ui.expandpath(source), opts['rev'])
1994 setremoteconfig(ui, opts)
2006 setremoteconfig(ui, opts)
1995
2007
1996 other = hg.repository(ui, source)
2008 other = hg.repository(ui, source)
1997 ui.status(_('pulling from %s\n') % (source))
2009 ui.status(_('pulling from %s\n') % (source))
1998 revs = None
2010 if revs:
1999 if opts['rev']:
2000 if 'lookup' in other.capabilities:
2011 if 'lookup' in other.capabilities:
2001 revs = [other.lookup(rev) for rev in opts['rev']]
2012 revs = [other.lookup(rev) for rev in revs]
2002 else:
2013 else:
2003 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2014 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2004 raise util.Abort(error)
2015 raise util.Abort(error)
2016
2005 modheads = repo.pull(other, heads=revs, force=opts['force'])
2017 modheads = repo.pull(other, heads=revs, force=opts['force'])
2006 return postincoming(ui, repo, modheads, opts['update'])
2018 return postincoming(ui, repo, modheads, opts['update'])
2007
2019
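This changeset routes pull/push/incoming/outgoing sources through cmdutil.parseurl and documents an optional '#identifier' naming a branch, tag or changeset. parseurl itself is not part of this hunk, so the split below is only a hedged sketch of that URL#fragment handling; parseurl_sketch and the example URLs are illustrative.

# Hedged sketch of "url#identifier" handling as described in the pull/push
# docstrings; cmdutil.parseurl may differ in detail.
def parseurl_sketch(url, revs=None):
    """Return (url, revs): a trailing '#name' becomes the revision to pull."""
    if '#' in url:
        url, fragment = url.rsplit('#', 1)
        revs = [fragment]             # the named branch/tag/changeset
    return url, revs

if __name__ == '__main__':
    print(parseurl_sketch('http://hg.example.com/repo#stable'))
    # -> ('http://hg.example.com/repo', ['stable'])
    print(parseurl_sketch('ssh://hg.example.com//srv/repo', revs=['tip']))
    # -> ('ssh://hg.example.com//srv/repo', ['tip'])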
2008 def push(ui, repo, dest=None, **opts):
2020 def push(ui, repo, dest=None, **opts):
2009 """push changes to the specified destination
2021 """push changes to the specified destination
2010
2022
2011 Push changes from the local repository to the given destination.
2023 Push changes from the local repository to the given destination.
2012
2024
2013 This is the symmetrical operation for pull. It helps to move
2025 This is the symmetrical operation for pull. It helps to move
2014 changes from the current repository to a different one. If the
2026 changes from the current repository to a different one. If the
2015 destination is local this is identical to a pull in that directory
2027 destination is local this is identical to a pull in that directory
2016 from the current one.
2028 from the current one.
2017
2029
2018 By default, push will refuse to run if it detects the result would
2030 By default, push will refuse to run if it detects the result would
2019 increase the number of remote heads. This generally indicates that
2031 increase the number of remote heads. This generally indicates that
2020 the client has forgotten to sync and merge before pushing.
2032 the client has forgotten to sync and merge before pushing.
2021
2033
2022 Valid URLs are of the form:
2034 Valid URLs are of the form:
2023
2035
2024 local/filesystem/path (or file://local/filesystem/path)
2036 local/filesystem/path (or file://local/filesystem/path)
2025 ssh://[user@]host[:port]/[path]
2037 ssh://[user@]host[:port]/[path]
2026 http://[user@]host[:port]/[path]
2038 http://[user@]host[:port]/[path]
2027 https://[user@]host[:port]/[path]
2039 https://[user@]host[:port]/[path]
2028
2040
2041 An optional identifier after # indicates a particular branch, tag,
2042 or changeset to push.
2043
2029 Look at the help text for the pull command for important details
2044 Look at the help text for the pull command for important details
2030 about ssh:// URLs.
2045 about ssh:// URLs.
2031
2046
2032 Pushing to http:// and https:// URLs is only possible if this
2047 Pushing to http:// and https:// URLs is only possible if this
2033 feature is explicitly enabled on the remote Mercurial server.
2048 feature is explicitly enabled on the remote Mercurial server.
2034 """
2049 """
2035 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2050 dest, revs = cmdutil.parseurl(
2051 ui.expandpath(dest or 'default-push', dest or 'default'), opts['rev'])
2036 setremoteconfig(ui, opts)
2052 setremoteconfig(ui, opts)
2037
2053
2038 other = hg.repository(ui, dest)
2054 other = hg.repository(ui, dest)
2039 ui.status('pushing to %s\n' % (dest))
2055 ui.status('pushing to %s\n' % (dest))
2040 revs = None
2056 if revs:
2041 if opts['rev']:
2057 revs = [repo.lookup(rev) for rev in revs]
2042 revs = [repo.lookup(rev) for rev in opts['rev']]
2043 r = repo.push(other, opts['force'], revs=revs)
2058 r = repo.push(other, opts['force'], revs=revs)
2044 return r == 0
2059 return r == 0
2045
2060
2046 def rawcommit(ui, repo, *pats, **opts):
2061 def rawcommit(ui, repo, *pats, **opts):
2047 """raw commit interface (DEPRECATED)
2062 """raw commit interface (DEPRECATED)
2048
2063
2049 (DEPRECATED)
2064 (DEPRECATED)
2050 Low-level commit, for use in helper scripts.
2065 Low-level commit, for use in helper scripts.
2051
2066
2052 This command is not intended to be used by normal users, as it is
2067 This command is not intended to be used by normal users, as it is
2053 primarily useful for importing from other SCMs.
2068 primarily useful for importing from other SCMs.
2054
2069
2055 This command is now deprecated and will be removed in a future
2070 This command is now deprecated and will be removed in a future
2056 release; please use debugsetparents and commit instead.
2071 release; please use debugsetparents and commit instead.
2057 """
2072 """
2058
2073
2059 ui.warn(_("(the rawcommit command is deprecated)\n"))
2074 ui.warn(_("(the rawcommit command is deprecated)\n"))
2060
2075
2061 message = logmessage(opts)
2076 message = logmessage(opts)
2062
2077
2063 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2078 files, match, anypats = cmdutil.matchpats(repo, pats, opts)
2064 if opts['files']:
2079 if opts['files']:
2065 files += open(opts['files']).read().splitlines()
2080 files += open(opts['files']).read().splitlines()
2066
2081
2067 parents = [repo.lookup(p) for p in opts['parent']]
2082 parents = [repo.lookup(p) for p in opts['parent']]
2068
2083
2069 try:
2084 try:
2070 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2085 repo.rawcommit(files, message, opts['user'], opts['date'], *parents)
2071 except ValueError, inst:
2086 except ValueError, inst:
2072 raise util.Abort(str(inst))
2087 raise util.Abort(str(inst))
2073
2088
2074 def recover(ui, repo):
2089 def recover(ui, repo):
2075 """roll back an interrupted transaction
2090 """roll back an interrupted transaction
2076
2091
2077 Recover from an interrupted commit or pull.
2092 Recover from an interrupted commit or pull.
2078
2093
2079 This command tries to fix the repository status after an interrupted
2094 This command tries to fix the repository status after an interrupted
2080 operation. It should only be necessary when Mercurial suggests it.
2095 operation. It should only be necessary when Mercurial suggests it.
2081 """
2096 """
2082 if repo.recover():
2097 if repo.recover():
2083 return hg.verify(repo)
2098 return hg.verify(repo)
2084 return 1
2099 return 1
2085
2100
2086 def remove(ui, repo, *pats, **opts):
2101 def remove(ui, repo, *pats, **opts):
2087 """remove the specified files on the next commit
2102 """remove the specified files on the next commit
2088
2103
2089 Schedule the indicated files for removal from the repository.
2104 Schedule the indicated files for removal from the repository.
2090
2105
2091 This only removes files from the current branch, not from the
2106 This only removes files from the current branch, not from the
2092 entire project history. If the files still exist in the working
2107 entire project history. If the files still exist in the working
2093 directory, they will be deleted from it. If invoked with --after,
2108 directory, they will be deleted from it. If invoked with --after,
2094 files are marked as removed, but not actually unlinked unless --force
2109 files are marked as removed, but not actually unlinked unless --force
2095 is also given. Without exact file names, --after will only mark
2110 is also given. Without exact file names, --after will only mark
2096 files as removed if they are no longer in the working directory.
2111 files as removed if they are no longer in the working directory.
2097
2112
2098 This command schedules the files to be removed at the next commit.
2113 This command schedules the files to be removed at the next commit.
2099 To undo a remove before that, see hg revert.
2114 To undo a remove before that, see hg revert.
2100
2115
2101 Modified files and added files are not removed by default. To
2116 Modified files and added files are not removed by default. To
2102 remove them, use the -f/--force option.
2117 remove them, use the -f/--force option.
2103 """
2118 """
2104 names = []
2119 names = []
2105 if not opts['after'] and not pats:
2120 if not opts['after'] and not pats:
2106 raise util.Abort(_('no files specified'))
2121 raise util.Abort(_('no files specified'))
2107 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2122 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2108 exact = dict.fromkeys(files)
2123 exact = dict.fromkeys(files)
2109 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2124 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2110 modified, added, removed, deleted, unknown = mardu
2125 modified, added, removed, deleted, unknown = mardu
2111 remove, forget = [], []
2126 remove, forget = [], []
2112 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2127 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2113 reason = None
2128 reason = None
2114 if abs in modified and not opts['force']:
2129 if abs in modified and not opts['force']:
2115 reason = _('is modified (use -f to force removal)')
2130 reason = _('is modified (use -f to force removal)')
2116 elif abs in added:
2131 elif abs in added:
2117 if opts['force']:
2132 if opts['force']:
2118 forget.append(abs)
2133 forget.append(abs)
2119 continue
2134 continue
2120 reason = _('has been marked for add (use -f to force removal)')
2135 reason = _('has been marked for add (use -f to force removal)')
2121 elif abs in unknown:
2136 elif abs in unknown:
2122 reason = _('is not managed')
2137 reason = _('is not managed')
2123 elif opts['after'] and not exact and abs not in deleted:
2138 elif opts['after'] and not exact and abs not in deleted:
2124 continue
2139 continue
2125 elif abs in removed:
2140 elif abs in removed:
2126 continue
2141 continue
2127 if reason:
2142 if reason:
2128 if exact:
2143 if exact:
2129 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2144 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2130 else:
2145 else:
2131 if ui.verbose or not exact:
2146 if ui.verbose or not exact:
2132 ui.status(_('removing %s\n') % rel)
2147 ui.status(_('removing %s\n') % rel)
2133 remove.append(abs)
2148 remove.append(abs)
2134 repo.forget(forget)
2149 repo.forget(forget)
2135 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2150 repo.remove(remove, unlink=opts['force'] or not opts['after'])
2136
2151
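remove() above walks the matched files and, for each one, either records it for removal or prints the reason it is being skipped. A cut-down, standalone sketch of that collect-or-explain pattern, with made-up state sets standing in for the repo.status() results:

    modified = {'hacked.c': 1}   # pretend repo.status() said this file is modified
    added = {'new.c': 1}         # ...and this one is marked for add

    def plan_removal(files, force=False):
        remove, skipped = [], []
        for f in files:
            reason = None
            if f in modified and not force:
                reason = 'is modified (use -f to force removal)'
            elif f in added and not force:
                reason = 'has been marked for add (use -f to force removal)'
            if reason:
                skipped.append((f, reason))
            else:
                remove.append(f)
        return remove, skipped

    print(plan_removal(['hacked.c', 'old.c']))
    # (['old.c'], [('hacked.c', 'is modified (use -f to force removal)')])
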
2137 def rename(ui, repo, *pats, **opts):
2152 def rename(ui, repo, *pats, **opts):
2138 """rename files; equivalent of copy + remove
2153 """rename files; equivalent of copy + remove
2139
2154
2140 Mark dest as copies of sources; mark sources for deletion. If
2155 Mark dest as copies of sources; mark sources for deletion. If
2141 dest is a directory, copies are put in that directory. If dest is
2156 dest is a directory, copies are put in that directory. If dest is
2142 a file, there can only be one source.
2157 a file, there can only be one source.
2143
2158
2144 By default, this command copies the contents of files as they
2159 By default, this command copies the contents of files as they
2145 stand in the working directory. If invoked with --after, the
2160 stand in the working directory. If invoked with --after, the
2146 operation is recorded, but no copying is performed.
2161 operation is recorded, but no copying is performed.
2147
2162
2148 This command takes effect in the next commit. To undo a rename
2163 This command takes effect in the next commit. To undo a rename
2149 before that, see hg revert.
2164 before that, see hg revert.
2150 """
2165 """
2151 wlock = repo.wlock(0)
2166 wlock = repo.wlock(0)
2152 errs, copied = docopy(ui, repo, pats, opts, wlock)
2167 errs, copied = docopy(ui, repo, pats, opts, wlock)
2153 names = []
2168 names = []
2154 for abs, rel, exact in copied:
2169 for abs, rel, exact in copied:
2155 if ui.verbose or not exact:
2170 if ui.verbose or not exact:
2156 ui.status(_('removing %s\n') % rel)
2171 ui.status(_('removing %s\n') % rel)
2157 names.append(abs)
2172 names.append(abs)
2158 if not opts.get('dry_run'):
2173 if not opts.get('dry_run'):
2159 repo.remove(names, True, wlock=wlock)
2174 repo.remove(names, True, wlock=wlock)
2160 return errs
2175 return errs
2161
2176
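rename() above is literally the copy operation followed by scheduling the sources for removal, with --dry-run suppressing the destructive half. A tiny sketch of that composition, using stand-in callables for docopy() and repo.remove():

    def rename_files(copy_func, remove_func, pairs, dry_run=False):
        # copy_func / remove_func stand in for docopy() and repo.remove().
        for src, dst in pairs:
            copy_func(src, dst)
        if not dry_run:
            remove_func([src for src, dst in pairs])

    log = []
    rename_files(lambda s, d: log.append(('copy', s, d)),
                 lambda names: log.append(('remove', names)),
                 [('a.txt', 'b.txt')])
    print(log)   # [('copy', 'a.txt', 'b.txt'), ('remove', ['a.txt'])]
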
2162 def revert(ui, repo, *pats, **opts):
2177 def revert(ui, repo, *pats, **opts):
2163 """revert files or dirs to their states as of some revision
2178 """revert files or dirs to their states as of some revision
2164
2179
2165 With no revision specified, revert the named files or directories
2180 With no revision specified, revert the named files or directories
2166 to the contents they had in the parent of the working directory.
2181 to the contents they had in the parent of the working directory.
2167 This restores the contents of the affected files to an unmodified
2182 This restores the contents of the affected files to an unmodified
2168 state and unschedules adds, removes, copies, and renames. If the
2183 state and unschedules adds, removes, copies, and renames. If the
2169 working directory has two parents, you must explicitly specify the
2184 working directory has two parents, you must explicitly specify the
2170 revision to revert to.
2185 revision to revert to.
2171
2186
2172 Modified files are saved with a .orig suffix before reverting.
2187 Modified files are saved with a .orig suffix before reverting.
2173 To disable these backups, use --no-backup.
2188 To disable these backups, use --no-backup.
2174
2189
2175 Using the -r option, revert the given files or directories to their
2190 Using the -r option, revert the given files or directories to their
2176 contents as of a specific revision. This can be helpful to "roll
2191 contents as of a specific revision. This can be helpful to "roll
2177 back" some or all of a change that should not have been committed.
2192 back" some or all of a change that should not have been committed.
2178
2193
2179 Revert modifies the working directory. It does not commit any
2194 Revert modifies the working directory. It does not commit any
2180 changes, or change the parent of the working directory. If you
2195 changes, or change the parent of the working directory. If you
2181 revert to a revision other than the parent of the working
2196 revert to a revision other than the parent of the working
2182 directory, the reverted files will thus appear modified
2197 directory, the reverted files will thus appear modified
2183 afterwards.
2198 afterwards.
2184
2199
2185 If a file has been deleted, it is recreated. If the executable
2200 If a file has been deleted, it is recreated. If the executable
2186 mode of a file was changed, it is reset.
2201 mode of a file was changed, it is reset.
2187
2202
2188 If names are given, all files matching the names are reverted.
2203 If names are given, all files matching the names are reverted.
2189
2204
2190 If no arguments are given, no files are reverted.
2205 If no arguments are given, no files are reverted.
2191 """
2206 """
2192
2207
2193 if opts["date"]:
2208 if opts["date"]:
2194 if opts["rev"]:
2209 if opts["rev"]:
2195 raise util.Abort(_("you can't specify a revision and a date"))
2210 raise util.Abort(_("you can't specify a revision and a date"))
2196 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2211 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2197
2212
2198 if not pats and not opts['all']:
2213 if not pats and not opts['all']:
2199 raise util.Abort(_('no files or directories specified; '
2214 raise util.Abort(_('no files or directories specified; '
2200 'use --all to revert the whole repo'))
2215 'use --all to revert the whole repo'))
2201
2216
2202 parent, p2 = repo.dirstate.parents()
2217 parent, p2 = repo.dirstate.parents()
2203 if not opts['rev'] and p2 != nullid:
2218 if not opts['rev'] and p2 != nullid:
2204 raise util.Abort(_('uncommitted merge - please provide a '
2219 raise util.Abort(_('uncommitted merge - please provide a '
2205 'specific revision'))
2220 'specific revision'))
2206 ctx = repo.changectx(opts['rev'])
2221 ctx = repo.changectx(opts['rev'])
2207 node = ctx.node()
2222 node = ctx.node()
2208 mf = ctx.manifest()
2223 mf = ctx.manifest()
2209 if node == parent:
2224 if node == parent:
2210 pmf = mf
2225 pmf = mf
2211 else:
2226 else:
2212 pmf = None
2227 pmf = None
2213
2228
2214 wlock = repo.wlock()
2229 wlock = repo.wlock()
2215
2230
2216 # need all matching names in dirstate and manifest of target rev,
2231 # need all matching names in dirstate and manifest of target rev,
2217 # so have to walk both. do not print errors if files exist in one
2232 # so have to walk both. do not print errors if files exist in one
2218 # but not other.
2233 # but not other.
2219
2234
2220 names = {}
2235 names = {}
2221 target_only = {}
2236 target_only = {}
2222
2237
2223 # walk dirstate.
2238 # walk dirstate.
2224
2239
2225 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2240 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2226 badmatch=mf.has_key):
2241 badmatch=mf.has_key):
2227 names[abs] = (rel, exact)
2242 names[abs] = (rel, exact)
2228 if src == 'b':
2243 if src == 'b':
2229 target_only[abs] = True
2244 target_only[abs] = True
2230
2245
2231 # walk target manifest.
2246 # walk target manifest.
2232
2247
2233 def badmatch(path):
2248 def badmatch(path):
2234 if path in names:
2249 if path in names:
2235 return True
2250 return True
2236 path_ = path + '/'
2251 path_ = path + '/'
2237 for f in names:
2252 for f in names:
2238 if f.startswith(path_):
2253 if f.startswith(path_):
2239 return True
2254 return True
2240 return False
2255 return False
2241
2256
2242 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2257 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2243 badmatch=badmatch):
2258 badmatch=badmatch):
2244 if abs in names or src == 'b':
2259 if abs in names or src == 'b':
2245 continue
2260 continue
2246 names[abs] = (rel, exact)
2261 names[abs] = (rel, exact)
2247 target_only[abs] = True
2262 target_only[abs] = True
2248
2263
2249 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2264 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2250 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2265 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2251
2266
2252 revert = ([], _('reverting %s\n'))
2267 revert = ([], _('reverting %s\n'))
2253 add = ([], _('adding %s\n'))
2268 add = ([], _('adding %s\n'))
2254 remove = ([], _('removing %s\n'))
2269 remove = ([], _('removing %s\n'))
2255 forget = ([], _('forgetting %s\n'))
2270 forget = ([], _('forgetting %s\n'))
2256 undelete = ([], _('undeleting %s\n'))
2271 undelete = ([], _('undeleting %s\n'))
2257 update = {}
2272 update = {}
2258
2273
2259 disptable = (
2274 disptable = (
2260 # dispatch table:
2275 # dispatch table:
2261 # file state
2276 # file state
2262 # action if in target manifest
2277 # action if in target manifest
2263 # action if not in target manifest
2278 # action if not in target manifest
2264 # make backup if in target manifest
2279 # make backup if in target manifest
2265 # make backup if not in target manifest
2280 # make backup if not in target manifest
2266 (modified, revert, remove, True, True),
2281 (modified, revert, remove, True, True),
2267 (added, revert, forget, True, False),
2282 (added, revert, forget, True, False),
2268 (removed, undelete, None, False, False),
2283 (removed, undelete, None, False, False),
2269 (deleted, revert, remove, False, False),
2284 (deleted, revert, remove, False, False),
2270 (unknown, add, None, True, False),
2285 (unknown, add, None, True, False),
2271 (target_only, add, None, False, False),
2286 (target_only, add, None, False, False),
2272 )
2287 )
2273
2288
2274 entries = names.items()
2289 entries = names.items()
2275 entries.sort()
2290 entries.sort()
2276
2291
2277 for abs, (rel, exact) in entries:
2292 for abs, (rel, exact) in entries:
2278 mfentry = mf.get(abs)
2293 mfentry = mf.get(abs)
2279 def handle(xlist, dobackup):
2294 def handle(xlist, dobackup):
2280 xlist[0].append(abs)
2295 xlist[0].append(abs)
2281 update[abs] = 1
2296 update[abs] = 1
2282 if (dobackup and not opts['no_backup'] and
2297 if (dobackup and not opts['no_backup'] and
2283 (os.path.islink(rel) or os.path.exists(rel))):
2298 (os.path.islink(rel) or os.path.exists(rel))):
2284 bakname = "%s.orig" % rel
2299 bakname = "%s.orig" % rel
2285 ui.note(_('saving current version of %s as %s\n') %
2300 ui.note(_('saving current version of %s as %s\n') %
2286 (rel, bakname))
2301 (rel, bakname))
2287 if not opts.get('dry_run'):
2302 if not opts.get('dry_run'):
2288 util.copyfile(rel, bakname)
2303 util.copyfile(rel, bakname)
2289 if ui.verbose or not exact:
2304 if ui.verbose or not exact:
2290 ui.status(xlist[1] % rel)
2305 ui.status(xlist[1] % rel)
2291 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2306 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2292 if abs not in table: continue
2307 if abs not in table: continue
2293 # file has changed in dirstate
2308 # file has changed in dirstate
2294 if mfentry:
2309 if mfentry:
2295 handle(hitlist, backuphit)
2310 handle(hitlist, backuphit)
2296 elif misslist is not None:
2311 elif misslist is not None:
2297 handle(misslist, backupmiss)
2312 handle(misslist, backupmiss)
2298 else:
2313 else:
2299 if exact: ui.warn(_('file not managed: %s\n') % rel)
2314 if exact: ui.warn(_('file not managed: %s\n') % rel)
2300 break
2315 break
2301 else:
2316 else:
2302 # file has not changed in dirstate
2317 # file has not changed in dirstate
2303 if node == parent:
2318 if node == parent:
2304 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2319 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2305 continue
2320 continue
2306 if pmf is None:
2321 if pmf is None:
2307 # only need parent manifest in this unlikely case,
2322 # only need parent manifest in this unlikely case,
2308 # so do not read by default
2323 # so do not read by default
2309 pmf = repo.changectx(parent).manifest()
2324 pmf = repo.changectx(parent).manifest()
2310 if abs in pmf:
2325 if abs in pmf:
2311 if mfentry:
2326 if mfentry:
2312 # if version of file is same in parent and target
2327 # if version of file is same in parent and target
2313 # manifests, do nothing
2328 # manifests, do nothing
2314 if pmf[abs] != mfentry:
2329 if pmf[abs] != mfentry:
2315 handle(revert, False)
2330 handle(revert, False)
2316 else:
2331 else:
2317 handle(remove, False)
2332 handle(remove, False)
2318
2333
2319 if not opts.get('dry_run'):
2334 if not opts.get('dry_run'):
2320 repo.dirstate.forget(forget[0])
2335 repo.dirstate.forget(forget[0])
2321 r = hg.revert(repo, node, update.has_key, wlock)
2336 r = hg.revert(repo, node, update.has_key, wlock)
2322 repo.dirstate.update(add[0], 'a')
2337 repo.dirstate.update(add[0], 'a')
2323 repo.dirstate.update(undelete[0], 'n')
2338 repo.dirstate.update(undelete[0], 'n')
2324 repo.dirstate.update(remove[0], 'r')
2339 repo.dirstate.update(remove[0], 'r')
2325 return r
2340 return r
2326
2341
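The disptable in revert() above is the heart of the command: each row pairs a dirstate status set with the action to take when the file exists in the target manifest, the action when it does not, and the backup flags. A cut-down, standalone sketch of the same table-driven dispatch (file names, actions, and the single backup flag are invented simplifications):

    modified, added, removed = {'m.txt': 1}, {'a.txt': 1}, {'r.txt': 1}
    target_manifest = {'m.txt': 'node1', 'r.txt': 'node2'}

    #            state set   in target    not in target  backup?
    disptable = ((modified,  'revert',    'remove',      True),
                 (added,     'revert',    'forget',      True),
                 (removed,   'undelete',   None,         False))

    def plan(path):
        for table, hit, miss, backup in disptable:
            if path not in table:
                continue
            if path in target_manifest:
                return hit, backup
            return miss, False
        return None, False

    for f in ('m.txt', 'a.txt', 'r.txt'):
        print((f, plan(f)))
    # ('m.txt', ('revert', True)), ('a.txt', ('forget', False)), ('r.txt', ('undelete', False))
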
2327 def rollback(ui, repo):
2342 def rollback(ui, repo):
2328 """roll back the last transaction in this repository
2343 """roll back the last transaction in this repository
2329
2344
2330 Roll back the last transaction in this repository, restoring the
2345 Roll back the last transaction in this repository, restoring the
2331 project to its state prior to the transaction.
2346 project to its state prior to the transaction.
2332
2347
2333 Transactions are used to encapsulate the effects of all commands
2348 Transactions are used to encapsulate the effects of all commands
2334 that create new changesets or propagate existing changesets into a
2349 that create new changesets or propagate existing changesets into a
2335 repository. For example, the following commands are transactional,
2350 repository. For example, the following commands are transactional,
2336 and their effects can be rolled back:
2351 and their effects can be rolled back:
2337
2352
2338 commit
2353 commit
2339 import
2354 import
2340 pull
2355 pull
2341 push (with this repository as destination)
2356 push (with this repository as destination)
2342 unbundle
2357 unbundle
2343
2358
2344 This command should be used with care. There is only one level of
2359 This command should be used with care. There is only one level of
2345 rollback, and there is no way to undo a rollback.
2360 rollback, and there is no way to undo a rollback.
2346
2361
2347 This command is not intended for use on public repositories. Once
2362 This command is not intended for use on public repositories. Once
2348 changes are visible for pull by other users, rolling a transaction
2363 changes are visible for pull by other users, rolling a transaction
2349 back locally is ineffective (someone else may already have pulled
2364 back locally is ineffective (someone else may already have pulled
2350 the changes). Furthermore, a race is possible with readers of the
2365 the changes). Furthermore, a race is possible with readers of the
2351 repository; for example an in-progress pull from the repository
2366 repository; for example an in-progress pull from the repository
2352 may fail if a rollback is performed.
2367 may fail if a rollback is performed.
2353 """
2368 """
2354 repo.rollback()
2369 repo.rollback()
2355
2370
2356 def root(ui, repo):
2371 def root(ui, repo):
2357 """print the root (top) of the current working dir
2372 """print the root (top) of the current working dir
2358
2373
2359 Print the root directory of the current repository.
2374 Print the root directory of the current repository.
2360 """
2375 """
2361 ui.write(repo.root + "\n")
2376 ui.write(repo.root + "\n")
2362
2377
2363 def serve(ui, repo, **opts):
2378 def serve(ui, repo, **opts):
2364 """export the repository via HTTP
2379 """export the repository via HTTP
2365
2380
2366 Start a local HTTP repository browser and pull server.
2381 Start a local HTTP repository browser and pull server.
2367
2382
2368 By default, the server logs accesses to stdout and errors to
2383 By default, the server logs accesses to stdout and errors to
2369 stderr. Use the "-A" and "-E" options to log to files.
2384 stderr. Use the "-A" and "-E" options to log to files.
2370 """
2385 """
2371
2386
2372 if opts["stdio"]:
2387 if opts["stdio"]:
2373 if repo is None:
2388 if repo is None:
2374 raise hg.RepoError(_("There is no Mercurial repository here"
2389 raise hg.RepoError(_("There is no Mercurial repository here"
2375 " (.hg not found)"))
2390 " (.hg not found)"))
2376 s = sshserver.sshserver(ui, repo)
2391 s = sshserver.sshserver(ui, repo)
2377 s.serve_forever()
2392 s.serve_forever()
2378
2393
2379 parentui = ui.parentui or ui
2394 parentui = ui.parentui or ui
2380 optlist = ("name templates style address port ipv6"
2395 optlist = ("name templates style address port ipv6"
2381 " accesslog errorlog webdir_conf")
2396 " accesslog errorlog webdir_conf")
2382 for o in optlist.split():
2397 for o in optlist.split():
2383 if opts[o]:
2398 if opts[o]:
2384 parentui.setconfig("web", o, str(opts[o]))
2399 parentui.setconfig("web", o, str(opts[o]))
2385
2400
2386 if repo is None and not ui.config("web", "webdir_conf"):
2401 if repo is None and not ui.config("web", "webdir_conf"):
2387 raise hg.RepoError(_("There is no Mercurial repository here"
2402 raise hg.RepoError(_("There is no Mercurial repository here"
2388 " (.hg not found)"))
2403 " (.hg not found)"))
2389
2404
2390 class service:
2405 class service:
2391 def init(self):
2406 def init(self):
2392 try:
2407 try:
2393 self.httpd = hgweb.server.create_server(parentui, repo)
2408 self.httpd = hgweb.server.create_server(parentui, repo)
2394 except socket.error, inst:
2409 except socket.error, inst:
2395 raise util.Abort(_('cannot start server: ') + inst.args[1])
2410 raise util.Abort(_('cannot start server: ') + inst.args[1])
2396
2411
2397 if not ui.verbose: return
2412 if not ui.verbose: return
2398
2413
2399 if self.httpd.port != 80:
2414 if self.httpd.port != 80:
2400 ui.status(_('listening at http://%s:%d/\n') %
2415 ui.status(_('listening at http://%s:%d/\n') %
2401 (self.httpd.addr, self.httpd.port))
2416 (self.httpd.addr, self.httpd.port))
2402 else:
2417 else:
2403 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2418 ui.status(_('listening at http://%s/\n') % self.httpd.addr)
2404
2419
2405 def run(self):
2420 def run(self):
2406 self.httpd.serve_forever()
2421 self.httpd.serve_forever()
2407
2422
2408 service = service()
2423 service = service()
2409
2424
2410 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2425 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2411
2426
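serve() above hands cmdutil.service two callbacks: init() binds the listener and reports the address, and run() blocks in serve_forever(). Keeping setup separate from the blocking loop lets the runner do extra work between the two steps (the real cmdutil.service handles more than this toy runner does). A standalone sketch of the callback shape:

    def run_service(initfn, runfn):
        # Toy stand-in for cmdutil.service(): set up first, then block.
        initfn()
        runfn()

    class echoservice:
        def init(self):
            print('init: binding the listening socket')
        def run(self):
            print('run: entering serve_forever()')

    svc = echoservice()
    run_service(svc.init, svc.run)
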
2412 def status(ui, repo, *pats, **opts):
2427 def status(ui, repo, *pats, **opts):
2413 """show changed files in the working directory
2428 """show changed files in the working directory
2414
2429
2415 Show status of files in the repository. If names are given, only
2430 Show status of files in the repository. If names are given, only
2416 files that match are shown. Files that are clean or ignored are
2431 files that match are shown. Files that are clean or ignored are
2417 not listed unless -c (clean), -i (ignored) or -A is given.
2432 not listed unless -c (clean), -i (ignored) or -A is given.
2418
2433
2419 NOTE: status may appear to disagree with diff if permissions have
2434 NOTE: status may appear to disagree with diff if permissions have
2420 changed or a merge has occurred. The standard diff format does not
2435 changed or a merge has occurred. The standard diff format does not
2421 report permission changes and diff only reports changes relative
2436 report permission changes and diff only reports changes relative
2422 to one merge parent.
2437 to one merge parent.
2423
2438
2424 If one revision is given, it is used as the base revision.
2439 If one revision is given, it is used as the base revision.
2425 If two revisions are given, the difference between them is shown.
2440 If two revisions are given, the difference between them is shown.
2426
2441
2427 The codes used to show the status of files are:
2442 The codes used to show the status of files are:
2428 M = modified
2443 M = modified
2429 A = added
2444 A = added
2430 R = removed
2445 R = removed
2431 C = clean
2446 C = clean
2432 ! = deleted, but still tracked
2447 ! = deleted, but still tracked
2433 ? = not tracked
2448 ? = not tracked
2434 I = ignored (not shown by default)
2449 I = ignored (not shown by default)
2435 = the previous added file was copied from here
2450 = the previous added file was copied from here
2436 """
2451 """
2437
2452
2438 all = opts['all']
2453 all = opts['all']
2439 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2454 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2440
2455
2441 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2456 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2442 cwd = (pats and repo.getcwd()) or ''
2457 cwd = (pats and repo.getcwd()) or ''
2443 modified, added, removed, deleted, unknown, ignored, clean = [
2458 modified, added, removed, deleted, unknown, ignored, clean = [
2444 n for n in repo.status(node1=node1, node2=node2, files=files,
2459 n for n in repo.status(node1=node1, node2=node2, files=files,
2445 match=matchfn,
2460 match=matchfn,
2446 list_ignored=all or opts['ignored'],
2461 list_ignored=all or opts['ignored'],
2447 list_clean=all or opts['clean'])]
2462 list_clean=all or opts['clean'])]
2448
2463
2449 changetypes = (('modified', 'M', modified),
2464 changetypes = (('modified', 'M', modified),
2450 ('added', 'A', added),
2465 ('added', 'A', added),
2451 ('removed', 'R', removed),
2466 ('removed', 'R', removed),
2452 ('deleted', '!', deleted),
2467 ('deleted', '!', deleted),
2453 ('unknown', '?', unknown),
2468 ('unknown', '?', unknown),
2454 ('ignored', 'I', ignored))
2469 ('ignored', 'I', ignored))
2455
2470
2456 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2471 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2457
2472
2458 end = opts['print0'] and '\0' or '\n'
2473 end = opts['print0'] and '\0' or '\n'
2459
2474
2460 for opt, char, changes in ([ct for ct in explicit_changetypes
2475 for opt, char, changes in ([ct for ct in explicit_changetypes
2461 if all or opts[ct[0]]]
2476 if all or opts[ct[0]]]
2462 or changetypes):
2477 or changetypes):
2463 if opts['no_status']:
2478 if opts['no_status']:
2464 format = "%%s%s" % end
2479 format = "%%s%s" % end
2465 else:
2480 else:
2466 format = "%s %%s%s" % (char, end)
2481 format = "%s %%s%s" % (char, end)
2467
2482
2468 for f in changes:
2483 for f in changes:
2469 ui.write(format % util.pathto(repo.root, cwd, f))
2484 ui.write(format % util.pathto(repo.root, cwd, f))
2470 if ((all or opts.get('copies')) and not opts.get('no_status')):
2485 if ((all or opts.get('copies')) and not opts.get('no_status')):
2471 copied = repo.dirstate.copied(f)
2486 copied = repo.dirstate.copied(f)
2472 if copied:
2487 if copied:
2473 ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
2488 ui.write(' %s%s' % (util.pathto(repo.root, cwd, copied),
2474 end))
2489 end))
2475
2490
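status() above builds one printf-style format per change type: with --no-status only the path is printed, otherwise the status letter is prefixed, and --print0 swaps the trailing newline for a NUL byte. A standalone sketch of that format construction:

    def status_format(char, no_status=False, print0=False):
        end = print0 and '\0' or '\n'
        if no_status:
            return '%%s%s' % end
        return '%s %%s%s' % (char, end)

    print(repr(status_format('M') % 'commands.py'))        # 'M commands.py\n'
    print(repr(status_format('M', print0=True) % 'a.py'))  # 'M a.py\x00'
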
2476 def tag(ui, repo, name, rev_=None, **opts):
2491 def tag(ui, repo, name, rev_=None, **opts):
2477 """add a tag for the current or given revision
2492 """add a tag for the current or given revision
2478
2493
2479 Name a particular revision using <name>.
2494 Name a particular revision using <name>.
2480
2495
2481 Tags are used to name particular revisions of the repository and are
2496 Tags are used to name particular revisions of the repository and are
2482 very useful to compare different revisions, to go back to significant
2497 very useful to compare different revisions, to go back to significant
2483 earlier versions, or to mark branch points as releases, etc.
2498 earlier versions, or to mark branch points as releases, etc.
2484
2499
2485 If no revision is given, the parent of the working directory is used,
2500 If no revision is given, the parent of the working directory is used,
2486 or tip if no revision is checked out.
2501 or tip if no revision is checked out.
2487
2502
2488 To facilitate version control, distribution, and merging of tags,
2503 To facilitate version control, distribution, and merging of tags,
2489 they are stored as a file named ".hgtags" which is managed
2504 they are stored as a file named ".hgtags" which is managed
2490 similarly to other project files and can be hand-edited if
2505 similarly to other project files and can be hand-edited if
2491 necessary. The file '.hg/localtags' is used for local tags (not
2506 necessary. The file '.hg/localtags' is used for local tags (not
2492 shared among repositories).
2507 shared among repositories).
2493 """
2508 """
2494 if name in ['tip', '.', 'null']:
2509 if name in ['tip', '.', 'null']:
2495 raise util.Abort(_("the name '%s' is reserved") % name)
2510 raise util.Abort(_("the name '%s' is reserved") % name)
2496 if rev_ is not None:
2511 if rev_ is not None:
2497 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2512 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2498 "please use 'hg tag [-r REV] NAME' instead\n"))
2513 "please use 'hg tag [-r REV] NAME' instead\n"))
2499 if opts['rev']:
2514 if opts['rev']:
2500 raise util.Abort(_("use only one form to specify the revision"))
2515 raise util.Abort(_("use only one form to specify the revision"))
2501 if opts['rev'] and opts['remove']:
2516 if opts['rev'] and opts['remove']:
2502 raise util.Abort(_("--rev and --remove are incompatible"))
2517 raise util.Abort(_("--rev and --remove are incompatible"))
2503 if opts['rev']:
2518 if opts['rev']:
2504 rev_ = opts['rev']
2519 rev_ = opts['rev']
2505 message = opts['message']
2520 message = opts['message']
2506 if opts['remove']:
2521 if opts['remove']:
2507 rev_ = nullid
2522 rev_ = nullid
2508 if not message:
2523 if not message:
2509 message = _('Removed tag %s') % name
2524 message = _('Removed tag %s') % name
2510 elif name in repo.tags() and not opts['force']:
2525 elif name in repo.tags() and not opts['force']:
2511 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2526 raise util.Abort(_('a tag named %s already exists (use -f to force)')
2512 % name)
2527 % name)
2513 if not rev_ and repo.dirstate.parents()[1] != nullid:
2528 if not rev_ and repo.dirstate.parents()[1] != nullid:
2514 raise util.Abort(_('uncommitted merge - please provide a '
2529 raise util.Abort(_('uncommitted merge - please provide a '
2515 'specific revision'))
2530 'specific revision'))
2516 r = repo.changectx(rev_).node()
2531 r = repo.changectx(rev_).node()
2517
2532
2518 if not message:
2533 if not message:
2519 message = _('Added tag %s for changeset %s') % (name, short(r))
2534 message = _('Added tag %s for changeset %s') % (name, short(r))
2520
2535
2521 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2536 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2522
2537
2523 def tags(ui, repo):
2538 def tags(ui, repo):
2524 """list repository tags
2539 """list repository tags
2525
2540
2526 List the repository tags.
2541 List the repository tags.
2527
2542
2528 This lists both regular and local tags.
2543 This lists both regular and local tags.
2529 """
2544 """
2530
2545
2531 l = repo.tagslist()
2546 l = repo.tagslist()
2532 l.reverse()
2547 l.reverse()
2533 hexfunc = ui.debugflag and hex or short
2548 hexfunc = ui.debugflag and hex or short
2534 for t, n in l:
2549 for t, n in l:
2535 try:
2550 try:
2536 hn = hexfunc(n)
2551 hn = hexfunc(n)
2537 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2552 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2538 except revlog.LookupError:
2553 except revlog.LookupError:
2539 r = " ?:%s" % hn
2554 r = " ?:%s" % hn
2540 if ui.quiet:
2555 if ui.quiet:
2541 ui.write("%s\n" % t)
2556 ui.write("%s\n" % t)
2542 else:
2557 else:
2543 spaces = " " * (30 - util.locallen(t))
2558 spaces = " " * (30 - util.locallen(t))
2544 ui.write("%s%s %s\n" % (t, spaces, r))
2559 ui.write("%s%s %s\n" % (t, spaces, r))
2545
2560
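tags() above picks a node formatter with the Python 2 `cond and a or b` idiom and pads each tag name to a 30-column field before the rev:hash part (the real code measures the width with util.locallen to respect the local encoding; plain len() stands in here). A standalone sketch with invented data:

    def show_tags(taglist, quiet=False, debug=False):
        # taglist entries are (name, rev, hexnode) -- stand-ins for repo.tagslist() data.
        hexfunc = debug and (lambda n: n) or (lambda n: n[:12])
        for name, rev, node in taglist:
            if quiet:
                print(name)
                continue
            spaces = ' ' * (30 - len(name))
            print('%s%s %5d:%s' % (name, spaces, rev, hexfunc(node)))

    show_tags([('tip', 4494, 'f' * 40), ('1.0', 100, '0' * 40)])
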
2546 def tip(ui, repo, **opts):
2561 def tip(ui, repo, **opts):
2547 """show the tip revision
2562 """show the tip revision
2548
2563
2549 Show the tip revision.
2564 Show the tip revision.
2550 """
2565 """
2551 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2566 cmdutil.show_changeset(ui, repo, opts).show(nullrev+repo.changelog.count())
2552
2567
2553 def unbundle(ui, repo, fname, **opts):
2568 def unbundle(ui, repo, fname, **opts):
2554 """apply a changegroup file
2569 """apply a changegroup file
2555
2570
2556 Apply a compressed changegroup file generated by the bundle
2571 Apply a compressed changegroup file generated by the bundle
2557 command.
2572 command.
2558 """
2573 """
2559 if os.path.exists(fname):
2574 if os.path.exists(fname):
2560 f = open(fname, "rb")
2575 f = open(fname, "rb")
2561 else:
2576 else:
2562 f = urllib.urlopen(fname)
2577 f = urllib.urlopen(fname)
2563 gen = changegroup.readbundle(f, fname)
2578 gen = changegroup.readbundle(f, fname)
2564 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2579 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2565 return postincoming(ui, repo, modheads, opts['update'])
2580 return postincoming(ui, repo, modheads, opts['update'])
2566
2581
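unbundle() above accepts either a local path or a URL: if the name exists on disk it is opened as a binary file, otherwise it falls back to urllib. A standalone sketch of that open helper (Python 2 urllib, matching the urlopen call in the function above):

    import os
    import urllib

    def open_local_or_url(name):
        # Prefer a local file; otherwise treat the name as a URL.
        if os.path.exists(name):
            return open(name, 'rb')
        return urllib.urlopen(name)

    # e.g. open_local_or_url('changes.hg') or
    #      open_local_or_url('http://example.com/changes.hg')
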
2567 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2582 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2568 """update working directory
2583 """update working directory
2569
2584
2570 Update the working directory to the specified revision, or the
2585 Update the working directory to the specified revision, or the
2571 tip of the current branch if none is specified.
2586 tip of the current branch if none is specified.
2572
2587
2573 If there are no outstanding changes in the working directory and
2588 If there are no outstanding changes in the working directory and
2574 there is a linear relationship between the current version and the
2589 there is a linear relationship between the current version and the
2575 requested version, the result is the requested version.
2590 requested version, the result is the requested version.
2576
2591
2577 To merge the working directory with another revision, use the
2592 To merge the working directory with another revision, use the
2578 merge command.
2593 merge command.
2579
2594
2580 By default, update will refuse to run if doing so would require
2595 By default, update will refuse to run if doing so would require
2581 discarding local changes.
2596 discarding local changes.
2582 """
2597 """
2583 if rev and node:
2598 if rev and node:
2584 raise util.Abort(_("please specify just one revision"))
2599 raise util.Abort(_("please specify just one revision"))
2585
2600
2586 if not rev:
2601 if not rev:
2587 rev = node
2602 rev = node
2588
2603
2589 if date:
2604 if date:
2590 if rev:
2605 if rev:
2591 raise util.Abort(_("you can't specify a revision and a date"))
2606 raise util.Abort(_("you can't specify a revision and a date"))
2592 rev = cmdutil.finddate(ui, repo, date)
2607 rev = cmdutil.finddate(ui, repo, date)
2593
2608
2594 if clean:
2609 if clean:
2595 return hg.clean(repo, rev)
2610 return hg.clean(repo, rev)
2596 else:
2611 else:
2597 return hg.update(repo, rev)
2612 return hg.update(repo, rev)
2598
2613
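update() above mostly validates how the target revision was specified: REV may be given positionally or via -r but not both, --date is exclusive with a revision, and --clean picks hg.clean over hg.update. A standalone sketch of the same precedence checks, with stand-in callables for cmdutil.finddate, hg.clean, and hg.update:

    def plan_update(node=None, rev=None, clean=False, date=None,
                    finddate=None, do_clean=None, do_update=None):
        if rev and node:
            raise ValueError('please specify just one revision')
        if not rev:
            rev = node
        if date:
            if rev:
                raise ValueError("you can't specify a revision and a date")
            rev = finddate(date)
        if clean:
            return do_clean(rev)
        return do_update(rev)

    print(plan_update(rev='tip', do_update=lambda r: 'updated to %s' % r))
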
2599 def verify(ui, repo):
2614 def verify(ui, repo):
2600 """verify the integrity of the repository
2615 """verify the integrity of the repository
2601
2616
2602 Verify the integrity of the current repository.
2617 Verify the integrity of the current repository.
2603
2618
2604 This will perform an extensive check of the repository's
2619 This will perform an extensive check of the repository's
2605 integrity, validating the hashes and checksums of each entry in
2620 integrity, validating the hashes and checksums of each entry in
2606 the changelog, manifest, and tracked files, as well as the
2621 the changelog, manifest, and tracked files, as well as the
2607 integrity of their crosslinks and indices.
2622 integrity of their crosslinks and indices.
2608 """
2623 """
2609 return hg.verify(repo)
2624 return hg.verify(repo)
2610
2625
2611 def version_(ui):
2626 def version_(ui):
2612 """output version and copyright information"""
2627 """output version and copyright information"""
2613 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2628 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2614 % version.get_version())
2629 % version.get_version())
2615 ui.status(_(
2630 ui.status(_(
2616 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2631 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
2617 "This is free software; see the source for copying conditions. "
2632 "This is free software; see the source for copying conditions. "
2618 "There is NO\nwarranty; "
2633 "There is NO\nwarranty; "
2619 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2634 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
2620 ))
2635 ))
2621
2636
2622 # Command options and aliases are listed here, alphabetically
2637 # Command options and aliases are listed here, alphabetically
2623
2638
2624 globalopts = [
2639 globalopts = [
2625 ('R', 'repository', '',
2640 ('R', 'repository', '',
2626 _('repository root directory or symbolic path name')),
2641 _('repository root directory or symbolic path name')),
2627 ('', 'cwd', '', _('change working directory')),
2642 ('', 'cwd', '', _('change working directory')),
2628 ('y', 'noninteractive', None,
2643 ('y', 'noninteractive', None,
2629 _('do not prompt, assume \'yes\' for any required answers')),
2644 _('do not prompt, assume \'yes\' for any required answers')),
2630 ('q', 'quiet', None, _('suppress output')),
2645 ('q', 'quiet', None, _('suppress output')),
2631 ('v', 'verbose', None, _('enable additional output')),
2646 ('v', 'verbose', None, _('enable additional output')),
2632 ('', 'config', [], _('set/override config option')),
2647 ('', 'config', [], _('set/override config option')),
2633 ('', 'debug', None, _('enable debugging output')),
2648 ('', 'debug', None, _('enable debugging output')),
2634 ('', 'debugger', None, _('start debugger')),
2649 ('', 'debugger', None, _('start debugger')),
2635 ('', 'encoding', util._encoding, _('set the charset encoding')),
2650 ('', 'encoding', util._encoding, _('set the charset encoding')),
2636 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2651 ('', 'encodingmode', util._encodingmode, _('set the charset encoding mode')),
2637 ('', 'lsprof', None, _('print improved command execution profile')),
2652 ('', 'lsprof', None, _('print improved command execution profile')),
2638 ('', 'traceback', None, _('print traceback on exception')),
2653 ('', 'traceback', None, _('print traceback on exception')),
2639 ('', 'time', None, _('time how long the command takes')),
2654 ('', 'time', None, _('time how long the command takes')),
2640 ('', 'profile', None, _('print command execution profile')),
2655 ('', 'profile', None, _('print command execution profile')),
2641 ('', 'version', None, _('output version information and exit')),
2656 ('', 'version', None, _('output version information and exit')),
2642 ('h', 'help', None, _('display help and exit')),
2657 ('h', 'help', None, _('display help and exit')),
2643 ]
2658 ]
2644
2659
2645 dryrunopts = [('n', 'dry-run', None,
2660 dryrunopts = [('n', 'dry-run', None,
2646 _('do not perform actions, just print output'))]
2661 _('do not perform actions, just print output'))]
2647
2662
2648 remoteopts = [
2663 remoteopts = [
2649 ('e', 'ssh', '', _('specify ssh command to use')),
2664 ('e', 'ssh', '', _('specify ssh command to use')),
2650 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2665 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2651 ]
2666 ]
2652
2667
2653 walkopts = [
2668 walkopts = [
2654 ('I', 'include', [], _('include names matching the given patterns')),
2669 ('I', 'include', [], _('include names matching the given patterns')),
2655 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2670 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2656 ]
2671 ]
2657
2672
2658 commitopts = [
2673 commitopts = [
2659 ('m', 'message', '', _('use <text> as commit message')),
2674 ('m', 'message', '', _('use <text> as commit message')),
2660 ('l', 'logfile', '', _('read commit message from <file>')),
2675 ('l', 'logfile', '', _('read commit message from <file>')),
2661 ]
2676 ]
2662
2677
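Each entry in the table that follows maps a command name (with '|' separating aliases and a '^' prefix marking commands shown in the short help listing) to a (callback, option list, synopsis) triple, and each option is a (short flag, long name, default, help text) tuple. A minimal, hypothetical table in the same shape, plus a loop that walks it the way a help generator might:

    def hello(ui_write, name='world', loud=False):
        msg = 'hello, %s' % name
        ui_write(loud and msg.upper() or msg)

    minitable = {
        "^hello|hi": (hello,
                      [('l', 'loud', None, 'shout the greeting')],
                      'hg hello [-l] [NAME]'),
    }

    for name, (func, options, synopsis) in minitable.items():
        aliases = name.lstrip('^').split('|')
        print('%s: %s' % (', '.join(aliases), synopsis))
        for short, long_, default, help_ in options:
            print('  -%s/--%s: %s' % (short, long_, help_))
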
2663 table = {
2678 table = {
2664 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2679 "^add": (add, walkopts + dryrunopts, _('hg add [OPTION]... [FILE]...')),
2665 "addremove":
2680 "addremove":
2666 (addremove,
2681 (addremove,
2667 [('s', 'similarity', '',
2682 [('s', 'similarity', '',
2668 _('guess renamed files by similarity (0<=s<=100)')),
2683 _('guess renamed files by similarity (0<=s<=100)')),
2669 ] + walkopts + dryrunopts,
2684 ] + walkopts + dryrunopts,
2670 _('hg addremove [OPTION]... [FILE]...')),
2685 _('hg addremove [OPTION]... [FILE]...')),
2671 "^annotate":
2686 "^annotate":
2672 (annotate,
2687 (annotate,
2673 [('r', 'rev', '', _('annotate the specified revision')),
2688 [('r', 'rev', '', _('annotate the specified revision')),
2674 ('f', 'follow', None, _('follow file copies and renames')),
2689 ('f', 'follow', None, _('follow file copies and renames')),
2675 ('a', 'text', None, _('treat all files as text')),
2690 ('a', 'text', None, _('treat all files as text')),
2676 ('u', 'user', None, _('list the author')),
2691 ('u', 'user', None, _('list the author')),
2677 ('d', 'date', None, _('list the date')),
2692 ('d', 'date', None, _('list the date')),
2678 ('n', 'number', None, _('list the revision number (default)')),
2693 ('n', 'number', None, _('list the revision number (default)')),
2679 ('c', 'changeset', None, _('list the changeset')),
2694 ('c', 'changeset', None, _('list the changeset')),
2680 ] + walkopts,
2695 ] + walkopts,
2681 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2696 _('hg annotate [-r REV] [-f] [-a] [-u] [-d] [-n] [-c] FILE...')),
2682 "archive":
2697 "archive":
2683 (archive,
2698 (archive,
2684 [('', 'no-decode', None, _('do not pass files through decoders')),
2699 [('', 'no-decode', None, _('do not pass files through decoders')),
2685 ('p', 'prefix', '', _('directory prefix for files in archive')),
2700 ('p', 'prefix', '', _('directory prefix for files in archive')),
2686 ('r', 'rev', '', _('revision to distribute')),
2701 ('r', 'rev', '', _('revision to distribute')),
2687 ('t', 'type', '', _('type of distribution to create')),
2702 ('t', 'type', '', _('type of distribution to create')),
2688 ] + walkopts,
2703 ] + walkopts,
2689 _('hg archive [OPTION]... DEST')),
2704 _('hg archive [OPTION]... DEST')),
2690 "backout":
2705 "backout":
2691 (backout,
2706 (backout,
2692 [('', 'merge', None,
2707 [('', 'merge', None,
2693 _('merge with old dirstate parent after backout')),
2708 _('merge with old dirstate parent after backout')),
2694 ('d', 'date', '', _('record datecode as commit date')),
2709 ('d', 'date', '', _('record datecode as commit date')),
2695 ('', 'parent', '', _('parent to choose when backing out merge')),
2710 ('', 'parent', '', _('parent to choose when backing out merge')),
2696 ('u', 'user', '', _('record user as committer')),
2711 ('u', 'user', '', _('record user as committer')),
2697 ('r', 'rev', '', _('revision to backout')),
2712 ('r', 'rev', '', _('revision to backout')),
2698 ] + walkopts + commitopts,
2713 ] + walkopts + commitopts,
2699 _('hg backout [OPTION]... [-r] REV')),
2714 _('hg backout [OPTION]... [-r] REV')),
2700 "branch": (branch,
2715 "branch": (branch,
2701 [('f', 'force', None,
2716 [('f', 'force', None,
2702 _('set branch name even if it shadows an existing branch'))],
2717 _('set branch name even if it shadows an existing branch'))],
2703 _('hg branch [NAME]')),
2718 _('hg branch [NAME]')),
2704 "branches": (branches, [], _('hg branches')),
2719 "branches": (branches, [], _('hg branches')),
2705 "bundle":
2720 "bundle":
2706 (bundle,
2721 (bundle,
2707 [('f', 'force', None,
2722 [('f', 'force', None,
2708 _('run even when remote repository is unrelated')),
2723 _('run even when remote repository is unrelated')),
2709 ('r', 'rev', [],
2724 ('r', 'rev', [],
2710 _('a changeset you would like to bundle')),
2725 _('a changeset you would like to bundle')),
2711 ('', 'base', [],
2726 ('', 'base', [],
2712 _('a base changeset to specify instead of a destination')),
2727 _('a base changeset to specify instead of a destination')),
2713 ] + remoteopts,
2728 ] + remoteopts,
2714 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2729 _('hg bundle [-f] [-r REV]... [--base REV]... FILE [DEST]')),
2715 "cat":
2730 "cat":
2716 (cat,
2731 (cat,
2717 [('o', 'output', '', _('print output to file with formatted name')),
2732 [('o', 'output', '', _('print output to file with formatted name')),
2718 ('r', 'rev', '', _('print the given revision')),
2733 ('r', 'rev', '', _('print the given revision')),
2719 ] + walkopts,
2734 ] + walkopts,
2720 _('hg cat [OPTION]... FILE...')),
2735 _('hg cat [OPTION]... FILE...')),
2721 "^clone":
2736 "^clone":
2722 (clone,
2737 (clone,
2723 [('U', 'noupdate', None, _('do not update the new working directory')),
2738 [('U', 'noupdate', None, _('do not update the new working directory')),
2724 ('r', 'rev', [],
2739 ('r', 'rev', [],
2725 _('a changeset you would like to have after cloning')),
2740 _('a changeset you would like to have after cloning')),
2726 ('', 'pull', None, _('use pull protocol to copy metadata')),
2741 ('', 'pull', None, _('use pull protocol to copy metadata')),
2727 ('', 'uncompressed', None,
2742 ('', 'uncompressed', None,
2728 _('use uncompressed transfer (fast over LAN)')),
2743 _('use uncompressed transfer (fast over LAN)')),
2729 ] + remoteopts,
2744 ] + remoteopts,
2730 _('hg clone [OPTION]... SOURCE [DEST]')),
2745 _('hg clone [OPTION]... SOURCE [DEST]')),
2731 "^commit|ci":
2746 "^commit|ci":
2732 (commit,
2747 (commit,
2733 [('A', 'addremove', None,
2748 [('A', 'addremove', None,
2734 _('mark new/missing files as added/removed before committing')),
2749 _('mark new/missing files as added/removed before committing')),
2735 ('d', 'date', '', _('record datecode as commit date')),
2750 ('d', 'date', '', _('record datecode as commit date')),
2736 ('u', 'user', '', _('record user as committer')),
2751 ('u', 'user', '', _('record user as committer')),
2737 ] + walkopts + commitopts,
2752 ] + walkopts + commitopts,
2738 _('hg commit [OPTION]... [FILE]...')),
2753 _('hg commit [OPTION]... [FILE]...')),
2739 "copy|cp":
2754 "copy|cp":
2740 (copy,
2755 (copy,
2741 [('A', 'after', None, _('record a copy that has already occurred')),
2756 [('A', 'after', None, _('record a copy that has already occurred')),
2742 ('f', 'force', None,
2757 ('f', 'force', None,
2743 _('forcibly copy over an existing managed file')),
2758 _('forcibly copy over an existing managed file')),
2744 ] + walkopts + dryrunopts,
2759 ] + walkopts + dryrunopts,
2745 _('hg copy [OPTION]... [SOURCE]... DEST')),
2760 _('hg copy [OPTION]... [SOURCE]... DEST')),
2746 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2761 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2747 "debugcomplete":
2762 "debugcomplete":
2748 (debugcomplete,
2763 (debugcomplete,
2749 [('o', 'options', None, _('show the command options'))],
2764 [('o', 'options', None, _('show the command options'))],
2750 _('debugcomplete [-o] CMD')),
2765 _('debugcomplete [-o] CMD')),
2751 "debuginstall": (debuginstall, [], _('debuginstall')),
2766 "debuginstall": (debuginstall, [], _('debuginstall')),
2752 "debugrebuildstate":
2767 "debugrebuildstate":
2753 (debugrebuildstate,
2768 (debugrebuildstate,
2754 [('r', 'rev', '', _('revision to rebuild to'))],
2769 [('r', 'rev', '', _('revision to rebuild to'))],
2755 _('debugrebuildstate [-r REV] [REV]')),
2770 _('debugrebuildstate [-r REV] [REV]')),
2756 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2771 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2757 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2772 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2758 "debugstate": (debugstate, [], _('debugstate')),
2773 "debugstate": (debugstate, [], _('debugstate')),
2759 "debugdate":
2774 "debugdate":
2760 (debugdate,
2775 (debugdate,
2761 [('e', 'extended', None, _('try extended date formats'))],
2776 [('e', 'extended', None, _('try extended date formats'))],
2762 _('debugdate [-e] DATE [RANGE]')),
2777 _('debugdate [-e] DATE [RANGE]')),
2763 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2778 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2764 "debugindex": (debugindex, [], _('debugindex FILE')),
2779 "debugindex": (debugindex, [], _('debugindex FILE')),
2765 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2780 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2766 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2781 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2767 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2782 "debugwalk": (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2768 "^diff":
2783 "^diff":
2769 (diff,
2784 (diff,
2770 [('r', 'rev', [], _('revision')),
2785 [('r', 'rev', [], _('revision')),
2771 ('a', 'text', None, _('treat all files as text')),
2786 ('a', 'text', None, _('treat all files as text')),
2772 ('p', 'show-function', None,
2787 ('p', 'show-function', None,
2773 _('show which function each change is in')),
2788 _('show which function each change is in')),
2774 ('g', 'git', None, _('use git extended diff format')),
2789 ('g', 'git', None, _('use git extended diff format')),
2775 ('', 'nodates', None, _("don't include dates in diff headers")),
2790 ('', 'nodates', None, _("don't include dates in diff headers")),
2776 ('w', 'ignore-all-space', None,
2791 ('w', 'ignore-all-space', None,
2777 _('ignore white space when comparing lines')),
2792 _('ignore white space when comparing lines')),
2778 ('b', 'ignore-space-change', None,
2793 ('b', 'ignore-space-change', None,
2779 _('ignore changes in the amount of white space')),
2794 _('ignore changes in the amount of white space')),
2780 ('B', 'ignore-blank-lines', None,
2795 ('B', 'ignore-blank-lines', None,
2781 _('ignore changes whose lines are all blank')),
2796 _('ignore changes whose lines are all blank')),
2782 ] + walkopts,
2797 ] + walkopts,
2783 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2798 _('hg diff [OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
2784 "^export":
2799 "^export":
2785 (export,
2800 (export,
2786 [('o', 'output', '', _('print output to file with formatted name')),
2801 [('o', 'output', '', _('print output to file with formatted name')),
2787 ('a', 'text', None, _('treat all files as text')),
2802 ('a', 'text', None, _('treat all files as text')),
2788 ('g', 'git', None, _('use git extended diff format')),
2803 ('g', 'git', None, _('use git extended diff format')),
2789 ('', 'nodates', None, _("don't include dates in diff headers")),
2804 ('', 'nodates', None, _("don't include dates in diff headers")),
2790 ('', 'switch-parent', None, _('diff against the second parent'))],
2805 ('', 'switch-parent', None, _('diff against the second parent'))],
2791 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2806 _('hg export [OPTION]... [-o OUTFILESPEC] REV...')),
2792 "grep":
2807 "grep":
2793 (grep,
2808 (grep,
2794 [('0', 'print0', None, _('end fields with NUL')),
2809 [('0', 'print0', None, _('end fields with NUL')),
2795 ('', 'all', None, _('print all revisions that match')),
2810 ('', 'all', None, _('print all revisions that match')),
2796 ('f', 'follow', None,
2811 ('f', 'follow', None,
2797 _('follow changeset history, or file history across copies and renames')),
2812 _('follow changeset history, or file history across copies and renames')),
2798 ('i', 'ignore-case', None, _('ignore case when matching')),
2813 ('i', 'ignore-case', None, _('ignore case when matching')),
2799 ('l', 'files-with-matches', None,
2814 ('l', 'files-with-matches', None,
2800 _('print only filenames and revs that match')),
2815 _('print only filenames and revs that match')),
2801 ('n', 'line-number', None, _('print matching line numbers')),
2816 ('n', 'line-number', None, _('print matching line numbers')),
2802 ('r', 'rev', [], _('search in given revision range')),
2817 ('r', 'rev', [], _('search in given revision range')),
2803 ('u', 'user', None, _('print user who committed change')),
2818 ('u', 'user', None, _('print user who committed change')),
2804 ] + walkopts,
2819 ] + walkopts,
2805 _('hg grep [OPTION]... PATTERN [FILE]...')),
2820 _('hg grep [OPTION]... PATTERN [FILE]...')),
2806 "heads":
2821 "heads":
2807 (heads,
2822 (heads,
2808 [('', 'style', '', _('display using template map file')),
2823 [('', 'style', '', _('display using template map file')),
2809 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2824 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2810 ('', 'template', '', _('display with template'))],
2825 ('', 'template', '', _('display with template'))],
2811 _('hg heads [-r REV]')),
2826 _('hg heads [-r REV]')),
2812 "help": (help_, [], _('hg help [COMMAND]')),
2827 "help": (help_, [], _('hg help [COMMAND]')),
2813 "identify|id": (identify, [], _('hg identify')),
2828 "identify|id": (identify, [], _('hg identify')),
2814 "import|patch":
2829 "import|patch":
2815 (import_,
2830 (import_,
2816 [('p', 'strip', 1,
2831 [('p', 'strip', 1,
2817 _('directory strip option for patch. This has the same\n'
2832 _('directory strip option for patch. This has the same\n'
2818 'meaning as the corresponding patch option')),
2833 'meaning as the corresponding patch option')),
2819 ('b', 'base', '', _('base path')),
2834 ('b', 'base', '', _('base path')),
2820 ('f', 'force', None,
2835 ('f', 'force', None,
2821 _('skip check for outstanding uncommitted changes')),
2836 _('skip check for outstanding uncommitted changes')),
2822 ('', 'exact', None,
2837 ('', 'exact', None,
2823 _('apply patch to the nodes from which it was generated'))] + commitopts,
2838 _('apply patch to the nodes from which it was generated'))] + commitopts,
2824 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2839 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2825 "incoming|in": (incoming,
2840 "incoming|in": (incoming,
2826 [('M', 'no-merges', None, _('do not show merges')),
2841 [('M', 'no-merges', None, _('do not show merges')),
2827 ('f', 'force', None,
2842 ('f', 'force', None,
2828 _('run even when remote repository is unrelated')),
2843 _('run even when remote repository is unrelated')),
2829 ('', 'style', '', _('display using template map file')),
2844 ('', 'style', '', _('display using template map file')),
2830 ('n', 'newest-first', None, _('show newest record first')),
2845 ('n', 'newest-first', None, _('show newest record first')),
2831 ('', 'bundle', '', _('file to store the bundles into')),
2846 ('', 'bundle', '', _('file to store the bundles into')),
2832 ('p', 'patch', None, _('show patch')),
2847 ('p', 'patch', None, _('show patch')),
2833 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2848 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2834 ('', 'template', '', _('display with template')),
2849 ('', 'template', '', _('display with template')),
2835 ] + remoteopts,
2850 ] + remoteopts,
2836 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2851 _('hg incoming [-p] [-n] [-M] [-f] [-r REV]...'
2837 ' [--bundle FILENAME] [SOURCE]')),
2852 ' [--bundle FILENAME] [SOURCE]')),
2838 "^init":
2853 "^init":
2839 (init,
2854 (init,
2840 remoteopts,
2855 remoteopts,
2841 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2856 _('hg init [-e CMD] [--remotecmd CMD] [DEST]')),
2842 "locate":
2857 "locate":
2843 (locate,
2858 (locate,
2844 [('r', 'rev', '', _('search the repository as it stood at rev')),
2859 [('r', 'rev', '', _('search the repository as it stood at rev')),
2845 ('0', 'print0', None,
2860 ('0', 'print0', None,
2846 _('end filenames with NUL, for use with xargs')),
2861 _('end filenames with NUL, for use with xargs')),
2847 ('f', 'fullpath', None,
2862 ('f', 'fullpath', None,
2848 _('print complete paths from the filesystem root')),
2863 _('print complete paths from the filesystem root')),
2849 ] + walkopts,
2864 ] + walkopts,
2850 _('hg locate [OPTION]... [PATTERN]...')),
2865 _('hg locate [OPTION]... [PATTERN]...')),
2851 "^log|history":
2866 "^log|history":
2852 (log,
2867 (log,
2853 [('f', 'follow', None,
2868 [('f', 'follow', None,
2854 _('follow changeset history, or file history across copies and renames')),
2869 _('follow changeset history, or file history across copies and renames')),
2855 ('', 'follow-first', None,
2870 ('', 'follow-first', None,
2856 _('only follow the first parent of merge changesets')),
2871 _('only follow the first parent of merge changesets')),
2857 ('d', 'date', '', _('show revs matching date spec')),
2872 ('d', 'date', '', _('show revs matching date spec')),
2858 ('C', 'copies', None, _('show copied files')),
2873 ('C', 'copies', None, _('show copied files')),
2859 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2874 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
2860 ('l', 'limit', '', _('limit number of changes displayed')),
2875 ('l', 'limit', '', _('limit number of changes displayed')),
2861 ('r', 'rev', [], _('show the specified revision or range')),
2876 ('r', 'rev', [], _('show the specified revision or range')),
2862 ('', 'removed', None, _('include revs where files were removed')),
2877 ('', 'removed', None, _('include revs where files were removed')),
2863 ('M', 'no-merges', None, _('do not show merges')),
2878 ('M', 'no-merges', None, _('do not show merges')),
2864 ('', 'style', '', _('display using template map file')),
2879 ('', 'style', '', _('display using template map file')),
2865 ('m', 'only-merges', None, _('show only merges')),
2880 ('m', 'only-merges', None, _('show only merges')),
2866 ('p', 'patch', None, _('show patch')),
2881 ('p', 'patch', None, _('show patch')),
2867 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2882 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2868 ('', 'template', '', _('display with template')),
2883 ('', 'template', '', _('display with template')),
2869 ] + walkopts,
2884 ] + walkopts,
2870 _('hg log [OPTION]... [FILE]')),
2885 _('hg log [OPTION]... [FILE]')),
2871 "manifest": (manifest, [], _('hg manifest [REV]')),
2886 "manifest": (manifest, [], _('hg manifest [REV]')),
2872 "^merge":
2887 "^merge":
2873 (merge,
2888 (merge,
2874 [('f', 'force', None, _('force a merge with outstanding changes')),
2889 [('f', 'force', None, _('force a merge with outstanding changes')),
2875 ('r', 'rev', '', _('revision to merge')),
2890 ('r', 'rev', '', _('revision to merge')),
2876 ],
2891 ],
2877 _('hg merge [-f] [[-r] REV]')),
2892 _('hg merge [-f] [[-r] REV]')),
2878 "outgoing|out": (outgoing,
2893 "outgoing|out": (outgoing,
2879 [('M', 'no-merges', None, _('do not show merges')),
2894 [('M', 'no-merges', None, _('do not show merges')),
2880 ('f', 'force', None,
2895 ('f', 'force', None,
2881 _('run even when remote repository is unrelated')),
2896 _('run even when remote repository is unrelated')),
2882 ('p', 'patch', None, _('show patch')),
2897 ('p', 'patch', None, _('show patch')),
2883 ('', 'style', '', _('display using template map file')),
2898 ('', 'style', '', _('display using template map file')),
2884 ('r', 'rev', [], _('a specific revision you would like to push')),
2899 ('r', 'rev', [], _('a specific revision you would like to push')),
2885 ('n', 'newest-first', None, _('show newest record first')),
2900 ('n', 'newest-first', None, _('show newest record first')),
2886 ('', 'template', '', _('display with template')),
2901 ('', 'template', '', _('display with template')),
2887 ] + remoteopts,
2902 ] + remoteopts,
2888 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2903 _('hg outgoing [-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
2889 "^parents":
2904 "^parents":
2890 (parents,
2905 (parents,
2891 [('r', 'rev', '', _('show parents from the specified rev')),
2906 [('r', 'rev', '', _('show parents from the specified rev')),
2892 ('', 'style', '', _('display using template map file')),
2907 ('', 'style', '', _('display using template map file')),
2893 ('', 'template', '', _('display with template'))],
2908 ('', 'template', '', _('display with template'))],
2894 _('hg parents [-r REV] [FILE]')),
2909 _('hg parents [-r REV] [FILE]')),
2895 "paths": (paths, [], _('hg paths [NAME]')),
2910 "paths": (paths, [], _('hg paths [NAME]')),
2896 "^pull":
2911 "^pull":
2897 (pull,
2912 (pull,
2898 [('u', 'update', None,
2913 [('u', 'update', None,
2899 _('update to new tip if changesets were pulled')),
2914 _('update to new tip if changesets were pulled')),
2900 ('f', 'force', None,
2915 ('f', 'force', None,
2901 _('run even when remote repository is unrelated')),
2916 _('run even when remote repository is unrelated')),
2902 ('r', 'rev', [],
2917 ('r', 'rev', [],
2903 _('a specific revision up to which you would like to pull')),
2918 _('a specific revision up to which you would like to pull')),
2904 ] + remoteopts,
2919 ] + remoteopts,
2905 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2920 _('hg pull [-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
2906 "^push":
2921 "^push":
2907 (push,
2922 (push,
2908 [('f', 'force', None, _('force push')),
2923 [('f', 'force', None, _('force push')),
2909 ('r', 'rev', [], _('a specific revision you would like to push')),
2924 ('r', 'rev', [], _('a specific revision you would like to push')),
2910 ] + remoteopts,
2925 ] + remoteopts,
2911 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2926 _('hg push [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
2912 "debugrawcommit|rawcommit":
2927 "debugrawcommit|rawcommit":
2913 (rawcommit,
2928 (rawcommit,
2914 [('p', 'parent', [], _('parent')),
2929 [('p', 'parent', [], _('parent')),
2915 ('d', 'date', '', _('date code')),
2930 ('d', 'date', '', _('date code')),
2916 ('u', 'user', '', _('user')),
2931 ('u', 'user', '', _('user')),
2917 ('F', 'files', '', _('file list'))
2932 ('F', 'files', '', _('file list'))
2918 ] + commitopts,
2933 ] + commitopts,
2919 _('hg debugrawcommit [OPTION]... [FILE]...')),
2934 _('hg debugrawcommit [OPTION]... [FILE]...')),
2920 "recover": (recover, [], _('hg recover')),
2935 "recover": (recover, [], _('hg recover')),
2921 "^remove|rm":
2936 "^remove|rm":
2922 (remove,
2937 (remove,
2923 [('A', 'after', None, _('record remove that has already occurred')),
2938 [('A', 'after', None, _('record remove that has already occurred')),
2924 ('f', 'force', None, _('remove file even if modified')),
2939 ('f', 'force', None, _('remove file even if modified')),
2925 ] + walkopts,
2940 ] + walkopts,
2926 _('hg remove [OPTION]... FILE...')),
2941 _('hg remove [OPTION]... FILE...')),
2927 "rename|mv":
2942 "rename|mv":
2928 (rename,
2943 (rename,
2929 [('A', 'after', None, _('record a rename that has already occurred')),
2944 [('A', 'after', None, _('record a rename that has already occurred')),
2930 ('f', 'force', None,
2945 ('f', 'force', None,
2931 _('forcibly copy over an existing managed file')),
2946 _('forcibly copy over an existing managed file')),
2932 ] + walkopts + dryrunopts,
2947 ] + walkopts + dryrunopts,
2933 _('hg rename [OPTION]... SOURCE... DEST')),
2948 _('hg rename [OPTION]... SOURCE... DEST')),
2934 "^revert":
2949 "^revert":
2935 (revert,
2950 (revert,
2936 [('a', 'all', None, _('revert all changes when no arguments given')),
2951 [('a', 'all', None, _('revert all changes when no arguments given')),
2937 ('d', 'date', '', _('tipmost revision matching date')),
2952 ('d', 'date', '', _('tipmost revision matching date')),
2938 ('r', 'rev', '', _('revision to revert to')),
2953 ('r', 'rev', '', _('revision to revert to')),
2939 ('', 'no-backup', None, _('do not save backup copies of files')),
2954 ('', 'no-backup', None, _('do not save backup copies of files')),
2940 ] + walkopts + dryrunopts,
2955 ] + walkopts + dryrunopts,
2941 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2956 _('hg revert [OPTION]... [-r REV] [NAME]...')),
2942 "rollback": (rollback, [], _('hg rollback')),
2957 "rollback": (rollback, [], _('hg rollback')),
2943 "root": (root, [], _('hg root')),
2958 "root": (root, [], _('hg root')),
2944 "showconfig|debugconfig":
2959 "showconfig|debugconfig":
2945 (showconfig,
2960 (showconfig,
2946 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2961 [('u', 'untrusted', None, _('show untrusted configuration options'))],
2947 _('showconfig [-u] [NAME]...')),
2962 _('showconfig [-u] [NAME]...')),
2948 "^serve":
2963 "^serve":
2949 (serve,
2964 (serve,
2950 [('A', 'accesslog', '', _('name of access log file to write to')),
2965 [('A', 'accesslog', '', _('name of access log file to write to')),
2951 ('d', 'daemon', None, _('run server in background')),
2966 ('d', 'daemon', None, _('run server in background')),
2952 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2967 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
2953 ('E', 'errorlog', '', _('name of error log file to write to')),
2968 ('E', 'errorlog', '', _('name of error log file to write to')),
2954 ('p', 'port', 0, _('port to use (default: 8000)')),
2969 ('p', 'port', 0, _('port to use (default: 8000)')),
2955 ('a', 'address', '', _('address to use')),
2970 ('a', 'address', '', _('address to use')),
2956 ('n', 'name', '',
2971 ('n', 'name', '',
2957 _('name to show in web pages (default: working dir)')),
2972 _('name to show in web pages (default: working dir)')),
2958 ('', 'webdir-conf', '', _('name of the webdir config file'
2973 ('', 'webdir-conf', '', _('name of the webdir config file'
2959 ' (serve more than one repo)')),
2974 ' (serve more than one repo)')),
2960 ('', 'pid-file', '', _('name of file to write process ID to')),
2975 ('', 'pid-file', '', _('name of file to write process ID to')),
2961 ('', 'stdio', None, _('for remote clients')),
2976 ('', 'stdio', None, _('for remote clients')),
2962 ('t', 'templates', '', _('web templates to use')),
2977 ('t', 'templates', '', _('web templates to use')),
2963 ('', 'style', '', _('template style to use')),
2978 ('', 'style', '', _('template style to use')),
2964 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2979 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
2965 _('hg serve [OPTION]...')),
2980 _('hg serve [OPTION]...')),
2966 "^status|st":
2981 "^status|st":
2967 (status,
2982 (status,
2968 [('A', 'all', None, _('show status of all files')),
2983 [('A', 'all', None, _('show status of all files')),
2969 ('m', 'modified', None, _('show only modified files')),
2984 ('m', 'modified', None, _('show only modified files')),
2970 ('a', 'added', None, _('show only added files')),
2985 ('a', 'added', None, _('show only added files')),
2971 ('r', 'removed', None, _('show only removed files')),
2986 ('r', 'removed', None, _('show only removed files')),
2972 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2987 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
2973 ('c', 'clean', None, _('show only files without changes')),
2988 ('c', 'clean', None, _('show only files without changes')),
2974 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2989 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
2975 ('i', 'ignored', None, _('show only ignored files')),
2990 ('i', 'ignored', None, _('show only ignored files')),
2976 ('n', 'no-status', None, _('hide status prefix')),
2991 ('n', 'no-status', None, _('hide status prefix')),
2977 ('C', 'copies', None, _('show source of copied files')),
2992 ('C', 'copies', None, _('show source of copied files')),
2978 ('0', 'print0', None,
2993 ('0', 'print0', None,
2979 _('end filenames with NUL, for use with xargs')),
2994 _('end filenames with NUL, for use with xargs')),
2980 ('', 'rev', [], _('show difference from revision')),
2995 ('', 'rev', [], _('show difference from revision')),
2981 ] + walkopts,
2996 ] + walkopts,
2982 _('hg status [OPTION]... [FILE]...')),
2997 _('hg status [OPTION]... [FILE]...')),
2983 "tag":
2998 "tag":
2984 (tag,
2999 (tag,
2985 [('f', 'force', None, _('replace existing tag')),
3000 [('f', 'force', None, _('replace existing tag')),
2986 ('l', 'local', None, _('make the tag local')),
3001 ('l', 'local', None, _('make the tag local')),
2987 ('m', 'message', '', _('message for tag commit log entry')),
3002 ('m', 'message', '', _('message for tag commit log entry')),
2988 ('d', 'date', '', _('record datecode as commit date')),
3003 ('d', 'date', '', _('record datecode as commit date')),
2989 ('u', 'user', '', _('record user as committer')),
3004 ('u', 'user', '', _('record user as committer')),
2990 ('r', 'rev', '', _('revision to tag')),
3005 ('r', 'rev', '', _('revision to tag')),
2991 ('', 'remove', None, _('remove a tag'))],
3006 ('', 'remove', None, _('remove a tag'))],
2992 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3007 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
2993 "tags": (tags, [], _('hg tags')),
3008 "tags": (tags, [], _('hg tags')),
2994 "tip":
3009 "tip":
2995 (tip,
3010 (tip,
2996 [('', 'style', '', _('display using template map file')),
3011 [('', 'style', '', _('display using template map file')),
2997 ('p', 'patch', None, _('show patch')),
3012 ('p', 'patch', None, _('show patch')),
2998 ('', 'template', '', _('display with template'))],
3013 ('', 'template', '', _('display with template'))],
2999 _('hg tip [-p]')),
3014 _('hg tip [-p]')),
3000 "unbundle":
3015 "unbundle":
3001 (unbundle,
3016 (unbundle,
3002 [('u', 'update', None,
3017 [('u', 'update', None,
3003 _('update to new tip if changesets were unbundled'))],
3018 _('update to new tip if changesets were unbundled'))],
3004 _('hg unbundle [-u] FILE')),
3019 _('hg unbundle [-u] FILE')),
3005 "^update|up|checkout|co":
3020 "^update|up|checkout|co":
3006 (update,
3021 (update,
3007 [('C', 'clean', None, _('overwrite locally modified files')),
3022 [('C', 'clean', None, _('overwrite locally modified files')),
3008 ('d', 'date', '', _('tipmost revision matching date')),
3023 ('d', 'date', '', _('tipmost revision matching date')),
3009 ('r', 'rev', '', _('revision'))],
3024 ('r', 'rev', '', _('revision'))],
3010 _('hg update [-C] [-d DATE] [[-r] REV]')),
3025 _('hg update [-C] [-d DATE] [[-r] REV]')),
3011 "verify": (verify, [], _('hg verify')),
3026 "verify": (verify, [], _('hg verify')),
3012 "version": (version_, [], _('hg version')),
3027 "version": (version_, [], _('hg version')),
3013 }
3028 }
3014
3029
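# The table above maps a "name|alias" key (a leading "^" marks a command for
# the short help list) to a (function, options, synopsis) entry, where each
# option is a (shortname, longname, default, help) 4-tuple.  A minimal sketch
# of that shape with a made-up command -- not part of Mercurial itself:
def hello(ui, repo, **opts):
    # ui handles output, repo is the local repository object
    greeting = opts.get('greeting') or 'hello'
    ui.write("%s from %s\n" % (greeting, repo.root))

example_entry = {
    "^hello|hi":
        (hello,
         [('g', 'greeting', '', _('text to print instead of hello'))],
         _('hg hello [-g TEXT]')),
}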
3015 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3030 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3016 " debugindex debugindexdot debugdate debuginstall")
3031 " debugindex debugindexdot debugdate debuginstall")
3017 optionalrepo = ("paths serve showconfig")
3032 optionalrepo = ("paths serve showconfig")
3018
3033
3019 def findpossible(ui, cmd):
3034 def findpossible(ui, cmd):
3020 """
3035 """
3021 Return cmd -> (aliases, command table entry)
3036 Return cmd -> (aliases, command table entry)
3022 for each matching command.
3037 for each matching command.
3023 Return debug commands (or their aliases) only if no normal command matches.
3038 Return debug commands (or their aliases) only if no normal command matches.
3024 """
3039 """
3025 choice = {}
3040 choice = {}
3026 debugchoice = {}
3041 debugchoice = {}
3027 for e in table.keys():
3042 for e in table.keys():
3028 aliases = e.lstrip("^").split("|")
3043 aliases = e.lstrip("^").split("|")
3029 found = None
3044 found = None
3030 if cmd in aliases:
3045 if cmd in aliases:
3031 found = cmd
3046 found = cmd
3032 elif not ui.config("ui", "strict"):
3047 elif not ui.config("ui", "strict"):
3033 for a in aliases:
3048 for a in aliases:
3034 if a.startswith(cmd):
3049 if a.startswith(cmd):
3035 found = a
3050 found = a
3036 break
3051 break
3037 if found is not None:
3052 if found is not None:
3038 if aliases[0].startswith("debug") or found.startswith("debug"):
3053 if aliases[0].startswith("debug") or found.startswith("debug"):
3039 debugchoice[found] = (aliases, table[e])
3054 debugchoice[found] = (aliases, table[e])
3040 else:
3055 else:
3041 choice[found] = (aliases, table[e])
3056 choice[found] = (aliases, table[e])
3042
3057
3043 if not choice and debugchoice:
3058 if not choice and debugchoice:
3044 choice = debugchoice
3059 choice = debugchoice
3045
3060
3046 return choice
3061 return choice
3047
3062
3048 def findcmd(ui, cmd):
3063 def findcmd(ui, cmd):
3049 """Return (aliases, command table entry) for command string."""
3064 """Return (aliases, command table entry) for command string."""
3050 choice = findpossible(ui, cmd)
3065 choice = findpossible(ui, cmd)
3051
3066
3052 if choice.has_key(cmd):
3067 if choice.has_key(cmd):
3053 return choice[cmd]
3068 return choice[cmd]
3054
3069
3055 if len(choice) > 1:
3070 if len(choice) > 1:
3056 clist = choice.keys()
3071 clist = choice.keys()
3057 clist.sort()
3072 clist.sort()
3058 raise AmbiguousCommand(cmd, clist)
3073 raise AmbiguousCommand(cmd, clist)
3059
3074
3060 if choice:
3075 if choice:
3061 return choice.values()[0]
3076 return choice.values()[0]
3062
3077
3063 raise UnknownCommand(cmd)
3078 raise UnknownCommand(cmd)
3064
3079
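# The prefix matching done by findpossible()/findcmd() above, distilled into
# a self-contained sketch (ui.strict handling omitted; the table keys here
# are only a tiny sample):
def possible(cmd, keys):
    choice, debugchoice = {}, {}
    for e in keys:
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        else:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if found.startswith("debug"):
                debugchoice[found] = aliases
            else:
                choice[found] = aliases
    # debug commands are only offered when nothing else matched
    return choice or debugchoice

keys = ["^status|st", "^serve", "debugstate"]
# possible("stat", keys)    -> {'status': ['status', 'st']}   unambiguous prefix
# possible("s", keys)       -> two entries, so findcmd() would raise
#                              AmbiguousCommand("s", ['serve', 'status'])
# possible("debugst", keys) -> {'debugstate': ['debugstate']}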
3065 def catchterm(*args):
3080 def catchterm(*args):
3066 raise util.SignalInterrupt
3081 raise util.SignalInterrupt
3067
3082
3068 def run():
3083 def run():
3069 sys.exit(dispatch(sys.argv[1:]))
3084 sys.exit(dispatch(sys.argv[1:]))
3070
3085
3071 class ParseError(Exception):
3086 class ParseError(Exception):
3072 """Exception raised on errors in parsing the command line."""
3087 """Exception raised on errors in parsing the command line."""
3073
3088
3074 def parse(ui, args):
3089 def parse(ui, args):
3075 options = {}
3090 options = {}
3076 cmdoptions = {}
3091 cmdoptions = {}
3077
3092
3078 try:
3093 try:
3079 args = fancyopts.fancyopts(args, globalopts, options)
3094 args = fancyopts.fancyopts(args, globalopts, options)
3080 except fancyopts.getopt.GetoptError, inst:
3095 except fancyopts.getopt.GetoptError, inst:
3081 raise ParseError(None, inst)
3096 raise ParseError(None, inst)
3082
3097
3083 if args:
3098 if args:
3084 cmd, args = args[0], args[1:]
3099 cmd, args = args[0], args[1:]
3085 aliases, i = findcmd(ui, cmd)
3100 aliases, i = findcmd(ui, cmd)
3086 cmd = aliases[0]
3101 cmd = aliases[0]
3087 defaults = ui.config("defaults", cmd)
3102 defaults = ui.config("defaults", cmd)
3088 if defaults:
3103 if defaults:
3089 args = shlex.split(defaults) + args
3104 args = shlex.split(defaults) + args
3090 c = list(i[1])
3105 c = list(i[1])
3091 else:
3106 else:
3092 cmd = None
3107 cmd = None
3093 c = []
3108 c = []
3094
3109
3095 # combine global options into local
3110 # combine global options into local
3096 for o in globalopts:
3111 for o in globalopts:
3097 c.append((o[0], o[1], options[o[1]], o[3]))
3112 c.append((o[0], o[1], options[o[1]], o[3]))
3098
3113
3099 try:
3114 try:
3100 args = fancyopts.fancyopts(args, c, cmdoptions)
3115 args = fancyopts.fancyopts(args, c, cmdoptions)
3101 except fancyopts.getopt.GetoptError, inst:
3116 except fancyopts.getopt.GetoptError, inst:
3102 raise ParseError(cmd, inst)
3117 raise ParseError(cmd, inst)
3103
3118
3104 # separate global options back out
3119 # separate global options back out
3105 for o in globalopts:
3120 for o in globalopts:
3106 n = o[1]
3121 n = o[1]
3107 options[n] = cmdoptions[n]
3122 options[n] = cmdoptions[n]
3108 del cmdoptions[n]
3123 del cmdoptions[n]
3109
3124
3110 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3125 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3111
3126
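# How a [defaults] section feeds into parse() above: ui.config("defaults",
# cmd) is split with shlex and prepended to the arguments typed on the
# command line.  Illustration with a hypothetical hgrc entry:
#
#   [defaults]
#   log = -l 10 --no-merges
#
import shlex
defaults = "-l 10 --no-merges"      # what ui.config("defaults", "log") would return
args = ["myfile.c"]                 # what the user typed after "hg log"
args = shlex.split(defaults) + args
# args == ['-l', '10', '--no-merges', 'myfile.c'], which is then parsed by
# fancyopts against the log command's option table.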
3112 external = {}
3127 external = {}
3113
3128
3114 def findext(name):
3129 def findext(name):
3115 '''return module with given extension name'''
3130 '''return module with given extension name'''
3116 try:
3131 try:
3117 return sys.modules[external[name]]
3132 return sys.modules[external[name]]
3118 except KeyError:
3133 except KeyError:
3119 for k, v in external.iteritems():
3134 for k, v in external.iteritems():
3120 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3135 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3121 return sys.modules[v]
3136 return sys.modules[v]
3122 raise KeyError(name)
3137 raise KeyError(name)
3123
3138
3124 def load_extensions(ui):
3139 def load_extensions(ui):
3125 added = []
3140 added = []
3126 for ext_name, load_from_name in ui.extensions():
3141 for ext_name, load_from_name in ui.extensions():
3127 if ext_name in external:
3142 if ext_name in external:
3128 continue
3143 continue
3129 try:
3144 try:
3130 if load_from_name:
3145 if load_from_name:
3131 # the module will be loaded in sys.modules
3146 # the module will be loaded in sys.modules
3132 # choose a unique name so that it doesn't
3147 # choose a unique name so that it doesn't
3133 # conflict with other modules
3148 # conflict with other modules
3134 module_name = "hgext_%s" % ext_name.replace('.', '_')
3149 module_name = "hgext_%s" % ext_name.replace('.', '_')
3135 mod = imp.load_source(module_name, load_from_name)
3150 mod = imp.load_source(module_name, load_from_name)
3136 else:
3151 else:
3137 def importh(name):
3152 def importh(name):
3138 mod = __import__(name)
3153 mod = __import__(name)
3139 components = name.split('.')
3154 components = name.split('.')
3140 for comp in components[1:]:
3155 for comp in components[1:]:
3141 mod = getattr(mod, comp)
3156 mod = getattr(mod, comp)
3142 return mod
3157 return mod
3143 try:
3158 try:
3144 mod = importh("hgext.%s" % ext_name)
3159 mod = importh("hgext.%s" % ext_name)
3145 except ImportError:
3160 except ImportError:
3146 mod = importh(ext_name)
3161 mod = importh(ext_name)
3147 external[ext_name] = mod.__name__
3162 external[ext_name] = mod.__name__
3148 added.append((mod, ext_name))
3163 added.append((mod, ext_name))
3149 except (util.SignalInterrupt, KeyboardInterrupt):
3164 except (util.SignalInterrupt, KeyboardInterrupt):
3150 raise
3165 raise
3151 except Exception, inst:
3166 except Exception, inst:
3152 ui.warn(_("*** failed to import extension %s: %s\n") %
3167 ui.warn(_("*** failed to import extension %s: %s\n") %
3153 (ext_name, inst))
3168 (ext_name, inst))
3154 if ui.print_exc():
3169 if ui.print_exc():
3155 return 1
3170 return 1
3156
3171
3157 for mod, name in added:
3172 for mod, name in added:
3158 uisetup = getattr(mod, 'uisetup', None)
3173 uisetup = getattr(mod, 'uisetup', None)
3159 if uisetup:
3174 if uisetup:
3160 uisetup(ui)
3175 uisetup(ui)
3161 reposetup = getattr(mod, 'reposetup', None)
3176 reposetup = getattr(mod, 'reposetup', None)
3162 if reposetup:
3177 if reposetup:
3163 hg.repo_setup_hooks.append(reposetup)
3178 hg.repo_setup_hooks.append(reposetup)
3164 cmdtable = getattr(mod, 'cmdtable', {})
3179 cmdtable = getattr(mod, 'cmdtable', {})
3165 overrides = [cmd for cmd in cmdtable if cmd in table]
3180 overrides = [cmd for cmd in cmdtable if cmd in table]
3166 if overrides:
3181 if overrides:
3167 ui.warn(_("extension '%s' overrides commands: %s\n")
3182 ui.warn(_("extension '%s' overrides commands: %s\n")
3168 % (name, " ".join(overrides)))
3183 % (name, " ".join(overrides)))
3169 table.update(cmdtable)
3184 table.update(cmdtable)
3170
3185
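# load_extensions() above relies on three optional module attributes:
# uisetup(ui), reposetup(ui, repo) (queued on hg.repo_setup_hooks), and a
# cmdtable dict merged into the command table.  A hypothetical minimal
# extension showing that shape -- enable it via the [extensions] section of
# an hgrc; the command and messages are made up for illustration:
from mercurial.i18n import _

def uisetup(ui):
    ui.debug("hello extension: uisetup\n")

def reposetup(ui, repo):
    ui.debug("hello extension: reposetup for %s\n" % repo.root)

def hello(ui, repo, **opts):
    """print a short greeting (example command)"""
    ui.write(_("hello from %s\n") % repo.root)

cmdtable = {
    "hello": (hello, [], _('hg hello')),
}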
3171 def parseconfig(config):
3186 def parseconfig(config):
3172 """parse the --config options from the command line"""
3187 """parse the --config options from the command line"""
3173 parsed = []
3188 parsed = []
3174 for cfg in config:
3189 for cfg in config:
3175 try:
3190 try:
3176 name, value = cfg.split('=', 1)
3191 name, value = cfg.split('=', 1)
3177 section, name = name.split('.', 1)
3192 section, name = name.split('.', 1)
3178 if not section or not name:
3193 if not section or not name:
3179 raise IndexError
3194 raise IndexError
3180 parsed.append((section, name, value))
3195 parsed.append((section, name, value))
3181 except (IndexError, ValueError):
3196 except (IndexError, ValueError):
3182 raise util.Abort(_('malformed --config option: %s') % cfg)
3197 raise util.Abort(_('malformed --config option: %s') % cfg)
3183 return parsed
3198 return parsed
3184
3199
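# What parseconfig() above yields for typical --config values, assuming the
# parseconfig() defined above is in scope; the section/name/value strings
# are only illustrative:
examples = ["ui.username=Alice", "extensions.mq="]
assert parseconfig(examples) == [('ui', 'username', 'Alice'),
                                 ('extensions', 'mq', '')]
# A value without the "section.name=value" shape aborts instead:
#   parseconfig(["nodotnovalue"])  ->  abort: malformed --config option: ...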
3185 def dispatch(args):
3200 def dispatch(args):
3186 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3201 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3187 num = getattr(signal, name, None)
3202 num = getattr(signal, name, None)
3188 if num: signal.signal(num, catchterm)
3203 if num: signal.signal(num, catchterm)
3189
3204
3190 try:
3205 try:
3191 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3206 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3192 except util.Abort, inst:
3207 except util.Abort, inst:
3193 sys.stderr.write(_("abort: %s\n") % inst)
3208 sys.stderr.write(_("abort: %s\n") % inst)
3194 return -1
3209 return -1
3195
3210
3196 load_extensions(u)
3211 load_extensions(u)
3197 u.addreadhook(load_extensions)
3212 u.addreadhook(load_extensions)
3198
3213
3199 try:
3214 try:
3200 cmd, func, args, options, cmdoptions = parse(u, args)
3215 cmd, func, args, options, cmdoptions = parse(u, args)
3201 if options["encoding"]:
3216 if options["encoding"]:
3202 util._encoding = options["encoding"]
3217 util._encoding = options["encoding"]
3203 if options["encodingmode"]:
3218 if options["encodingmode"]:
3204 util._encodingmode = options["encodingmode"]
3219 util._encodingmode = options["encodingmode"]
3205 if options["time"]:
3220 if options["time"]:
3206 def get_times():
3221 def get_times():
3207 t = os.times()
3222 t = os.times()
3208 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3223 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3209 t = (t[0], t[1], t[2], t[3], time.clock())
3224 t = (t[0], t[1], t[2], t[3], time.clock())
3210 return t
3225 return t
3211 s = get_times()
3226 s = get_times()
3212 def print_time():
3227 def print_time():
3213 t = get_times()
3228 t = get_times()
3214 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3229 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3215 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3230 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3216 atexit.register(print_time)
3231 atexit.register(print_time)
3217
3232
3218 # enter the debugger before command execution
3233 # enter the debugger before command execution
3219 if options['debugger']:
3234 if options['debugger']:
3220 pdb.set_trace()
3235 pdb.set_trace()
3221
3236
3222 try:
3237 try:
3223 if options['cwd']:
3238 if options['cwd']:
3224 os.chdir(options['cwd'])
3239 os.chdir(options['cwd'])
3225
3240
3226 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3241 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3227 not options["noninteractive"], options["traceback"],
3242 not options["noninteractive"], options["traceback"],
3228 parseconfig(options["config"]))
3243 parseconfig(options["config"]))
3229
3244
3230 path = u.expandpath(options["repository"]) or ""
3245 path = u.expandpath(options["repository"]) or ""
3231 repo = path and hg.repository(u, path=path) or None
3246 repo = path and hg.repository(u, path=path) or None
3232 if repo and not repo.local():
3247 if repo and not repo.local():
3233 raise util.Abort(_("repository '%s' is not local") % path)
3248 raise util.Abort(_("repository '%s' is not local") % path)
3234
3249
3235 if options['help']:
3250 if options['help']:
3236 return help_(u, cmd, options['version'])
3251 return help_(u, cmd, options['version'])
3237 elif options['version']:
3252 elif options['version']:
3238 return version_(u)
3253 return version_(u)
3239 elif not cmd:
3254 elif not cmd:
3240 return help_(u, 'shortlist')
3255 return help_(u, 'shortlist')
3241
3256
3242 if cmd not in norepo.split():
3257 if cmd not in norepo.split():
3243 try:
3258 try:
3244 if not repo:
3259 if not repo:
3245 repo = hg.repository(u, path=path)
3260 repo = hg.repository(u, path=path)
3246 u = repo.ui
3261 u = repo.ui
3247 except hg.RepoError:
3262 except hg.RepoError:
3248 if cmd not in optionalrepo.split():
3263 if cmd not in optionalrepo.split():
3249 raise
3264 raise
3250 d = lambda: func(u, repo, *args, **cmdoptions)
3265 d = lambda: func(u, repo, *args, **cmdoptions)
3251 else:
3266 else:
3252 d = lambda: func(u, *args, **cmdoptions)
3267 d = lambda: func(u, *args, **cmdoptions)
3253
3268
3254 try:
3269 try:
3255 if options['profile']:
3270 if options['profile']:
3256 import hotshot, hotshot.stats
3271 import hotshot, hotshot.stats
3257 prof = hotshot.Profile("hg.prof")
3272 prof = hotshot.Profile("hg.prof")
3258 try:
3273 try:
3259 try:
3274 try:
3260 return prof.runcall(d)
3275 return prof.runcall(d)
3261 except:
3276 except:
3262 try:
3277 try:
3263 u.warn(_('exception raised - generating '
3278 u.warn(_('exception raised - generating '
3264 'profile anyway\n'))
3279 'profile anyway\n'))
3265 except:
3280 except:
3266 pass
3281 pass
3267 raise
3282 raise
3268 finally:
3283 finally:
3269 prof.close()
3284 prof.close()
3270 stats = hotshot.stats.load("hg.prof")
3285 stats = hotshot.stats.load("hg.prof")
3271 stats.strip_dirs()
3286 stats.strip_dirs()
3272 stats.sort_stats('time', 'calls')
3287 stats.sort_stats('time', 'calls')
3273 stats.print_stats(40)
3288 stats.print_stats(40)
3274 elif options['lsprof']:
3289 elif options['lsprof']:
3275 try:
3290 try:
3276 from mercurial import lsprof
3291 from mercurial import lsprof
3277 except ImportError:
3292 except ImportError:
3278 raise util.Abort(_(
3293 raise util.Abort(_(
3279 'lsprof not available - install from '
3294 'lsprof not available - install from '
3280 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3295 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3281 p = lsprof.Profiler()
3296 p = lsprof.Profiler()
3282 p.enable(subcalls=True)
3297 p.enable(subcalls=True)
3283 try:
3298 try:
3284 return d()
3299 return d()
3285 finally:
3300 finally:
3286 p.disable()
3301 p.disable()
3287 stats = lsprof.Stats(p.getstats())
3302 stats = lsprof.Stats(p.getstats())
3288 stats.sort()
3303 stats.sort()
3289 stats.pprint(top=10, file=sys.stderr, climit=5)
3304 stats.pprint(top=10, file=sys.stderr, climit=5)
3290 else:
3305 else:
3291 return d()
3306 return d()
3292 finally:
3307 finally:
3293 u.flush()
3308 u.flush()
3294 except:
3309 except:
3295 # enter the debugger when we hit an exception
3310 # enter the debugger when we hit an exception
3296 if options['debugger']:
3311 if options['debugger']:
3297 pdb.post_mortem(sys.exc_info()[2])
3312 pdb.post_mortem(sys.exc_info()[2])
3298 u.print_exc()
3313 u.print_exc()
3299 raise
3314 raise
3300 except ParseError, inst:
3315 except ParseError, inst:
3301 if inst.args[0]:
3316 if inst.args[0]:
3302 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3317 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3303 help_(u, inst.args[0])
3318 help_(u, inst.args[0])
3304 else:
3319 else:
3305 u.warn(_("hg: %s\n") % inst.args[1])
3320 u.warn(_("hg: %s\n") % inst.args[1])
3306 help_(u, 'shortlist')
3321 help_(u, 'shortlist')
3307 except AmbiguousCommand, inst:
3322 except AmbiguousCommand, inst:
3308 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3323 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3309 (inst.args[0], " ".join(inst.args[1])))
3324 (inst.args[0], " ".join(inst.args[1])))
3310 except UnknownCommand, inst:
3325 except UnknownCommand, inst:
3311 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3326 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3312 help_(u, 'shortlist')
3327 help_(u, 'shortlist')
3313 except hg.RepoError, inst:
3328 except hg.RepoError, inst:
3314 u.warn(_("abort: %s!\n") % inst)
3329 u.warn(_("abort: %s!\n") % inst)
3315 except lock.LockHeld, inst:
3330 except lock.LockHeld, inst:
3316 if inst.errno == errno.ETIMEDOUT:
3331 if inst.errno == errno.ETIMEDOUT:
3317 reason = _('timed out waiting for lock held by %s') % inst.locker
3332 reason = _('timed out waiting for lock held by %s') % inst.locker
3318 else:
3333 else:
3319 reason = _('lock held by %s') % inst.locker
3334 reason = _('lock held by %s') % inst.locker
3320 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3335 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3321 except lock.LockUnavailable, inst:
3336 except lock.LockUnavailable, inst:
3322 u.warn(_("abort: could not lock %s: %s\n") %
3337 u.warn(_("abort: could not lock %s: %s\n") %
3323 (inst.desc or inst.filename, inst.strerror))
3338 (inst.desc or inst.filename, inst.strerror))
3324 except revlog.RevlogError, inst:
3339 except revlog.RevlogError, inst:
3325 u.warn(_("abort: %s!\n") % inst)
3340 u.warn(_("abort: %s!\n") % inst)
3326 except util.SignalInterrupt:
3341 except util.SignalInterrupt:
3327 u.warn(_("killed!\n"))
3342 u.warn(_("killed!\n"))
3328 except KeyboardInterrupt:
3343 except KeyboardInterrupt:
3329 try:
3344 try:
3330 u.warn(_("interrupted!\n"))
3345 u.warn(_("interrupted!\n"))
3331 except IOError, inst:
3346 except IOError, inst:
3332 if inst.errno == errno.EPIPE:
3347 if inst.errno == errno.EPIPE:
3333 if u.debugflag:
3348 if u.debugflag:
3334 u.warn(_("\nbroken pipe\n"))
3349 u.warn(_("\nbroken pipe\n"))
3335 else:
3350 else:
3336 raise
3351 raise
3337 except socket.error, inst:
3352 except socket.error, inst:
3338 u.warn(_("abort: %s\n") % inst[1])
3353 u.warn(_("abort: %s\n") % inst[1])
3339 except IOError, inst:
3354 except IOError, inst:
3340 if hasattr(inst, "code"):
3355 if hasattr(inst, "code"):
3341 u.warn(_("abort: %s\n") % inst)
3356 u.warn(_("abort: %s\n") % inst)
3342 elif hasattr(inst, "reason"):
3357 elif hasattr(inst, "reason"):
3343 try: # usually it is in the form (errno, strerror)
3358 try: # usually it is in the form (errno, strerror)
3344 reason = inst.reason.args[1]
3359 reason = inst.reason.args[1]
3345 except: # it might be anything, for example a string
3360 except: # it might be anything, for example a string
3346 reason = inst.reason
3361 reason = inst.reason
3347 u.warn(_("abort: error: %s\n") % reason)
3362 u.warn(_("abort: error: %s\n") % reason)
3348 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3363 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3349 if u.debugflag:
3364 if u.debugflag:
3350 u.warn(_("broken pipe\n"))
3365 u.warn(_("broken pipe\n"))
3351 elif getattr(inst, "strerror", None):
3366 elif getattr(inst, "strerror", None):
3352 if getattr(inst, "filename", None):
3367 if getattr(inst, "filename", None):
3353 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3368 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3354 else:
3369 else:
3355 u.warn(_("abort: %s\n") % inst.strerror)
3370 u.warn(_("abort: %s\n") % inst.strerror)
3356 else:
3371 else:
3357 raise
3372 raise
3358 except OSError, inst:
3373 except OSError, inst:
3359 if getattr(inst, "filename", None):
3374 if getattr(inst, "filename", None):
3360 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3375 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3361 else:
3376 else:
3362 u.warn(_("abort: %s\n") % inst.strerror)
3377 u.warn(_("abort: %s\n") % inst.strerror)
3363 except util.UnexpectedOutput, inst:
3378 except util.UnexpectedOutput, inst:
3364 u.warn(_("abort: %s") % inst[0])
3379 u.warn(_("abort: %s") % inst[0])
3365 if not isinstance(inst[1], basestring):
3380 if not isinstance(inst[1], basestring):
3366 u.warn(" %r\n" % (inst[1],))
3381 u.warn(" %r\n" % (inst[1],))
3367 elif not inst[1]:
3382 elif not inst[1]:
3368 u.warn(_(" empty string\n"))
3383 u.warn(_(" empty string\n"))
3369 else:
3384 else:
3370 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3385 u.warn("\n%r\n" % util.ellipsis(inst[1]))
3371 except util.Abort, inst:
3386 except util.Abort, inst:
3372 u.warn(_("abort: %s\n") % inst)
3387 u.warn(_("abort: %s\n") % inst)
3373 except TypeError, inst:
3388 except TypeError, inst:
3374 # was this an argument error?
3389 # was this an argument error?
3375 tb = traceback.extract_tb(sys.exc_info()[2])
3390 tb = traceback.extract_tb(sys.exc_info()[2])
3376 if len(tb) > 2: # no
3391 if len(tb) > 2: # no
3377 raise
3392 raise
3378 u.debug(inst, "\n")
3393 u.debug(inst, "\n")
3379 u.warn(_("%s: invalid arguments\n") % cmd)
3394 u.warn(_("%s: invalid arguments\n") % cmd)
3380 help_(u, cmd)
3395 help_(u, cmd)
3381 except SystemExit, inst:
3396 except SystemExit, inst:
3382 # Commands shouldn't sys.exit directly, but give a return code.
3397 # Commands shouldn't sys.exit directly, but give a return code.
3383 # Just in case, catch this and pass the exit code to the caller.
3398 # Just in case, catch this and pass the exit code to the caller.
3384 return inst.code
3399 return inst.code
3385 except:
3400 except:
3386 u.warn(_("** unknown exception encountered, details follow\n"))
3401 u.warn(_("** unknown exception encountered, details follow\n"))
3387 u.warn(_("** report bug details to "
3402 u.warn(_("** report bug details to "
3388 "http://www.selenic.com/mercurial/bts\n"))
3403 "http://www.selenic.com/mercurial/bts\n"))
3389 u.warn(_("** or mercurial@selenic.com\n"))
3404 u.warn(_("** or mercurial@selenic.com\n"))
3390 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3405 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3391 % version.get_version())
3406 % version.get_version())
3392 raise
3407 raise
3393
3408
3394 return -1
3409 return -1
@@ -1,281 +1,289 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from i18n import _
11 from i18n import _
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
12 import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo
13 import errno, lock, os, shutil, util
13 import errno, lock, os, shutil, util, cmdutil
14 import merge as _merge
14 import merge as _merge
15 import verify as _verify
15 import verify as _verify
16
16
17 def _local(path):
17 def _local(path):
18 return (os.path.isfile(util.drop_scheme('file', path)) and
18 return (os.path.isfile(util.drop_scheme('file', path)) and
19 bundlerepo or localrepo)
19 bundlerepo or localrepo)
20
20
21 schemes = {
21 schemes = {
22 'bundle': bundlerepo,
22 'bundle': bundlerepo,
23 'file': _local,
23 'file': _local,
24 'hg': httprepo,
24 'hg': httprepo,
25 'http': httprepo,
25 'http': httprepo,
26 'https': httprepo,
26 'https': httprepo,
27 'old-http': statichttprepo,
27 'old-http': statichttprepo,
28 'ssh': sshrepo,
28 'ssh': sshrepo,
29 'static-http': statichttprepo,
29 'static-http': statichttprepo,
30 }
30 }
31
31
32 def _lookup(path):
32 def _lookup(path):
33 scheme = 'file'
33 scheme = 'file'
34 if path:
34 if path:
35 c = path.find(':')
35 c = path.find(':')
36 if c > 0:
36 if c > 0:
37 scheme = path[:c]
37 scheme = path[:c]
38 thing = schemes.get(scheme) or schemes['file']
38 thing = schemes.get(scheme) or schemes['file']
39 try:
39 try:
40 return thing(path)
40 return thing(path)
41 except TypeError:
41 except TypeError:
42 return thing
42 return thing
43
43
44 def islocal(repo):
44 def islocal(repo):
45 '''return true if repo or path is local'''
45 '''return true if repo or path is local'''
46 if isinstance(repo, str):
46 if isinstance(repo, str):
47 try:
47 try:
48 return _lookup(repo).islocal(repo)
48 return _lookup(repo).islocal(repo)
49 except AttributeError:
49 except AttributeError:
50 return False
50 return False
51 return repo.local()
51 return repo.local()
52
52
53 repo_setup_hooks = []
53 repo_setup_hooks = []
54
54
55 def repository(ui, path='', create=False):
55 def repository(ui, path='', create=False):
56 """return a repository object for the specified path"""
56 """return a repository object for the specified path"""
57 repo = _lookup(path).instance(ui, path, create)
57 repo = _lookup(path).instance(ui, path, create)
58 ui = getattr(repo, "ui", ui)
58 ui = getattr(repo, "ui", ui)
59 for hook in repo_setup_hooks:
59 for hook in repo_setup_hooks:
60 hook(ui, repo)
60 hook(ui, repo)
61 return repo
61 return repo
62
62
63 def defaultdest(source):
63 def defaultdest(source):
64 '''return default destination of clone if none is given'''
64 '''return default destination of clone if none is given'''
65 return os.path.basename(os.path.normpath(source))
65 return os.path.basename(os.path.normpath(source))
66
66
67 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
67 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
68 stream=False):
68 stream=False):
69 """Make a copy of an existing repository.
69 """Make a copy of an existing repository.
70
70
71 Create a copy of an existing repository in a new directory. The
71 Create a copy of an existing repository in a new directory. The
72 source and destination are URLs, as passed to the repository
72 source and destination are URLs, as passed to the repository
73 function. Returns a pair of repository objects, the source and
73 function. Returns a pair of repository objects, the source and
74 newly created destination.
74 newly created destination.
75
75
76 The location of the source is added to the new repository's
76 The location of the source is added to the new repository's
77 .hg/hgrc file, as the default to be used for future pulls and
77 .hg/hgrc file, as the default to be used for future pulls and
78 pushes.
78 pushes.
79
79
80 If an exception is raised, the partly cloned/updated destination
80 If an exception is raised, the partly cloned/updated destination
81 repository will be deleted.
81 repository will be deleted.
82
82
83 Arguments:
83 Arguments:
84
84
85 source: repository object or URL
85 source: repository object or URL
86
86
87 dest: URL of destination repository to create (defaults to base
87 dest: URL of destination repository to create (defaults to base
88 name of source repository)
88 name of source repository)
89
89
90 pull: always pull from source repository, even in local case
90 pull: always pull from source repository, even in local case
91
91
92 stream: stream raw data uncompressed from repository (fast over
92 stream: stream raw data uncompressed from repository (fast over
93 LAN, slow over WAN)
93 LAN, slow over WAN)
94
94
95 rev: revision to clone up to (implies pull=True)
95 rev: revision to clone up to (implies pull=True)
96
96
97 update: update working directory after clone completes, if
97 update: update working directory after clone completes, if
98 destination is local repository
98 destination is local repository
99 """
99 """
100
101 origsource = source
102 source, rev = cmdutil.parseurl(ui.expandpath(source), rev)
103
100 if isinstance(source, str):
104 if isinstance(source, str):
101 src_repo = repository(ui, source)
105 src_repo = repository(ui, source)
102 else:
106 else:
103 src_repo = source
107 src_repo = source
104 source = src_repo.url()
108 source = src_repo.url()
105
109
106 if dest is None:
110 if dest is None:
107 dest = defaultdest(source)
111 dest = defaultdest(source)
108 ui.status(_("destination directory: %s\n") % dest)
112 ui.status(_("destination directory: %s\n") % dest)
109
113
110 def localpath(path):
114 def localpath(path):
111 if path.startswith('file://'):
115 if path.startswith('file://'):
112 return path[7:]
116 return path[7:]
113 if path.startswith('file:'):
117 if path.startswith('file:'):
114 return path[5:]
118 return path[5:]
115 return path
119 return path
116
120
117 dest = localpath(dest)
121 dest = localpath(dest)
118 source = localpath(source)
122 source = localpath(source)
119
123
120 if os.path.exists(dest):
124 if os.path.exists(dest):
121 raise util.Abort(_("destination '%s' already exists") % dest)
125 raise util.Abort(_("destination '%s' already exists") % dest)
122
126
123 class DirCleanup(object):
127 class DirCleanup(object):
124 def __init__(self, dir_):
128 def __init__(self, dir_):
125 self.rmtree = shutil.rmtree
129 self.rmtree = shutil.rmtree
126 self.dir_ = dir_
130 self.dir_ = dir_
127 def close(self):
131 def close(self):
128 self.dir_ = None
132 self.dir_ = None
129 def __del__(self):
133 def __del__(self):
130 if self.dir_:
134 if self.dir_:
131 self.rmtree(self.dir_, True)
135 self.rmtree(self.dir_, True)
132
136
133 dir_cleanup = None
137 dir_cleanup = None
134 if islocal(dest):
138 if islocal(dest):
135 dir_cleanup = DirCleanup(dest)
139 dir_cleanup = DirCleanup(dest)
136
140
137 abspath = source
141 abspath = origsource
138 copy = False
142 copy = False
139 if src_repo.local() and islocal(dest):
143 if src_repo.local() and islocal(dest):
140 abspath = os.path.abspath(source)
144 abspath = os.path.abspath(origsource)
141 copy = not pull and not rev
145 copy = not pull and not rev
142
146
143 src_lock, dest_lock = None, None
147 src_lock, dest_lock = None, None
144 if copy:
148 if copy:
145 try:
149 try:
146 # we use a lock here because if we race with commit, we
150 # we use a lock here because if we race with commit, we
147 # can end up with extra data in the cloned revlogs that's
151 # can end up with extra data in the cloned revlogs that's
148 # not pointed to by changesets, thus causing verify to
152 # not pointed to by changesets, thus causing verify to
149 # fail
153 # fail
150 src_lock = src_repo.lock()
154 src_lock = src_repo.lock()
151 except lock.LockException:
155 except lock.LockException:
152 copy = False
156 copy = False
153
157
154 if copy:
158 if copy:
155 def force_copy(src, dst):
159 def force_copy(src, dst):
156 try:
160 try:
157 util.copyfiles(src, dst)
161 util.copyfiles(src, dst)
158 except OSError, inst:
162 except OSError, inst:
159 if inst.errno != errno.ENOENT:
163 if inst.errno != errno.ENOENT:
160 raise
164 raise
161
165
162 src_store = os.path.realpath(src_repo.spath)
166 src_store = os.path.realpath(src_repo.spath)
163 if not os.path.exists(dest):
167 if not os.path.exists(dest):
164 os.mkdir(dest)
168 os.mkdir(dest)
165 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
169 dest_path = os.path.realpath(os.path.join(dest, ".hg"))
166 os.mkdir(dest_path)
170 os.mkdir(dest_path)
167 if src_repo.spath != src_repo.path:
171 if src_repo.spath != src_repo.path:
168 dest_store = os.path.join(dest_path, "store")
172 dest_store = os.path.join(dest_path, "store")
169 os.mkdir(dest_store)
173 os.mkdir(dest_store)
170 else:
174 else:
171 dest_store = dest_path
175 dest_store = dest_path
172 # copy the requires file
176 # copy the requires file
173 force_copy(src_repo.join("requires"),
177 force_copy(src_repo.join("requires"),
174 os.path.join(dest_path, "requires"))
178 os.path.join(dest_path, "requires"))
175 # we lock here to avoid premature writing to the target
179 # we lock here to avoid premature writing to the target
176 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
180 dest_lock = lock.lock(os.path.join(dest_store, "lock"))
177
181
178 files = ("data",
182 files = ("data",
179 "00manifest.d", "00manifest.i",
183 "00manifest.d", "00manifest.i",
180 "00changelog.d", "00changelog.i")
184 "00changelog.d", "00changelog.i")
181 for f in files:
185 for f in files:
182 src = os.path.join(src_store, f)
186 src = os.path.join(src_store, f)
183 dst = os.path.join(dest_store, f)
187 dst = os.path.join(dest_store, f)
184 force_copy(src, dst)
188 force_copy(src, dst)
185
189
186 # we need to re-init the repo after manually copying the data
190 # we need to re-init the repo after manually copying the data
187 # into it
191 # into it
188 dest_repo = repository(ui, dest)
192 dest_repo = repository(ui, dest)
189
193
190 else:
194 else:
191 dest_repo = repository(ui, dest, create=True)
195 dest_repo = repository(ui, dest, create=True)
192
196
193 revs = None
197 revs = None
194 if rev:
198 if rev:
195 if 'lookup' not in src_repo.capabilities:
199 if 'lookup' not in src_repo.capabilities:
196 raise util.Abort(_("src repository does not support revision "
200 raise util.Abort(_("src repository does not support revision "
197 "lookup and so doesn't support clone by "
201 "lookup and so doesn't support clone by "
198 "revision"))
202 "revision"))
199 revs = [src_repo.lookup(r) for r in rev]
203 revs = [src_repo.lookup(r) for r in rev]
200
204
201 if dest_repo.local():
205 if dest_repo.local():
202 dest_repo.clone(src_repo, heads=revs, stream=stream)
206 dest_repo.clone(src_repo, heads=revs, stream=stream)
203 elif src_repo.local():
207 elif src_repo.local():
204 src_repo.push(dest_repo, revs=revs)
208 src_repo.push(dest_repo, revs=revs)
205 else:
209 else:
206 raise util.Abort(_("clone from remote to remote not supported"))
210 raise util.Abort(_("clone from remote to remote not supported"))
207
211
208 if src_lock:
212 if src_lock:
209 src_lock.release()
213 src_lock.release()
210
214
211 if dest_repo.local():
215 if dest_repo.local():
212 fp = dest_repo.opener("hgrc", "w", text=True)
216 fp = dest_repo.opener("hgrc", "w", text=True)
213 fp.write("[paths]\n")
217 fp.write("[paths]\n")
214 fp.write("default = %s\n" % abspath)
218 fp.write("default = %s\n" % abspath)
215 fp.close()
219 fp.close()
216
220
217 if dest_lock:
221 if dest_lock:
218 dest_lock.release()
222 dest_lock.release()
219
223
220 if update:
224 if update:
221 _update(dest_repo, dest_repo.changelog.tip())
225 try:
226 checkout = dest_repo.lookup("default")
227 except:
228 checkout = dest_repo.changelog.tip()
229 _update(dest_repo, checkout)
222 if dir_cleanup:
230 if dir_cleanup:
223 dir_cleanup.close()
231 dir_cleanup.close()
224
232
225 return src_repo, dest_repo
233 return src_repo, dest_repo
226
234
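# A sketch of driving clone() above from Python rather than "hg clone"; the
# source URL and destination directory are placeholders and error handling
# is omitted:
from mercurial import ui, hg

u = ui.ui()
src_repo, dest_repo = hg.clone(u, "http://www.selenic.com/hg",
                               dest="hg-copy", pull=False, rev=None,
                               update=True, stream=False)
# dest_repo ends up with a .hg/hgrc whose [paths] default points back at the
# source, and its working directory is checked out because update=True.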
227 def _showstats(repo, stats):
235 def _showstats(repo, stats):
228 stats = ((stats[0], _("updated")),
236 stats = ((stats[0], _("updated")),
229 (stats[1], _("merged")),
237 (stats[1], _("merged")),
230 (stats[2], _("removed")),
238 (stats[2], _("removed")),
231 (stats[3], _("unresolved")))
239 (stats[3], _("unresolved")))
232 note = ", ".join([_("%d files %s") % s for s in stats])
240 note = ", ".join([_("%d files %s") % s for s in stats])
233 repo.ui.status("%s\n" % note)
241 repo.ui.status("%s\n" % note)
234
242
235 def _update(repo, node): return update(repo, node)
243 def _update(repo, node): return update(repo, node)
236
244
237 def update(repo, node):
245 def update(repo, node):
238 """update the working directory to node, merging linear changes"""
246 """update the working directory to node, merging linear changes"""
239 pl = repo.parents()
247 pl = repo.parents()
240 stats = _merge.update(repo, node, False, False, None, None)
248 stats = _merge.update(repo, node, False, False, None, None)
241 _showstats(repo, stats)
249 _showstats(repo, stats)
242 if stats[3]:
250 if stats[3]:
243 repo.ui.status(_("There are unresolved merges with"
251 repo.ui.status(_("There are unresolved merges with"
244 " locally modified files.\n"))
252 " locally modified files.\n"))
245 if stats[1]:
253 if stats[1]:
246 repo.ui.status(_("You can finish the partial merge using:\n"))
254 repo.ui.status(_("You can finish the partial merge using:\n"))
247 else:
255 else:
248 repo.ui.status(_("You can redo the full merge using:\n"))
256 repo.ui.status(_("You can redo the full merge using:\n"))
249 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
257 # len(pl)==1, otherwise _merge.update() would have raised util.Abort:
250 repo.ui.status(_(" hg update %s\n hg update %s\n")
258 repo.ui.status(_(" hg update %s\n hg update %s\n")
251 % (pl[0].rev(), repo.changectx(node).rev()))
259 % (pl[0].rev(), repo.changectx(node).rev()))
252 return stats[3]
260 return stats[3]
253
261
254 def clean(repo, node, wlock=None, show_stats=True):
262 def clean(repo, node, wlock=None, show_stats=True):
255 """forcibly switch the working directory to node, clobbering changes"""
263 """forcibly switch the working directory to node, clobbering changes"""
256 stats = _merge.update(repo, node, False, True, None, wlock)
264 stats = _merge.update(repo, node, False, True, None, wlock)
257 if show_stats: _showstats(repo, stats)
265 if show_stats: _showstats(repo, stats)
258 return stats[3]
266 return stats[3]
259
267
260 def merge(repo, node, force=None, remind=True, wlock=None):
268 def merge(repo, node, force=None, remind=True, wlock=None):
261 """branch merge with node, resolving changes"""
269 """branch merge with node, resolving changes"""
262 stats = _merge.update(repo, node, True, force, False, wlock)
270 stats = _merge.update(repo, node, True, force, False, wlock)
263 _showstats(repo, stats)
271 _showstats(repo, stats)
264 if stats[3]:
272 if stats[3]:
265 pl = repo.parents()
273 pl = repo.parents()
266 repo.ui.status(_("There are unresolved merges,"
274 repo.ui.status(_("There are unresolved merges,"
267 " you can redo the full merge using:\n"
275 " you can redo the full merge using:\n"
268 " hg update -C %s\n"
276 " hg update -C %s\n"
269 " hg merge %s\n")
277 " hg merge %s\n")
270 % (pl[0].rev(), pl[1].rev()))
278 % (pl[0].rev(), pl[1].rev()))
271 elif remind:
279 elif remind:
272 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
280 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
273 return stats[3]
281 return stats[3]
274
282
275 def revert(repo, node, choose, wlock):
283 def revert(repo, node, choose, wlock):
276 """revert changes to revision in node without updating dirstate"""
284 """revert changes to revision in node without updating dirstate"""
277 return _merge.update(repo, node, False, True, choose, wlock)[3]
285 return _merge.update(repo, node, False, True, choose, wlock)[3]
278
286
279 def verify(repo):
287 def verify(repo):
280 """verify the consistency of a repository"""
288 """verify the consistency of a repository"""
281 return _verify.verify(repo)
289 return _verify.verify(repo)
@@ -1,62 +1,78 b''
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
1 # hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes
9 import os, mimetypes
10
10
11 def get_mtime(repo_path):
11 def get_mtime(repo_path):
12 store_path = os.path.join(repo_path, ".hg")
12 store_path = os.path.join(repo_path, ".hg")
13 if not os.path.isdir(os.path.join(store_path, "data")):
13 if not os.path.isdir(os.path.join(store_path, "data")):
14 store_path = os.path.join(store_path, "store")
14 store_path = os.path.join(store_path, "store")
15 cl_path = os.path.join(store_path, "00changelog.i")
15 cl_path = os.path.join(store_path, "00changelog.i")
16 if os.path.exists(cl_path):
16 if os.path.exists(cl_path):
17 return os.stat(cl_path).st_mtime
17 return os.stat(cl_path).st_mtime
18 else:
18 else:
19 return os.stat(store_path).st_mtime
19 return os.stat(store_path).st_mtime
20
20
21 def staticfile(directory, fname, req):
21 def staticfile(directory, fname, req):
22 """return a file inside directory with guessed content-type header
22 """return a file inside directory with guessed content-type header
23
23
24 fname always uses '/' as directory separator and isn't allowed to
24 fname always uses '/' as directory separator and isn't allowed to
25 contain unusual path components.
25 contain unusual path components.
26 Content-type is guessed using the mimetypes module.
26 Content-type is guessed using the mimetypes module.
27 Return an empty string if fname is illegal or file not found.
27 Return an empty string if fname is illegal or file not found.
28
28
29 """
29 """
30 parts = fname.split('/')
30 parts = fname.split('/')
31 path = directory
31 path = directory
32 for part in parts:
32 for part in parts:
33 if (part in ('', os.curdir, os.pardir) or
33 if (part in ('', os.curdir, os.pardir) or
34 os.sep in part or os.altsep is not None and os.altsep in part):
34 os.sep in part or os.altsep is not None and os.altsep in part):
35 return ""
35 return ""
36 path = os.path.join(path, part)
36 path = os.path.join(path, part)
37 try:
37 try:
38 os.stat(path)
38 os.stat(path)
39 ct = mimetypes.guess_type(path)[0] or "text/plain"
39 ct = mimetypes.guess_type(path)[0] or "text/plain"
40 req.header([('Content-type', ct),
40 req.header([('Content-type', ct),
41 ('Content-length', str(os.path.getsize(path)))])
41 ('Content-length', str(os.path.getsize(path)))])
42 return file(path, 'rb').read()
42 return file(path, 'rb').read()
43 except (TypeError, OSError):
43 except (TypeError, OSError):
44 # illegal fname or unreadable file
44 # illegal fname or unreadable file
45 return ""
45 return ""
46
46
47 def style_map(templatepath, style):
47 def style_map(templatepath, style):
48 """Return path to mapfile for a given style.
48 """Return path to mapfile for a given style.
49
49
50 Searches mapfile in the following locations:
50 Searches mapfile in the following locations:
51 1. templatepath/style/map
51 1. templatepath/style/map
52 2. templatepath/map-style
52 2. templatepath/map-style
53 3. templatepath/map
53 3. templatepath/map
54 """
54 """
55 locations = style and [os.path.join(style, "map"), "map-"+style] or []
55 locations = style and [os.path.join(style, "map"), "map-"+style] or []
56 locations.append("map")
56 locations.append("map")
57 for location in locations:
57 for location in locations:
58 mapfile = os.path.join(templatepath, location)
58 mapfile = os.path.join(templatepath, location)
59 if os.path.isfile(mapfile):
59 if os.path.isfile(mapfile):
60 return mapfile
60 return mapfile
61 raise RuntimeError("No hgweb templates found in %r" % templatepath)
61 raise RuntimeError("No hgweb templates found in %r" % templatepath)
62
62
63 def paritygen(stripecount, offset=0):
64 """count parity of horizontal stripes for easier reading"""
65 if stripecount and offset:
66 # account for offset, e.g. due to building the list in reverse
67 count = (stripecount + offset) % stripecount
68 parity = (stripecount + offset) / stripecount & 1
69 else:
70 count = 0
71 parity = 0
72 while True:
73 yield parity
74 count += 1
75 if stripecount and count >= stripecount:
76 parity = 1 - parity
77 count = 0
78
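# A minimal, runnable sketch of how the paritygen() generator added above is
# consumed elsewhere in this changeset (one parity.next() per rendered row);
# the function body is copied verbatim from the new hgweb/common.py, and
# Python 2 generator semantics (.next(), integer division) are assumed.

def paritygen(stripecount, offset=0):
    """count parity of horizontal stripes for easier reading"""
    if stripecount and offset:
        # account for offset, e.g. due to building the list in reverse
        count = (stripecount + offset) % stripecount
        parity = (stripecount + offset) / stripecount & 1
    else:
        count = 0
        parity = 0
    while True:
        yield parity
        count += 1
        if stripecount and count >= stripecount:
            parity = 1 - parity
            count = 0

if __name__ == '__main__':
    parity = paritygen(2)
    # two rows per stripe: prints [0, 0, 1, 1, 0, 0, 1, 1]
    print [parity.next() for _ in xrange(8)]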
@@ -1,1173 +1,1158 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
9 import os, mimetypes, re, zlib, mimetools, cStringIO, sys
10 import tempfile, urllib, bz2
10 import tempfile, urllib, bz2
11 from mercurial.node import *
11 from mercurial.node import *
12 from mercurial.i18n import gettext as _
12 from mercurial.i18n import gettext as _
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
13 from mercurial import mdiff, ui, hg, util, archival, streamclone, patch
14 from mercurial import revlog, templater
14 from mercurial import revlog, templater
15 from common import get_mtime, staticfile, style_map
15 from common import get_mtime, staticfile, style_map, paritygen
16
16
17 def _up(p):
17 def _up(p):
18 if p[0] != "/":
18 if p[0] != "/":
19 p = "/" + p
19 p = "/" + p
20 if p[-1] == "/":
20 if p[-1] == "/":
21 p = p[:-1]
21 p = p[:-1]
22 up = os.path.dirname(p)
22 up = os.path.dirname(p)
23 if up == "/":
23 if up == "/":
24 return "/"
24 return "/"
25 return up + "/"
25 return up + "/"
26
26
27 def revnavgen(pos, pagelen, limit, nodefunc):
27 def revnavgen(pos, pagelen, limit, nodefunc):
28 def seq(factor, limit=None):
28 def seq(factor, limit=None):
29 if limit:
29 if limit:
30 yield limit
30 yield limit
31 if limit >= 20 and limit <= 40:
31 if limit >= 20 and limit <= 40:
32 yield 50
32 yield 50
33 else:
33 else:
34 yield 1 * factor
34 yield 1 * factor
35 yield 3 * factor
35 yield 3 * factor
36 for f in seq(factor * 10):
36 for f in seq(factor * 10):
37 yield f
37 yield f
38
38
39 def nav(**map):
39 def nav(**map):
40 l = []
40 l = []
41 last = 0
41 last = 0
42 for f in seq(1, pagelen):
42 for f in seq(1, pagelen):
43 if f < pagelen or f <= last:
43 if f < pagelen or f <= last:
44 continue
44 continue
45 if f > limit:
45 if f > limit:
46 break
46 break
47 last = f
47 last = f
48 if pos + f < limit:
48 if pos + f < limit:
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
49 l.append(("+%d" % f, hex(nodefunc(pos + f).node())))
50 if pos - f >= 0:
50 if pos - f >= 0:
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
51 l.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node())))
52
52
53 try:
53 try:
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
54 yield {"label": "(0)", "node": hex(nodefunc('0').node())}
55
55
56 for label, node in l:
56 for label, node in l:
57 yield {"label": label, "node": node}
57 yield {"label": label, "node": node}
58
58
59 yield {"label": "tip", "node": "tip"}
59 yield {"label": "tip", "node": "tip"}
60 except hg.RepoError:
60 except hg.RepoError:
61 pass
61 pass
62
62
63 return nav
63 return nav
64
64
65 class hgweb(object):
65 class hgweb(object):
66 def __init__(self, repo, name=None):
66 def __init__(self, repo, name=None):
67 if type(repo) == type(""):
67 if type(repo) == type(""):
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
68 self.repo = hg.repository(ui.ui(report_untrusted=False), repo)
69 else:
69 else:
70 self.repo = repo
70 self.repo = repo
71
71
72 self.mtime = -1
72 self.mtime = -1
73 self.reponame = name
73 self.reponame = name
74 self.archives = 'zip', 'gz', 'bz2'
74 self.archives = 'zip', 'gz', 'bz2'
75 self.stripecount = 1
75 self.stripecount = 1
76 # a repo owner may set web.templates in .hg/hgrc to get any file
76 # a repo owner may set web.templates in .hg/hgrc to get any file
77 # readable by the user running the CGI script
77 # readable by the user running the CGI script
78 self.templatepath = self.config("web", "templates",
78 self.templatepath = self.config("web", "templates",
79 templater.templatepath(),
79 templater.templatepath(),
80 untrusted=False)
80 untrusted=False)
81
81
82 # The CGI scripts are often run by a user different from the repo owner.
82 # The CGI scripts are often run by a user different from the repo owner.
83 # Trust the settings from the .hg/hgrc files by default.
83 # Trust the settings from the .hg/hgrc files by default.
84 def config(self, section, name, default=None, untrusted=True):
84 def config(self, section, name, default=None, untrusted=True):
85 return self.repo.ui.config(section, name, default,
85 return self.repo.ui.config(section, name, default,
86 untrusted=untrusted)
86 untrusted=untrusted)
87
87
88 def configbool(self, section, name, default=False, untrusted=True):
88 def configbool(self, section, name, default=False, untrusted=True):
89 return self.repo.ui.configbool(section, name, default,
89 return self.repo.ui.configbool(section, name, default,
90 untrusted=untrusted)
90 untrusted=untrusted)
91
91
92 def configlist(self, section, name, default=None, untrusted=True):
92 def configlist(self, section, name, default=None, untrusted=True):
93 return self.repo.ui.configlist(section, name, default,
93 return self.repo.ui.configlist(section, name, default,
94 untrusted=untrusted)
94 untrusted=untrusted)
95
95
96 def refresh(self):
96 def refresh(self):
97 mtime = get_mtime(self.repo.root)
97 mtime = get_mtime(self.repo.root)
98 if mtime != self.mtime:
98 if mtime != self.mtime:
99 self.mtime = mtime
99 self.mtime = mtime
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
100 self.repo = hg.repository(self.repo.ui, self.repo.root)
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
101 self.maxchanges = int(self.config("web", "maxchanges", 10))
102 self.stripecount = int(self.config("web", "stripes", 1))
102 self.stripecount = int(self.config("web", "stripes", 1))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
103 self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
104 self.maxfiles = int(self.config("web", "maxfiles", 10))
105 self.allowpull = self.configbool("web", "allowpull", True)
105 self.allowpull = self.configbool("web", "allowpull", True)
106
106
107 def archivelist(self, nodeid):
107 def archivelist(self, nodeid):
108 allowed = self.configlist("web", "allow_archive")
108 allowed = self.configlist("web", "allow_archive")
109 for i, spec in self.archive_specs.iteritems():
109 for i, spec in self.archive_specs.iteritems():
110 if i in allowed or self.configbool("web", "allow" + i):
110 if i in allowed or self.configbool("web", "allow" + i):
111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
111 yield {"type" : i, "extension" : spec[2], "node" : nodeid}
112
112
113 def listfilediffs(self, files, changeset):
113 def listfilediffs(self, files, changeset):
114 for f in files[:self.maxfiles]:
114 for f in files[:self.maxfiles]:
115 yield self.t("filedifflink", node=hex(changeset), file=f)
115 yield self.t("filedifflink", node=hex(changeset), file=f)
116 if len(files) > self.maxfiles:
116 if len(files) > self.maxfiles:
117 yield self.t("fileellipses")
117 yield self.t("fileellipses")
118
118
119 def siblings(self, siblings=[], hiderev=None, **args):
119 def siblings(self, siblings=[], hiderev=None, **args):
120 siblings = [s for s in siblings if s.node() != nullid]
120 siblings = [s for s in siblings if s.node() != nullid]
121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
121 if len(siblings) == 1 and siblings[0].rev() == hiderev:
122 return
122 return
123 for s in siblings:
123 for s in siblings:
124 d = {'node': hex(s.node()), 'rev': s.rev()}
124 d = {'node': hex(s.node()), 'rev': s.rev()}
125 if hasattr(s, 'path'):
125 if hasattr(s, 'path'):
126 d['file'] = s.path()
126 d['file'] = s.path()
127 d.update(args)
127 d.update(args)
128 yield d
128 yield d
129
129
130 def renamelink(self, fl, node):
130 def renamelink(self, fl, node):
131 r = fl.renamed(node)
131 r = fl.renamed(node)
132 if r:
132 if r:
133 return [dict(file=r[0], node=hex(r[1]))]
133 return [dict(file=r[0], node=hex(r[1]))]
134 return []
134 return []
135
135
136 def showtag(self, t1, node=nullid, **args):
136 def showtag(self, t1, node=nullid, **args):
137 for t in self.repo.nodetags(node):
137 for t in self.repo.nodetags(node):
138 yield self.t(t1, tag=t, **args)
138 yield self.t(t1, tag=t, **args)
139
139
140 def diff(self, node1, node2, files):
140 def diff(self, node1, node2, files):
141 def filterfiles(filters, files):
141 def filterfiles(filters, files):
142 l = [x for x in files if x in filters]
142 l = [x for x in files if x in filters]
143
143
144 for t in filters:
144 for t in filters:
145 if t and t[-1] != os.sep:
145 if t and t[-1] != os.sep:
146 t += os.sep
146 t += os.sep
147 l += [x for x in files if x.startswith(t)]
147 l += [x for x in files if x.startswith(t)]
148 return l
148 return l
149
149
150 parity = [0]
150 parity = paritygen(self.stripecount)
151 def diffblock(diff, f, fn):
151 def diffblock(diff, f, fn):
152 yield self.t("diffblock",
152 yield self.t("diffblock",
153 lines=prettyprintlines(diff),
153 lines=prettyprintlines(diff),
154 parity=parity[0],
154 parity=parity.next(),
155 file=f,
155 file=f,
156 filenode=hex(fn or nullid))
156 filenode=hex(fn or nullid))
157 parity[0] = 1 - parity[0]
158
157
159 def prettyprintlines(diff):
158 def prettyprintlines(diff):
160 for l in diff.splitlines(1):
159 for l in diff.splitlines(1):
161 if l.startswith('+'):
160 if l.startswith('+'):
162 yield self.t("difflineplus", line=l)
161 yield self.t("difflineplus", line=l)
163 elif l.startswith('-'):
162 elif l.startswith('-'):
164 yield self.t("difflineminus", line=l)
163 yield self.t("difflineminus", line=l)
165 elif l.startswith('@'):
164 elif l.startswith('@'):
166 yield self.t("difflineat", line=l)
165 yield self.t("difflineat", line=l)
167 else:
166 else:
168 yield self.t("diffline", line=l)
167 yield self.t("diffline", line=l)
169
168
170 r = self.repo
169 r = self.repo
171 c1 = r.changectx(node1)
170 c1 = r.changectx(node1)
172 c2 = r.changectx(node2)
171 c2 = r.changectx(node2)
173 date1 = util.datestr(c1.date())
172 date1 = util.datestr(c1.date())
174 date2 = util.datestr(c2.date())
173 date2 = util.datestr(c2.date())
175
174
176 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
175 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
177 if files:
176 if files:
178 modified, added, removed = map(lambda x: filterfiles(files, x),
177 modified, added, removed = map(lambda x: filterfiles(files, x),
179 (modified, added, removed))
178 (modified, added, removed))
180
179
181 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
180 diffopts = patch.diffopts(self.repo.ui, untrusted=True)
182 for f in modified:
181 for f in modified:
183 to = c1.filectx(f).data()
182 to = c1.filectx(f).data()
184 tn = c2.filectx(f).data()
183 tn = c2.filectx(f).data()
185 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
184 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
186 opts=diffopts), f, tn)
185 opts=diffopts), f, tn)
187 for f in added:
186 for f in added:
188 to = None
187 to = None
189 tn = c2.filectx(f).data()
188 tn = c2.filectx(f).data()
190 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
189 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
191 opts=diffopts), f, tn)
190 opts=diffopts), f, tn)
192 for f in removed:
191 for f in removed:
193 to = c1.filectx(f).data()
192 to = c1.filectx(f).data()
194 tn = None
193 tn = None
195 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
194 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
196 opts=diffopts), f, tn)
195 opts=diffopts), f, tn)
197
196
198 def changelog(self, ctx, shortlog=False):
197 def changelog(self, ctx, shortlog=False):
199 def changelist(**map):
198 def changelist(**map):
200 parity = (start - end) & 1
201 cl = self.repo.changelog
199 cl = self.repo.changelog
202 l = [] # build a list in forward order for efficiency
200 l = [] # build a list in forward order for efficiency
203 for i in xrange(start, end):
201 for i in xrange(start, end):
204 ctx = self.repo.changectx(i)
202 ctx = self.repo.changectx(i)
205 n = ctx.node()
203 n = ctx.node()
206
204
207 l.insert(0, {"parity": parity,
205 l.insert(0, {"parity": parity.next(),
208 "author": ctx.user(),
206 "author": ctx.user(),
209 "parent": self.siblings(ctx.parents(), i - 1),
207 "parent": self.siblings(ctx.parents(), i - 1),
210 "child": self.siblings(ctx.children(), i + 1),
208 "child": self.siblings(ctx.children(), i + 1),
211 "changelogtag": self.showtag("changelogtag",n),
209 "changelogtag": self.showtag("changelogtag",n),
212 "desc": ctx.description(),
210 "desc": ctx.description(),
213 "date": ctx.date(),
211 "date": ctx.date(),
214 "files": self.listfilediffs(ctx.files(), n),
212 "files": self.listfilediffs(ctx.files(), n),
215 "rev": i,
213 "rev": i,
216 "node": hex(n)})
214 "node": hex(n)})
217 parity = 1 - parity
218
215
219 for e in l:
216 for e in l:
220 yield e
217 yield e
221
218
222 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
219 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
223 cl = self.repo.changelog
220 cl = self.repo.changelog
224 count = cl.count()
221 count = cl.count()
225 pos = ctx.rev()
222 pos = ctx.rev()
226 start = max(0, pos - maxchanges + 1)
223 start = max(0, pos - maxchanges + 1)
227 end = min(count, start + maxchanges)
224 end = min(count, start + maxchanges)
228 pos = end - 1
225 pos = end - 1
226 parity = paritygen(self.stripecount, offset=start-end)
229
227
230 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
228 changenav = revnavgen(pos, maxchanges, count, self.repo.changectx)
231
229
232 yield self.t(shortlog and 'shortlog' or 'changelog',
230 yield self.t(shortlog and 'shortlog' or 'changelog',
233 changenav=changenav,
231 changenav=changenav,
234 node=hex(cl.tip()),
232 node=hex(cl.tip()),
235 rev=pos, changesets=count, entries=changelist,
233 rev=pos, changesets=count, entries=changelist,
236 archives=self.archivelist("tip"))
234 archives=self.archivelist("tip"))
237
235
238 def search(self, query):
236 def search(self, query):
239
237
240 def changelist(**map):
238 def changelist(**map):
241 cl = self.repo.changelog
239 cl = self.repo.changelog
242 count = 0
240 count = 0
243 qw = query.lower().split()
241 qw = query.lower().split()
244
242
245 def revgen():
243 def revgen():
246 for i in xrange(cl.count() - 1, 0, -100):
244 for i in xrange(cl.count() - 1, 0, -100):
247 l = []
245 l = []
248 for j in xrange(max(0, i - 100), i):
246 for j in xrange(max(0, i - 100), i):
249 ctx = self.repo.changectx(j)
247 ctx = self.repo.changectx(j)
250 l.append(ctx)
248 l.append(ctx)
251 l.reverse()
249 l.reverse()
252 for e in l:
250 for e in l:
253 yield e
251 yield e
254
252
255 for ctx in revgen():
253 for ctx in revgen():
256 miss = 0
254 miss = 0
257 for q in qw:
255 for q in qw:
258 if not (q in ctx.user().lower() or
256 if not (q in ctx.user().lower() or
259 q in ctx.description().lower() or
257 q in ctx.description().lower() or
260 q in " ".join(ctx.files()).lower()):
258 q in " ".join(ctx.files()).lower()):
261 miss = 1
259 miss = 1
262 break
260 break
263 if miss:
261 if miss:
264 continue
262 continue
265
263
266 count += 1
264 count += 1
267 n = ctx.node()
265 n = ctx.node()
268
266
269 yield self.t('searchentry',
267 yield self.t('searchentry',
270 parity=self.stripes(count),
268 parity=parity.next(),
271 author=ctx.user(),
269 author=ctx.user(),
272 parent=self.siblings(ctx.parents()),
270 parent=self.siblings(ctx.parents()),
273 child=self.siblings(ctx.children()),
271 child=self.siblings(ctx.children()),
274 changelogtag=self.showtag("changelogtag",n),
272 changelogtag=self.showtag("changelogtag",n),
275 desc=ctx.description(),
273 desc=ctx.description(),
276 date=ctx.date(),
274 date=ctx.date(),
277 files=self.listfilediffs(ctx.files(), n),
275 files=self.listfilediffs(ctx.files(), n),
278 rev=ctx.rev(),
276 rev=ctx.rev(),
279 node=hex(n))
277 node=hex(n))
280
278
281 if count >= self.maxchanges:
279 if count >= self.maxchanges:
282 break
280 break
283
281
284 cl = self.repo.changelog
282 cl = self.repo.changelog
283 parity = paritygen(self.stripecount)
285
284
286 yield self.t('search',
285 yield self.t('search',
287 query=query,
286 query=query,
288 node=hex(cl.tip()),
287 node=hex(cl.tip()),
289 entries=changelist)
288 entries=changelist,
289 archives=self.archivelist("tip"))
290
290
291 def changeset(self, ctx):
291 def changeset(self, ctx):
292 n = ctx.node()
292 n = ctx.node()
293 parents = ctx.parents()
293 parents = ctx.parents()
294 p1 = parents[0].node()
294 p1 = parents[0].node()
295
295
296 files = []
296 files = []
297 parity = 0
297 parity = paritygen(self.stripecount)
298 for f in ctx.files():
298 for f in ctx.files():
299 files.append(self.t("filenodelink",
299 files.append(self.t("filenodelink",
300 node=hex(n), file=f,
300 node=hex(n), file=f,
301 parity=parity))
301 parity=parity.next()))
302 parity = 1 - parity
303
302
304 def diff(**map):
303 def diff(**map):
305 yield self.diff(p1, n, None)
304 yield self.diff(p1, n, None)
306
305
307 yield self.t('changeset',
306 yield self.t('changeset',
308 diff=diff,
307 diff=diff,
309 rev=ctx.rev(),
308 rev=ctx.rev(),
310 node=hex(n),
309 node=hex(n),
311 parent=self.siblings(parents),
310 parent=self.siblings(parents),
312 child=self.siblings(ctx.children()),
311 child=self.siblings(ctx.children()),
313 changesettag=self.showtag("changesettag",n),
312 changesettag=self.showtag("changesettag",n),
314 author=ctx.user(),
313 author=ctx.user(),
315 desc=ctx.description(),
314 desc=ctx.description(),
316 date=ctx.date(),
315 date=ctx.date(),
317 files=files,
316 files=files,
318 archives=self.archivelist(hex(n)))
317 archives=self.archivelist(hex(n)))
319
318
320 def filelog(self, fctx):
319 def filelog(self, fctx):
321 f = fctx.path()
320 f = fctx.path()
322 fl = fctx.filelog()
321 fl = fctx.filelog()
323 count = fl.count()
322 count = fl.count()
324 pagelen = self.maxshortchanges
323 pagelen = self.maxshortchanges
325 pos = fctx.filerev()
324 pos = fctx.filerev()
326 start = max(0, pos - pagelen + 1)
325 start = max(0, pos - pagelen + 1)
327 end = min(count, start + pagelen)
326 end = min(count, start + pagelen)
328 pos = end - 1
327 pos = end - 1
328 parity = paritygen(self.stripecount, offset=start-end)
329
329
330 def entries(**map):
330 def entries(**map):
331 l = []
331 l = []
332 parity = (count - 1) & 1
333
332
334 for i in xrange(start, end):
333 for i in xrange(start, end):
335 ctx = fctx.filectx(i)
334 ctx = fctx.filectx(i)
336 n = fl.node(i)
335 n = fl.node(i)
337
336
338 l.insert(0, {"parity": parity,
337 l.insert(0, {"parity": parity.next(),
339 "filerev": i,
338 "filerev": i,
340 "file": f,
339 "file": f,
341 "node": hex(ctx.node()),
340 "node": hex(ctx.node()),
342 "author": ctx.user(),
341 "author": ctx.user(),
343 "date": ctx.date(),
342 "date": ctx.date(),
344 "rename": self.renamelink(fl, n),
343 "rename": self.renamelink(fl, n),
345 "parent": self.siblings(fctx.parents()),
344 "parent": self.siblings(fctx.parents()),
346 "child": self.siblings(fctx.children()),
345 "child": self.siblings(fctx.children()),
347 "desc": ctx.description()})
346 "desc": ctx.description()})
348 parity = 1 - parity
349
347
350 for e in l:
348 for e in l:
351 yield e
349 yield e
352
350
353 nodefunc = lambda x: fctx.filectx(fileid=x)
351 nodefunc = lambda x: fctx.filectx(fileid=x)
354 nav = revnavgen(pos, pagelen, count, nodefunc)
352 nav = revnavgen(pos, pagelen, count, nodefunc)
355 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
353 yield self.t("filelog", file=f, node=hex(fctx.node()), nav=nav,
356 entries=entries)
354 entries=entries)
357
355
358 def filerevision(self, fctx):
356 def filerevision(self, fctx):
359 f = fctx.path()
357 f = fctx.path()
360 text = fctx.data()
358 text = fctx.data()
361 fl = fctx.filelog()
359 fl = fctx.filelog()
362 n = fctx.filenode()
360 n = fctx.filenode()
361 parity = paritygen(self.stripecount)
363
362
364 mt = mimetypes.guess_type(f)[0]
363 mt = mimetypes.guess_type(f)[0]
365 rawtext = text
364 rawtext = text
366 if util.binary(text):
365 if util.binary(text):
367 mt = mt or 'application/octet-stream'
366 mt = mt or 'application/octet-stream'
368 text = "(binary:%s)" % mt
367 text = "(binary:%s)" % mt
369 mt = mt or 'text/plain'
368 mt = mt or 'text/plain'
370
369
371 def lines():
370 def lines():
372 for l, t in enumerate(text.splitlines(1)):
371 for l, t in enumerate(text.splitlines(1)):
373 yield {"line": t,
372 yield {"line": t,
374 "linenumber": "% 6d" % (l + 1),
373 "linenumber": "% 6d" % (l + 1),
375 "parity": self.stripes(l)}
374 "parity": parity.next()}
376
375
377 yield self.t("filerevision",
376 yield self.t("filerevision",
378 file=f,
377 file=f,
379 path=_up(f),
378 path=_up(f),
380 text=lines(),
379 text=lines(),
381 raw=rawtext,
380 raw=rawtext,
382 mimetype=mt,
381 mimetype=mt,
383 rev=fctx.rev(),
382 rev=fctx.rev(),
384 node=hex(fctx.node()),
383 node=hex(fctx.node()),
385 author=fctx.user(),
384 author=fctx.user(),
386 date=fctx.date(),
385 date=fctx.date(),
387 desc=fctx.description(),
386 desc=fctx.description(),
388 parent=self.siblings(fctx.parents()),
387 parent=self.siblings(fctx.parents()),
389 child=self.siblings(fctx.children()),
388 child=self.siblings(fctx.children()),
390 rename=self.renamelink(fl, n),
389 rename=self.renamelink(fl, n),
391 permissions=fctx.manifest().execf(f))
390 permissions=fctx.manifest().execf(f))
392
391
393 def fileannotate(self, fctx):
392 def fileannotate(self, fctx):
394 f = fctx.path()
393 f = fctx.path()
395 n = fctx.filenode()
394 n = fctx.filenode()
396 fl = fctx.filelog()
395 fl = fctx.filelog()
396 parity = paritygen(self.stripecount)
397
397
398 def annotate(**map):
398 def annotate(**map):
399 parity = 0
400 last = None
399 last = None
401 for f, l in fctx.annotate(follow=True):
400 for f, l in fctx.annotate(follow=True):
402 fnode = f.filenode()
401 fnode = f.filenode()
403 name = self.repo.ui.shortuser(f.user())
402 name = self.repo.ui.shortuser(f.user())
404
403
405 if last != fnode:
404 if last != fnode:
406 parity = 1 - parity
407 last = fnode
405 last = fnode
408
406
409 yield {"parity": parity,
407 yield {"parity": parity.next(),
410 "node": hex(f.node()),
408 "node": hex(f.node()),
411 "rev": f.rev(),
409 "rev": f.rev(),
412 "author": name,
410 "author": name,
413 "file": f.path(),
411 "file": f.path(),
414 "line": l}
412 "line": l}
415
413
416 yield self.t("fileannotate",
414 yield self.t("fileannotate",
417 file=f,
415 file=f,
418 annotate=annotate,
416 annotate=annotate,
419 path=_up(f),
417 path=_up(f),
420 rev=fctx.rev(),
418 rev=fctx.rev(),
421 node=hex(fctx.node()),
419 node=hex(fctx.node()),
422 author=fctx.user(),
420 author=fctx.user(),
423 date=fctx.date(),
421 date=fctx.date(),
424 desc=fctx.description(),
422 desc=fctx.description(),
425 rename=self.renamelink(fl, n),
423 rename=self.renamelink(fl, n),
426 parent=self.siblings(fctx.parents()),
424 parent=self.siblings(fctx.parents()),
427 child=self.siblings(fctx.children()),
425 child=self.siblings(fctx.children()),
428 permissions=fctx.manifest().execf(f))
426 permissions=fctx.manifest().execf(f))
429
427
430 def manifest(self, ctx, path):
428 def manifest(self, ctx, path):
431 mf = ctx.manifest()
429 mf = ctx.manifest()
432 node = ctx.node()
430 node = ctx.node()
433
431
434 files = {}
432 files = {}
433 parity = paritygen(self.stripecount)
435
434
436 if path and path[-1] != "/":
435 if path and path[-1] != "/":
437 path += "/"
436 path += "/"
438 l = len(path)
437 l = len(path)
439 abspath = "/" + path
438 abspath = "/" + path
440
439
441 for f, n in mf.items():
440 for f, n in mf.items():
442 if f[:l] != path:
441 if f[:l] != path:
443 continue
442 continue
444 remain = f[l:]
443 remain = f[l:]
445 if "/" in remain:
444 if "/" in remain:
446 short = remain[:remain.index("/") + 1] # bleah
445 short = remain[:remain.index("/") + 1] # bleah
447 files[short] = (f, None)
446 files[short] = (f, None)
448 else:
447 else:
449 short = os.path.basename(remain)
448 short = os.path.basename(remain)
450 files[short] = (f, n)
449 files[short] = (f, n)
451
450
452 def filelist(**map):
451 def filelist(**map):
453 parity = 0
454 fl = files.keys()
452 fl = files.keys()
455 fl.sort()
453 fl.sort()
456 for f in fl:
454 for f in fl:
457 full, fnode = files[f]
455 full, fnode = files[f]
458 if not fnode:
456 if not fnode:
459 continue
457 continue
460
458
461 yield {"file": full,
459 yield {"file": full,
462 "parity": self.stripes(parity),
460 "parity": parity.next(),
463 "basename": f,
461 "basename": f,
464 "size": ctx.filectx(full).size(),
462 "size": ctx.filectx(full).size(),
465 "permissions": mf.execf(full)}
463 "permissions": mf.execf(full)}
466 parity += 1
467
464
468 def dirlist(**map):
465 def dirlist(**map):
469 parity = 0
470 fl = files.keys()
466 fl = files.keys()
471 fl.sort()
467 fl.sort()
472 for f in fl:
468 for f in fl:
473 full, fnode = files[f]
469 full, fnode = files[f]
474 if fnode:
470 if fnode:
475 continue
471 continue
476
472
477 yield {"parity": self.stripes(parity),
473 yield {"parity": parity.next(),
478 "path": os.path.join(abspath, f),
474 "path": os.path.join(abspath, f),
479 "basename": f[:-1]}
475 "basename": f[:-1]}
480 parity += 1
481
476
482 yield self.t("manifest",
477 yield self.t("manifest",
483 rev=ctx.rev(),
478 rev=ctx.rev(),
484 node=hex(node),
479 node=hex(node),
485 path=abspath,
480 path=abspath,
486 up=_up(abspath),
481 up=_up(abspath),
482 upparity=parity.next(),
487 fentries=filelist,
483 fentries=filelist,
488 dentries=dirlist,
484 dentries=dirlist,
489 archives=self.archivelist(hex(node)))
485 archives=self.archivelist(hex(node)))
490
486
491 def tags(self):
487 def tags(self):
492 i = self.repo.tagslist()
488 i = self.repo.tagslist()
493 i.reverse()
489 i.reverse()
490 parity = paritygen(self.stripecount)
494
491
495 def entries(notip=False, **map):
492 def entries(notip=False, **map):
496 parity = 0
497 for k, n in i:
493 for k, n in i:
498 if notip and k == "tip":
494 if notip and k == "tip":
499 continue
495 continue
500 yield {"parity": self.stripes(parity),
496 yield {"parity": parity.next(),
501 "tag": k,
497 "tag": k,
502 "date": self.repo.changectx(n).date(),
498 "date": self.repo.changectx(n).date(),
503 "node": hex(n)}
499 "node": hex(n)}
504 parity += 1
505
500
506 yield self.t("tags",
501 yield self.t("tags",
507 node=hex(self.repo.changelog.tip()),
502 node=hex(self.repo.changelog.tip()),
508 entries=lambda **x: entries(False, **x),
503 entries=lambda **x: entries(False, **x),
509 entriesnotip=lambda **x: entries(True, **x))
504 entriesnotip=lambda **x: entries(True, **x))
510
505
511 def summary(self):
506 def summary(self):
512 i = self.repo.tagslist()
507 i = self.repo.tagslist()
513 i.reverse()
508 i.reverse()
514
509
515 def tagentries(**map):
510 def tagentries(**map):
516 parity = 0
511 parity = paritygen(self.stripecount)
517 count = 0
512 count = 0
518 for k, n in i:
513 for k, n in i:
519 if k == "tip": # skip tip
514 if k == "tip": # skip tip
520 continue;
515 continue;
521
516
522 count += 1
517 count += 1
523 if count > 10: # limit to 10 tags
518 if count > 10: # limit to 10 tags
524 break;
519 break;
525
520
526 yield self.t("tagentry",
521 yield self.t("tagentry",
527 parity=self.stripes(parity),
522 parity=parity.next(),
528 tag=k,
523 tag=k,
529 node=hex(n),
524 node=hex(n),
530 date=self.repo.changectx(n).date())
525 date=self.repo.changectx(n).date())
531 parity += 1
532
526
533
527
534 def branches(**map):
528 def branches(**map):
535 parity = 0
529 parity = paritygen(self.stripecount)
536
530
537 b = self.repo.branchtags()
531 b = self.repo.branchtags()
538 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
532 l = [(-self.repo.changelog.rev(n), n, t) for t, n in b.items()]
539 l.sort()
533 l.sort()
540
534
541 for r,n,t in l:
535 for r,n,t in l:
542 ctx = self.repo.changectx(n)
536 ctx = self.repo.changectx(n)
543
537
544 yield {'parity': self.stripes(parity),
538 yield {'parity': parity.next(),
545 'branch': t,
539 'branch': t,
546 'node': hex(n),
540 'node': hex(n),
547 'date': ctx.date()}
541 'date': ctx.date()}
548 parity += 1
549
542
550 def changelist(**map):
543 def changelist(**map):
551 parity = 0
544 parity = paritygen(self.stripecount, offset=start-end)
552 l = [] # build a list in forward order for efficiency
545 l = [] # build a list in forward order for efficiency
553 for i in xrange(start, end):
546 for i in xrange(start, end):
554 ctx = self.repo.changectx(i)
547 ctx = self.repo.changectx(i)
555 hn = hex(ctx.node())
548 hn = hex(ctx.node())
556
549
557 l.insert(0, self.t(
550 l.insert(0, self.t(
558 'shortlogentry',
551 'shortlogentry',
559 parity=parity,
552 parity=parity.next(),
560 author=ctx.user(),
553 author=ctx.user(),
561 desc=ctx.description(),
554 desc=ctx.description(),
562 date=ctx.date(),
555 date=ctx.date(),
563 rev=i,
556 rev=i,
564 node=hn))
557 node=hn))
565 parity = 1 - parity
566
558
567 yield l
559 yield l
568
560
569 cl = self.repo.changelog
561 cl = self.repo.changelog
570 count = cl.count()
562 count = cl.count()
571 start = max(0, count - self.maxchanges)
563 start = max(0, count - self.maxchanges)
572 end = min(count, start + self.maxchanges)
564 end = min(count, start + self.maxchanges)
573
565
574 yield self.t("summary",
566 yield self.t("summary",
575 desc=self.config("web", "description", "unknown"),
567 desc=self.config("web", "description", "unknown"),
576 owner=(self.config("ui", "username") or # preferred
568 owner=(self.config("ui", "username") or # preferred
577 self.config("web", "contact") or # deprecated
569 self.config("web", "contact") or # deprecated
578 self.config("web", "author", "unknown")), # also
570 self.config("web", "author", "unknown")), # also
579 lastchange=cl.read(cl.tip())[2],
571 lastchange=cl.read(cl.tip())[2],
580 tags=tagentries,
572 tags=tagentries,
581 branches=branches,
573 branches=branches,
582 shortlog=changelist,
574 shortlog=changelist,
583 node=hex(cl.tip()),
575 node=hex(cl.tip()),
584 archives=self.archivelist("tip"))
576 archives=self.archivelist("tip"))
585
577
586 def filediff(self, fctx):
578 def filediff(self, fctx):
587 n = fctx.node()
579 n = fctx.node()
588 path = fctx.path()
580 path = fctx.path()
589 parents = fctx.parents()
581 parents = fctx.parents()
590 p1 = parents and parents[0].node() or nullid
582 p1 = parents and parents[0].node() or nullid
591
583
592 def diff(**map):
584 def diff(**map):
593 yield self.diff(p1, n, [path])
585 yield self.diff(p1, n, [path])
594
586
595 yield self.t("filediff",
587 yield self.t("filediff",
596 file=path,
588 file=path,
597 node=hex(n),
589 node=hex(n),
598 rev=fctx.rev(),
590 rev=fctx.rev(),
599 parent=self.siblings(parents),
591 parent=self.siblings(parents),
600 child=self.siblings(fctx.children()),
592 child=self.siblings(fctx.children()),
601 diff=diff)
593 diff=diff)
602
594
603 archive_specs = {
595 archive_specs = {
604 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
596 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
605 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
597 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
606 'zip': ('application/zip', 'zip', '.zip', None),
598 'zip': ('application/zip', 'zip', '.zip', None),
607 }
599 }
608
600
609 def archive(self, req, id, type_):
601 def archive(self, req, id, type_):
610 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
602 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
611 cnode = self.repo.lookup(id)
603 cnode = self.repo.lookup(id)
612 arch_version = id
604 arch_version = id
613 if cnode == id or id == 'tip':
605 if cnode == id or id == 'tip':
614 arch_version = short(cnode)
606 arch_version = short(cnode)
615 name = "%s-%s" % (reponame, arch_version)
607 name = "%s-%s" % (reponame, arch_version)
616 mimetype, artype, extension, encoding = self.archive_specs[type_]
608 mimetype, artype, extension, encoding = self.archive_specs[type_]
617 headers = [('Content-type', mimetype),
609 headers = [('Content-type', mimetype),
618 ('Content-disposition', 'attachment; filename=%s%s' %
610 ('Content-disposition', 'attachment; filename=%s%s' %
619 (name, extension))]
611 (name, extension))]
620 if encoding:
612 if encoding:
621 headers.append(('Content-encoding', encoding))
613 headers.append(('Content-encoding', encoding))
622 req.header(headers)
614 req.header(headers)
623 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
615 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
624
616
625 # add tags to things
617 # add tags to things
626 # tags -> list of changesets corresponding to tags
618 # tags -> list of changesets corresponding to tags
627 # find tag, changeset, file
619 # find tag, changeset, file
628
620
629 def cleanpath(self, path):
621 def cleanpath(self, path):
630 path = path.lstrip('/')
622 path = path.lstrip('/')
631 return util.canonpath(self.repo.root, '', path)
623 return util.canonpath(self.repo.root, '', path)
632
624
633 def run(self):
625 def run(self):
634 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
626 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
635 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
627 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
636 import mercurial.hgweb.wsgicgi as wsgicgi
628 import mercurial.hgweb.wsgicgi as wsgicgi
637 from request import wsgiapplication
629 from request import wsgiapplication
638 def make_web_app():
630 def make_web_app():
639 return self
631 return self
640 wsgicgi.launch(wsgiapplication(make_web_app))
632 wsgicgi.launch(wsgiapplication(make_web_app))
641
633
642 def run_wsgi(self, req):
634 def run_wsgi(self, req):
643 def header(**map):
635 def header(**map):
644 header_file = cStringIO.StringIO(
636 header_file = cStringIO.StringIO(
645 ''.join(self.t("header", encoding=util._encoding, **map)))
637 ''.join(self.t("header", encoding=util._encoding, **map)))
646 msg = mimetools.Message(header_file, 0)
638 msg = mimetools.Message(header_file, 0)
647 req.header(msg.items())
639 req.header(msg.items())
648 yield header_file.read()
640 yield header_file.read()
649
641
650 def rawfileheader(**map):
642 def rawfileheader(**map):
651 req.header([('Content-type', map['mimetype']),
643 req.header([('Content-type', map['mimetype']),
652 ('Content-disposition', 'filename=%s' % map['file']),
644 ('Content-disposition', 'filename=%s' % map['file']),
653 ('Content-length', str(len(map['raw'])))])
645 ('Content-length', str(len(map['raw'])))])
654 yield ''
646 yield ''
655
647
656 def footer(**map):
648 def footer(**map):
657 yield self.t("footer", **map)
649 yield self.t("footer", **map)
658
650
659 def motd(**map):
651 def motd(**map):
660 yield self.config("web", "motd", "")
652 yield self.config("web", "motd", "")
661
653
662 def expand_form(form):
654 def expand_form(form):
663 shortcuts = {
655 shortcuts = {
664 'cl': [('cmd', ['changelog']), ('rev', None)],
656 'cl': [('cmd', ['changelog']), ('rev', None)],
665 'sl': [('cmd', ['shortlog']), ('rev', None)],
657 'sl': [('cmd', ['shortlog']), ('rev', None)],
666 'cs': [('cmd', ['changeset']), ('node', None)],
658 'cs': [('cmd', ['changeset']), ('node', None)],
667 'f': [('cmd', ['file']), ('filenode', None)],
659 'f': [('cmd', ['file']), ('filenode', None)],
668 'fl': [('cmd', ['filelog']), ('filenode', None)],
660 'fl': [('cmd', ['filelog']), ('filenode', None)],
669 'fd': [('cmd', ['filediff']), ('node', None)],
661 'fd': [('cmd', ['filediff']), ('node', None)],
670 'fa': [('cmd', ['annotate']), ('filenode', None)],
662 'fa': [('cmd', ['annotate']), ('filenode', None)],
671 'mf': [('cmd', ['manifest']), ('manifest', None)],
663 'mf': [('cmd', ['manifest']), ('manifest', None)],
672 'ca': [('cmd', ['archive']), ('node', None)],
664 'ca': [('cmd', ['archive']), ('node', None)],
673 'tags': [('cmd', ['tags'])],
665 'tags': [('cmd', ['tags'])],
674 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
666 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
675 'static': [('cmd', ['static']), ('file', None)]
667 'static': [('cmd', ['static']), ('file', None)]
676 }
668 }
677
669
678 for k in shortcuts.iterkeys():
670 for k in shortcuts.iterkeys():
679 if form.has_key(k):
671 if form.has_key(k):
680 for name, value in shortcuts[k]:
672 for name, value in shortcuts[k]:
681 if value is None:
673 if value is None:
682 value = form[k]
674 value = form[k]
683 form[name] = value
675 form[name] = value
684 del form[k]
676 del form[k]
685
677
686 def rewrite_request(req):
678 def rewrite_request(req):
687 '''translate new web interface to traditional format'''
679 '''translate new web interface to traditional format'''
688
680
689 def spliturl(req):
681 def spliturl(req):
690 def firstitem(query):
682 def firstitem(query):
691 return query.split('&', 1)[0].split(';', 1)[0]
683 return query.split('&', 1)[0].split(';', 1)[0]
692
684
693 def normurl(url):
685 def normurl(url):
694 inner = '/'.join([x for x in url.split('/') if x])
686 inner = '/'.join([x for x in url.split('/') if x])
695 tl = len(url) > 1 and url.endswith('/') and '/' or ''
687 tl = len(url) > 1 and url.endswith('/') and '/' or ''
696
688
697 return '%s%s%s' % (url.startswith('/') and '/' or '',
689 return '%s%s%s' % (url.startswith('/') and '/' or '',
698 inner, tl)
690 inner, tl)
699
691
700 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
692 root = normurl(urllib.unquote(req.env.get('REQUEST_URI', '').split('?', 1)[0]))
701 pi = normurl(req.env.get('PATH_INFO', ''))
693 pi = normurl(req.env.get('PATH_INFO', ''))
702 if pi:
694 if pi:
703 # strip leading /
695 # strip leading /
704 pi = pi[1:]
696 pi = pi[1:]
705 if pi:
697 if pi:
706 root = root[:root.rfind(pi)]
698 root = root[:root.rfind(pi)]
707 if req.env.has_key('REPO_NAME'):
699 if req.env.has_key('REPO_NAME'):
708 rn = req.env['REPO_NAME'] + '/'
700 rn = req.env['REPO_NAME'] + '/'
709 root += rn
701 root += rn
710 query = pi[len(rn):]
702 query = pi[len(rn):]
711 else:
703 else:
712 query = pi
704 query = pi
713 else:
705 else:
714 root += '?'
706 root += '?'
715 query = firstitem(req.env['QUERY_STRING'])
707 query = firstitem(req.env['QUERY_STRING'])
716
708
717 return (root, query)
709 return (root, query)
718
710
719 req.url, query = spliturl(req)
711 req.url, query = spliturl(req)
720
712
721 if req.form.has_key('cmd'):
713 if req.form.has_key('cmd'):
722 # old style
714 # old style
723 return
715 return
724
716
725 args = query.split('/', 2)
717 args = query.split('/', 2)
726 if not args or not args[0]:
718 if not args or not args[0]:
727 return
719 return
728
720
729 cmd = args.pop(0)
721 cmd = args.pop(0)
730 style = cmd.rfind('-')
722 style = cmd.rfind('-')
731 if style != -1:
723 if style != -1:
732 req.form['style'] = [cmd[:style]]
724 req.form['style'] = [cmd[:style]]
733 cmd = cmd[style+1:]
725 cmd = cmd[style+1:]
734 # avoid accepting e.g. style parameter as command
726 # avoid accepting e.g. style parameter as command
735 if hasattr(self, 'do_' + cmd):
727 if hasattr(self, 'do_' + cmd):
736 req.form['cmd'] = [cmd]
728 req.form['cmd'] = [cmd]
737
729
738 if args and args[0]:
730 if args and args[0]:
739 node = args.pop(0)
731 node = args.pop(0)
740 req.form['node'] = [node]
732 req.form['node'] = [node]
741 if args:
733 if args:
742 req.form['file'] = args
734 req.form['file'] = args
743
735
744 if cmd == 'static':
736 if cmd == 'static':
745 req.form['file'] = req.form['node']
737 req.form['file'] = req.form['node']
746 elif cmd == 'archive':
738 elif cmd == 'archive':
747 fn = req.form['node'][0]
739 fn = req.form['node'][0]
748 for type_, spec in self.archive_specs.iteritems():
740 for type_, spec in self.archive_specs.iteritems():
749 ext = spec[2]
741 ext = spec[2]
750 if fn.endswith(ext):
742 if fn.endswith(ext):
751 req.form['node'] = [fn[:-len(ext)]]
743 req.form['node'] = [fn[:-len(ext)]]
752 req.form['type'] = [type_]
744 req.form['type'] = [type_]
753
745
754 def sessionvars(**map):
746 def sessionvars(**map):
755 fields = []
747 fields = []
756 if req.form.has_key('style'):
748 if req.form.has_key('style'):
757 style = req.form['style'][0]
749 style = req.form['style'][0]
758 if style != self.config('web', 'style', ''):
750 if style != self.config('web', 'style', ''):
759 fields.append(('style', style))
751 fields.append(('style', style))
760
752
761 separator = req.url[-1] == '?' and ';' or '?'
753 separator = req.url[-1] == '?' and ';' or '?'
762 for name, value in fields:
754 for name, value in fields:
763 yield dict(name=name, value=value, separator=separator)
755 yield dict(name=name, value=value, separator=separator)
764 separator = ';'
756 separator = ';'
765
757
766 self.refresh()
758 self.refresh()
767
759
768 expand_form(req.form)
760 expand_form(req.form)
769 rewrite_request(req)
761 rewrite_request(req)
770
762
771 style = self.config("web", "style", "")
763 style = self.config("web", "style", "")
772 if req.form.has_key('style'):
764 if req.form.has_key('style'):
773 style = req.form['style'][0]
765 style = req.form['style'][0]
774 mapfile = style_map(self.templatepath, style)
766 mapfile = style_map(self.templatepath, style)
775
767
776 port = req.env["SERVER_PORT"]
768 port = req.env["SERVER_PORT"]
777 port = port != "80" and (":" + port) or ""
769 port = port != "80" and (":" + port) or ""
778 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
770 urlbase = 'http://%s%s' % (req.env['SERVER_NAME'], port)
779 staticurl = self.config("web", "staticurl") or req.url + 'static/'
771 staticurl = self.config("web", "staticurl") or req.url + 'static/'
780 if not staticurl.endswith('/'):
772 if not staticurl.endswith('/'):
781 staticurl += '/'
773 staticurl += '/'
782
774
783 if not self.reponame:
775 if not self.reponame:
784 self.reponame = (self.config("web", "name")
776 self.reponame = (self.config("web", "name")
785 or req.env.get('REPO_NAME')
777 or req.env.get('REPO_NAME')
786 or req.url.strip('/') or self.repo.root)
778 or req.url.strip('/') or self.repo.root)
787
779
788 self.t = templater.templater(mapfile, templater.common_filters,
780 self.t = templater.templater(mapfile, templater.common_filters,
789 defaults={"url": req.url,
781 defaults={"url": req.url,
790 "staticurl": staticurl,
782 "staticurl": staticurl,
791 "urlbase": urlbase,
783 "urlbase": urlbase,
792 "repo": self.reponame,
784 "repo": self.reponame,
793 "header": header,
785 "header": header,
794 "footer": footer,
786 "footer": footer,
795 "motd": motd,
787 "motd": motd,
796 "rawfileheader": rawfileheader,
788 "rawfileheader": rawfileheader,
797 "sessionvars": sessionvars
789 "sessionvars": sessionvars
798 })
790 })
799
791
800 try:
792 try:
801 if not req.form.has_key('cmd'):
793 if not req.form.has_key('cmd'):
802 req.form['cmd'] = [self.t.cache['default']]
794 req.form['cmd'] = [self.t.cache['default']]
803
795
804 cmd = req.form['cmd'][0]
796 cmd = req.form['cmd'][0]
805
797
806 method = getattr(self, 'do_' + cmd, None)
798 method = getattr(self, 'do_' + cmd, None)
807 if method:
799 if method:
808 try:
800 try:
809 method(req)
801 method(req)
810 except (hg.RepoError, revlog.RevlogError), inst:
802 except (hg.RepoError, revlog.RevlogError), inst:
811 req.write(self.t("error", error=str(inst)))
803 req.write(self.t("error", error=str(inst)))
812 else:
804 else:
813 req.write(self.t("error", error='No such method: ' + cmd))
805 req.write(self.t("error", error='No such method: ' + cmd))
814 finally:
806 finally:
815 self.t = None
807 self.t = None
816
808
817 def changectx(self, req):
809 def changectx(self, req):
818 if req.form.has_key('node'):
810 if req.form.has_key('node'):
819 changeid = req.form['node'][0]
811 changeid = req.form['node'][0]
820 elif req.form.has_key('manifest'):
812 elif req.form.has_key('manifest'):
821 changeid = req.form['manifest'][0]
813 changeid = req.form['manifest'][0]
822 else:
814 else:
823 changeid = self.repo.changelog.count() - 1
815 changeid = self.repo.changelog.count() - 1
824
816
825 try:
817 try:
826 ctx = self.repo.changectx(changeid)
818 ctx = self.repo.changectx(changeid)
827 except hg.RepoError:
819 except hg.RepoError:
828 man = self.repo.manifest
820 man = self.repo.manifest
829 mn = man.lookup(changeid)
821 mn = man.lookup(changeid)
830 ctx = self.repo.changectx(man.linkrev(mn))
822 ctx = self.repo.changectx(man.linkrev(mn))
831
823
832 return ctx
824 return ctx
833
825
834 def filectx(self, req):
826 def filectx(self, req):
835 path = self.cleanpath(req.form['file'][0])
827 path = self.cleanpath(req.form['file'][0])
836 if req.form.has_key('node'):
828 if req.form.has_key('node'):
837 changeid = req.form['node'][0]
829 changeid = req.form['node'][0]
838 else:
830 else:
839 changeid = req.form['filenode'][0]
831 changeid = req.form['filenode'][0]
840 try:
832 try:
841 ctx = self.repo.changectx(changeid)
833 ctx = self.repo.changectx(changeid)
842 fctx = ctx.filectx(path)
834 fctx = ctx.filectx(path)
843 except hg.RepoError:
835 except hg.RepoError:
844 fctx = self.repo.filectx(path, fileid=changeid)
836 fctx = self.repo.filectx(path, fileid=changeid)
845
837
846 return fctx
838 return fctx
847
839
848 def stripes(self, parity):
849 "make horizontal stripes for easier reading"
850 if self.stripecount:
851 return (1 + parity / self.stripecount) & 1
852 else:
853 return 0
854
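# Illustrative note, not part of the changeset: the stripes() helper removed
# above derived row shading from a running integer and self.stripecount; the
# rest of this diff replaces that pattern with the paritygen() generator from
# hgweb/common.py, roughly:
#
#     parity = paritygen(self.stripecount)      # once per listing
#     ...
#     yield {"parity": parity.next(), ...}      # once per rendered row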
855 def do_log(self, req):
840 def do_log(self, req):
856 if req.form.has_key('file') and req.form['file'][0]:
841 if req.form.has_key('file') and req.form['file'][0]:
857 self.do_filelog(req)
842 self.do_filelog(req)
858 else:
843 else:
859 self.do_changelog(req)
844 self.do_changelog(req)
860
845
861 def do_rev(self, req):
846 def do_rev(self, req):
862 self.do_changeset(req)
847 self.do_changeset(req)
863
848
864 def do_file(self, req):
849 def do_file(self, req):
865 path = self.cleanpath(req.form.get('file', [''])[0])
850 path = self.cleanpath(req.form.get('file', [''])[0])
866 if path:
851 if path:
867 try:
852 try:
868 req.write(self.filerevision(self.filectx(req)))
853 req.write(self.filerevision(self.filectx(req)))
869 return
854 return
870 except revlog.LookupError:
855 except revlog.LookupError:
871 pass
856 pass
872
857
873 req.write(self.manifest(self.changectx(req), path))
858 req.write(self.manifest(self.changectx(req), path))
874
859
875 def do_diff(self, req):
860 def do_diff(self, req):
876 self.do_filediff(req)
861 self.do_filediff(req)
877
862
878 def do_changelog(self, req, shortlog = False):
863 def do_changelog(self, req, shortlog = False):
879 if req.form.has_key('node'):
864 if req.form.has_key('node'):
880 ctx = self.changectx(req)
865 ctx = self.changectx(req)
881 else:
866 else:
882 if req.form.has_key('rev'):
867 if req.form.has_key('rev'):
883 hi = req.form['rev'][0]
868 hi = req.form['rev'][0]
884 else:
869 else:
885 hi = self.repo.changelog.count() - 1
870 hi = self.repo.changelog.count() - 1
886 try:
871 try:
887 ctx = self.repo.changectx(hi)
872 ctx = self.repo.changectx(hi)
888 except hg.RepoError:
873 except hg.RepoError:
889 req.write(self.search(hi)) # XXX redirect to 404 page?
874 req.write(self.search(hi)) # XXX redirect to 404 page?
890 return
875 return
891
876
892 req.write(self.changelog(ctx, shortlog = shortlog))
877 req.write(self.changelog(ctx, shortlog = shortlog))
893
878
894 def do_shortlog(self, req):
879 def do_shortlog(self, req):
895 self.do_changelog(req, shortlog = True)
880 self.do_changelog(req, shortlog = True)
896
881
897 def do_changeset(self, req):
882 def do_changeset(self, req):
898 req.write(self.changeset(self.changectx(req)))
883 req.write(self.changeset(self.changectx(req)))
899
884
900 def do_manifest(self, req):
885 def do_manifest(self, req):
901 req.write(self.manifest(self.changectx(req),
886 req.write(self.manifest(self.changectx(req),
902 self.cleanpath(req.form['path'][0])))
887 self.cleanpath(req.form['path'][0])))
903
888
904 def do_tags(self, req):
889 def do_tags(self, req):
905 req.write(self.tags())
890 req.write(self.tags())
906
891
907 def do_summary(self, req):
892 def do_summary(self, req):
908 req.write(self.summary())
893 req.write(self.summary())
909
894
910 def do_filediff(self, req):
895 def do_filediff(self, req):
911 req.write(self.filediff(self.filectx(req)))
896 req.write(self.filediff(self.filectx(req)))
912
897
913 def do_annotate(self, req):
898 def do_annotate(self, req):
914 req.write(self.fileannotate(self.filectx(req)))
899 req.write(self.fileannotate(self.filectx(req)))
915
900
916 def do_filelog(self, req):
901 def do_filelog(self, req):
917 req.write(self.filelog(self.filectx(req)))
902 req.write(self.filelog(self.filectx(req)))
918
903
919 def do_lookup(self, req):
904 def do_lookup(self, req):
920 try:
905 try:
921 r = hex(self.repo.lookup(req.form['key'][0]))
906 r = hex(self.repo.lookup(req.form['key'][0]))
922 success = 1
907 success = 1
923 except Exception,inst:
908 except Exception,inst:
924 r = str(inst)
909 r = str(inst)
925 success = 0
910 success = 0
926 resp = "%s %s\n" % (success, r)
911 resp = "%s %s\n" % (success, r)
927 req.httphdr("application/mercurial-0.1", length=len(resp))
912 req.httphdr("application/mercurial-0.1", length=len(resp))
928 req.write(resp)
913 req.write(resp)
929
914
930 def do_heads(self, req):
915 def do_heads(self, req):
931 resp = " ".join(map(hex, self.repo.heads())) + "\n"
916 resp = " ".join(map(hex, self.repo.heads())) + "\n"
932 req.httphdr("application/mercurial-0.1", length=len(resp))
917 req.httphdr("application/mercurial-0.1", length=len(resp))
933 req.write(resp)
918 req.write(resp)
934
919
935 def do_branches(self, req):
920 def do_branches(self, req):
936 nodes = []
921 nodes = []
937 if req.form.has_key('nodes'):
922 if req.form.has_key('nodes'):
938 nodes = map(bin, req.form['nodes'][0].split(" "))
923 nodes = map(bin, req.form['nodes'][0].split(" "))
939 resp = cStringIO.StringIO()
924 resp = cStringIO.StringIO()
940 for b in self.repo.branches(nodes):
925 for b in self.repo.branches(nodes):
941 resp.write(" ".join(map(hex, b)) + "\n")
926 resp.write(" ".join(map(hex, b)) + "\n")
942 resp = resp.getvalue()
927 resp = resp.getvalue()
943 req.httphdr("application/mercurial-0.1", length=len(resp))
928 req.httphdr("application/mercurial-0.1", length=len(resp))
944 req.write(resp)
929 req.write(resp)
945
930
946 def do_between(self, req):
931 def do_between(self, req):
947 if req.form.has_key('pairs'):
932 if req.form.has_key('pairs'):
948 pairs = [map(bin, p.split("-"))
933 pairs = [map(bin, p.split("-"))
949 for p in req.form['pairs'][0].split(" ")]
934 for p in req.form['pairs'][0].split(" ")]
950 resp = cStringIO.StringIO()
935 resp = cStringIO.StringIO()
951 for b in self.repo.between(pairs):
936 for b in self.repo.between(pairs):
952 resp.write(" ".join(map(hex, b)) + "\n")
937 resp.write(" ".join(map(hex, b)) + "\n")
953 resp = resp.getvalue()
938 resp = resp.getvalue()
954 req.httphdr("application/mercurial-0.1", length=len(resp))
939 req.httphdr("application/mercurial-0.1", length=len(resp))
955 req.write(resp)
940 req.write(resp)
956
941
957 def do_changegroup(self, req):
942 def do_changegroup(self, req):
958 req.httphdr("application/mercurial-0.1")
943 req.httphdr("application/mercurial-0.1")
959 nodes = []
944 nodes = []
960 if not self.allowpull:
945 if not self.allowpull:
961 return
946 return
962
947
963 if req.form.has_key('roots'):
948 if req.form.has_key('roots'):
964 nodes = map(bin, req.form['roots'][0].split(" "))
949 nodes = map(bin, req.form['roots'][0].split(" "))
965
950
966 z = zlib.compressobj()
951 z = zlib.compressobj()
967 f = self.repo.changegroup(nodes, 'serve')
952 f = self.repo.changegroup(nodes, 'serve')
968 while 1:
953 while 1:
969 chunk = f.read(4096)
954 chunk = f.read(4096)
970 if not chunk:
955 if not chunk:
971 break
956 break
972 req.write(z.compress(chunk))
957 req.write(z.compress(chunk))
973
958
974 req.write(z.flush())
959 req.write(z.flush())
975
960
976 def do_changegroupsubset(self, req):
961 def do_changegroupsubset(self, req):
977 req.httphdr("application/mercurial-0.1")
962 req.httphdr("application/mercurial-0.1")
978 bases = []
963 bases = []
979 heads = []
964 heads = []
980 if not self.allowpull:
965 if not self.allowpull:
981 return
966 return
982
967
983 if req.form.has_key('bases'):
968 if req.form.has_key('bases'):
984 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
969 bases = [bin(x) for x in req.form['bases'][0].split(' ')]
985 if req.form.has_key('heads'):
970 if req.form.has_key('heads'):
986 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
971 heads = [bin(x) for x in req.form['heads'][0].split(' ')]
987
972
988 z = zlib.compressobj()
973 z = zlib.compressobj()
989 f = self.repo.changegroupsubset(bases, heads, 'serve')
974 f = self.repo.changegroupsubset(bases, heads, 'serve')
990 while 1:
975 while 1:
991 chunk = f.read(4096)
976 chunk = f.read(4096)
992 if not chunk:
977 if not chunk:
993 break
978 break
994 req.write(z.compress(chunk))
979 req.write(z.compress(chunk))
995
980
996 req.write(z.flush())
981 req.write(z.flush())
997
982
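do_changegroup and do_changegroupsubset above follow the same streaming pattern: read fixed-size chunks from the changegroup generator, feed them through a single zlib.compressobj(), and flush once at the end so the client receives one valid zlib stream. A minimal standalone sketch of that pattern:

import zlib

def compress_stream(src, write, chunksize=4096):
    # src: any object with a read(n) method; write: sink for compressed bytes
    z = zlib.compressobj()
    while True:
        chunk = src.read(chunksize)
        if not chunk:
            break
        write(z.compress(chunk))
    write(z.flush())   # emit whatever the compressor is still buffering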
998 def do_archive(self, req):
983 def do_archive(self, req):
999 type_ = req.form['type'][0]
984 type_ = req.form['type'][0]
1000 allowed = self.configlist("web", "allow_archive")
985 allowed = self.configlist("web", "allow_archive")
1001 if (type_ in self.archives and (type_ in allowed or
986 if (type_ in self.archives and (type_ in allowed or
1002 self.configbool("web", "allow" + type_, False))):
987 self.configbool("web", "allow" + type_, False))):
1003 self.archive(req, req.form['node'][0], type_)
988 self.archive(req, req.form['node'][0], type_)
1004 return
989 return
1005
990
1006 req.write(self.t("error"))
991 req.write(self.t("error"))
1007
992
1008 def do_static(self, req):
993 def do_static(self, req):
1009 fname = req.form['file'][0]
994 fname = req.form['file'][0]
1010 # a repo owner may set web.static in .hg/hgrc to get any file
995 # a repo owner may set web.static in .hg/hgrc to get any file
1011 # readable by the user running the CGI script
996 # readable by the user running the CGI script
1012 static = self.config("web", "static",
997 static = self.config("web", "static",
1013 os.path.join(self.templatepath, "static"),
998 os.path.join(self.templatepath, "static"),
1014 untrusted=False)
999 untrusted=False)
1015 req.write(staticfile(static, fname, req)
1000 req.write(staticfile(static, fname, req)
1016 or self.t("error", error="%r not found" % fname))
1001 or self.t("error", error="%r not found" % fname))
1017
1002
1018 def do_capabilities(self, req):
1003 def do_capabilities(self, req):
1019 caps = ['lookup', 'changegroupsubset']
1004 caps = ['lookup', 'changegroupsubset']
1020 if self.configbool('server', 'uncompressed'):
1005 if self.configbool('server', 'uncompressed'):
1021 caps.append('stream=%d' % self.repo.changelog.version)
1006 caps.append('stream=%d' % self.repo.changelog.version)
1022 # XXX: make configurable and/or share code with do_unbundle:
1007 # XXX: make configurable and/or share code with do_unbundle:
1023 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1008 unbundleversions = ['HG10GZ', 'HG10BZ', 'HG10UN']
1024 if unbundleversions:
1009 if unbundleversions:
1025 caps.append('unbundle=%s' % ','.join(unbundleversions))
1010 caps.append('unbundle=%s' % ','.join(unbundleversions))
1026 resp = ' '.join(caps)
1011 resp = ' '.join(caps)
1027 req.httphdr("application/mercurial-0.1", length=len(resp))
1012 req.httphdr("application/mercurial-0.1", length=len(resp))
1028 req.write(resp)
1013 req.write(resp)
1029
1014
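The capability string built above is space-separated, with list-valued capabilities such as unbundle encoded as name=value1,value2. A sketch of how a client might take it apart (an illustration of the format only, not Mercurial's own client code):

def parse_capabilities(resp):
    caps = {}
    for cap in resp.split():
        if '=' in cap:
            name, value = cap.split('=', 1)
            caps[name] = value.split(',')
        else:
            caps[cap] = True
    return caps

# parse_capabilities("lookup changegroupsubset unbundle=HG10GZ,HG10BZ,HG10UN")
# -> {'lookup': True, 'changegroupsubset': True,
#     'unbundle': ['HG10GZ', 'HG10BZ', 'HG10UN']}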
1030 def check_perm(self, req, op, default):
1015 def check_perm(self, req, op, default):
1031 '''check permission for operation based on user auth.
1016 '''check permission for operation based on user auth.
1032 return true if op allowed, else false.
1017 return true if op allowed, else false.
1033 default is policy to use if no config given.'''
1018 default is policy to use if no config given.'''
1034
1019
1035 user = req.env.get('REMOTE_USER')
1020 user = req.env.get('REMOTE_USER')
1036
1021
1037 deny = self.configlist('web', 'deny_' + op)
1022 deny = self.configlist('web', 'deny_' + op)
1038 if deny and (not user or deny == ['*'] or user in deny):
1023 if deny and (not user or deny == ['*'] or user in deny):
1039 return False
1024 return False
1040
1025
1041 allow = self.configlist('web', 'allow_' + op)
1026 allow = self.configlist('web', 'allow_' + op)
1042 return (allow and (allow == ['*'] or user in allow)) or default
1027 return (allow and (allow == ['*'] or user in allow)) or default
1043
1028
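Restated outside the class, the policy check_perm implements is: a matching deny list always wins, an allow list then grants access, and otherwise the caller-supplied default applies. A minimal sketch with the same truth table:

def is_allowed(user, deny, allow, default):
    if deny and (not user or deny == ['*'] or user in deny):
        return False
    return bool(allow and (allow == ['*'] or user in allow)) or default

# is_allowed('alice', deny=[], allow=['*'], default=False)     -> True
# is_allowed(None, deny=['*'], allow=['alice'], default=True)  -> False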
1044 def do_unbundle(self, req):
1029 def do_unbundle(self, req):
1045 def bail(response, headers={}):
1030 def bail(response, headers={}):
1046 length = int(req.env['CONTENT_LENGTH'])
1031 length = int(req.env['CONTENT_LENGTH'])
1047 for s in util.filechunkiter(req, limit=length):
1032 for s in util.filechunkiter(req, limit=length):
1048 # drain incoming bundle, else client will not see
1033 # drain incoming bundle, else client will not see
1049 # response when run outside cgi script
1034 # response when run outside cgi script
1050 pass
1035 pass
1051 req.httphdr("application/mercurial-0.1", headers=headers)
1036 req.httphdr("application/mercurial-0.1", headers=headers)
1052 req.write('0\n')
1037 req.write('0\n')
1053 req.write(response)
1038 req.write(response)
1054
1039
1055 # require ssl by default, auth info cannot be sniffed and
1040 # require ssl by default, auth info cannot be sniffed and
1056 # replayed
1041 # replayed
1057 ssl_req = self.configbool('web', 'push_ssl', True)
1042 ssl_req = self.configbool('web', 'push_ssl', True)
1058 if ssl_req:
1043 if ssl_req:
1059 if not req.env.get('HTTPS'):
1044 if not req.env.get('HTTPS'):
1060 bail(_('ssl required\n'))
1045 bail(_('ssl required\n'))
1061 return
1046 return
1062 proto = 'https'
1047 proto = 'https'
1063 else:
1048 else:
1064 proto = 'http'
1049 proto = 'http'
1065
1050
1066 # do not allow push unless explicitly allowed
1051 # do not allow push unless explicitly allowed
1067 if not self.check_perm(req, 'push', False):
1052 if not self.check_perm(req, 'push', False):
1068 bail(_('push not authorized\n'),
1053 bail(_('push not authorized\n'),
1069 headers={'status': '401 Unauthorized'})
1054 headers={'status': '401 Unauthorized'})
1070 return
1055 return
1071
1056
1072 their_heads = req.form['heads'][0].split(' ')
1057 their_heads = req.form['heads'][0].split(' ')
1073
1058
1074 def check_heads():
1059 def check_heads():
1075 heads = map(hex, self.repo.heads())
1060 heads = map(hex, self.repo.heads())
1076 return their_heads == [hex('force')] or their_heads == heads
1061 return their_heads == [hex('force')] or their_heads == heads
1077
1062
1078 # fail early if possible
1063 # fail early if possible
1079 if not check_heads():
1064 if not check_heads():
1080 bail(_('unsynced changes\n'))
1065 bail(_('unsynced changes\n'))
1081 return
1066 return
1082
1067
1083 req.httphdr("application/mercurial-0.1")
1068 req.httphdr("application/mercurial-0.1")
1084
1069
1085 # do not lock repo until all changegroup data is
1070 # do not lock repo until all changegroup data is
1086 # streamed. save to temporary file.
1071 # streamed. save to temporary file.
1087
1072
1088 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1073 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
1089 fp = os.fdopen(fd, 'wb+')
1074 fp = os.fdopen(fd, 'wb+')
1090 try:
1075 try:
1091 length = int(req.env['CONTENT_LENGTH'])
1076 length = int(req.env['CONTENT_LENGTH'])
1092 for s in util.filechunkiter(req, limit=length):
1077 for s in util.filechunkiter(req, limit=length):
1093 fp.write(s)
1078 fp.write(s)
1094
1079
1095 try:
1080 try:
1096 lock = self.repo.lock()
1081 lock = self.repo.lock()
1097 try:
1082 try:
1098 if not check_heads():
1083 if not check_heads():
1099 req.write('0\n')
1084 req.write('0\n')
1100 req.write(_('unsynced changes\n'))
1085 req.write(_('unsynced changes\n'))
1101 return
1086 return
1102
1087
1103 fp.seek(0)
1088 fp.seek(0)
1104 header = fp.read(6)
1089 header = fp.read(6)
1105 if not header.startswith("HG"):
1090 if not header.startswith("HG"):
1106 # old client with uncompressed bundle
1091 # old client with uncompressed bundle
1107 def generator(f):
1092 def generator(f):
1108 yield header
1093 yield header
1109 for chunk in f:
1094 for chunk in f:
1110 yield chunk
1095 yield chunk
1111 elif not header.startswith("HG10"):
1096 elif not header.startswith("HG10"):
1112 req.write("0\n")
1097 req.write("0\n")
1113 req.write(_("unknown bundle version\n"))
1098 req.write(_("unknown bundle version\n"))
1114 return
1099 return
1115 elif header == "HG10GZ":
1100 elif header == "HG10GZ":
1116 def generator(f):
1101 def generator(f):
1117 zd = zlib.decompressobj()
1102 zd = zlib.decompressobj()
1118 for chunk in f:
1103 for chunk in f:
1119 yield zd.decompress(chunk)
1104 yield zd.decompress(chunk)
1120 elif header == "HG10BZ":
1105 elif header == "HG10BZ":
1121 def generator(f):
1106 def generator(f):
1122 zd = bz2.BZ2Decompressor()
1107 zd = bz2.BZ2Decompressor()
1123 zd.decompress("BZ")
1108 zd.decompress("BZ")
1124 for chunk in f:
1109 for chunk in f:
1125 yield zd.decompress(chunk)
1110 yield zd.decompress(chunk)
1126 elif header == "HG10UN":
1111 elif header == "HG10UN":
1127 def generator(f):
1112 def generator(f):
1128 for chunk in f:
1113 for chunk in f:
1129 yield chunk
1114 yield chunk
1130 else:
1115 else:
1131 req.write("0\n")
1116 req.write("0\n")
1132 req.write(_("unknown bundle compression type\n"))
1117 req.write(_("unknown bundle compression type\n"))
1133 return
1118 return
1134 gen = generator(util.filechunkiter(fp, 4096))
1119 gen = generator(util.filechunkiter(fp, 4096))
1135
1120
1136 # send addchangegroup output to client
1121 # send addchangegroup output to client
1137
1122
1138 old_stdout = sys.stdout
1123 old_stdout = sys.stdout
1139 sys.stdout = cStringIO.StringIO()
1124 sys.stdout = cStringIO.StringIO()
1140
1125
1141 try:
1126 try:
1142 url = 'remote:%s:%s' % (proto,
1127 url = 'remote:%s:%s' % (proto,
1143 req.env.get('REMOTE_HOST', ''))
1128 req.env.get('REMOTE_HOST', ''))
1144 try:
1129 try:
1145 ret = self.repo.addchangegroup(
1130 ret = self.repo.addchangegroup(
1146 util.chunkbuffer(gen), 'serve', url)
1131 util.chunkbuffer(gen), 'serve', url)
1147 except util.Abort, inst:
1132 except util.Abort, inst:
1148 sys.stdout.write("abort: %s\n" % inst)
1133 sys.stdout.write("abort: %s\n" % inst)
1149 ret = 0
1134 ret = 0
1150 finally:
1135 finally:
1151 val = sys.stdout.getvalue()
1136 val = sys.stdout.getvalue()
1152 sys.stdout = old_stdout
1137 sys.stdout = old_stdout
1153 req.write('%d\n' % ret)
1138 req.write('%d\n' % ret)
1154 req.write(val)
1139 req.write(val)
1155 finally:
1140 finally:
1156 lock.release()
1141 lock.release()
1157 except (OSError, IOError), inst:
1142 except (OSError, IOError), inst:
1158 req.write('0\n')
1143 req.write('0\n')
1159 filename = getattr(inst, 'filename', '')
1144 filename = getattr(inst, 'filename', '')
1160 # Don't send our filesystem layout to the client
1145 # Don't send our filesystem layout to the client
1161 if filename.startswith(self.repo.root):
1146 if filename.startswith(self.repo.root):
1162 filename = filename[len(self.repo.root)+1:]
1147 filename = filename[len(self.repo.root)+1:]
1163 else:
1148 else:
1164 filename = ''
1149 filename = ''
1165 error = getattr(inst, 'strerror', 'Unknown error')
1150 error = getattr(inst, 'strerror', 'Unknown error')
1166 req.write('%s: %s\n' % (error, filename))
1151 req.write('%s: %s\n' % (error, filename))
1167 finally:
1152 finally:
1168 fp.close()
1153 fp.close()
1169 os.unlink(tempname)
1154 os.unlink(tempname)
1170
1155
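The header dispatch inside do_unbundle can be read on its own: the first six bytes of the uploaded bundle name the compression, where HG10GZ is zlib, HG10BZ is bzip2 with its leading "BZ" magic re-fed to the decompressor (the header read consumed it), and HG10UN is uncompressed. A standalone sketch of that selection, in the same Python 2 style as the code above:

import bz2
import zlib

def decompressor_for(header):
    # header: the first 6 bytes of the bundle, as read above
    if header == "HG10GZ":
        return zlib.decompressobj().decompress
    if header == "HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress("BZ")          # re-feed the magic bytes the header read consumed
        return zd.decompress
    if header == "HG10UN":
        return lambda chunk: chunk   # pass chunks through unchanged
    raise ValueError("unknown bundle compression type: %r" % header)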
1171 def do_stream_out(self, req):
1156 def do_stream_out(self, req):
1172 req.httphdr("application/mercurial-0.1")
1157 req.httphdr("application/mercurial-0.1")
1173 streamclone.stream_out(self.repo, req)
1158 streamclone.stream_out(self.repo, req)
@@ -1,234 +1,237 b''
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
1 # hgweb/hgwebdir_mod.py - Web interface for a directory of repositories.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from mercurial import demandimport; demandimport.enable()
9 from mercurial import demandimport; demandimport.enable()
10 import os, mimetools, cStringIO
10 import os, mimetools, cStringIO
11 from mercurial.i18n import gettext as _
11 from mercurial.i18n import gettext as _
12 from mercurial import ui, hg, util, templater
12 from mercurial import ui, hg, util, templater
13 from common import get_mtime, staticfile, style_map
13 from common import get_mtime, staticfile, style_map, paritygen
14 from hgweb_mod import hgweb
14 from hgweb_mod import hgweb
15
15
16 # This is a stopgap
16 # This is a stopgap
17 class hgwebdir(object):
17 class hgwebdir(object):
18 def __init__(self, config, parentui=None):
18 def __init__(self, config, parentui=None):
19 def cleannames(items):
19 def cleannames(items):
20 return [(name.strip(os.sep), path) for name, path in items]
20 return [(name.strip(os.sep), path) for name, path in items]
21
21
22 self.parentui = parentui
22 self.parentui = parentui
23 self.motd = None
23 self.motd = None
24 self.style = None
24 self.style = None
25 self.stripecount = None
25 self.repos_sorted = ('name', False)
26 self.repos_sorted = ('name', False)
26 if isinstance(config, (list, tuple)):
27 if isinstance(config, (list, tuple)):
27 self.repos = cleannames(config)
28 self.repos = cleannames(config)
28 self.repos_sorted = ('', False)
29 self.repos_sorted = ('', False)
29 elif isinstance(config, dict):
30 elif isinstance(config, dict):
30 self.repos = cleannames(config.items())
31 self.repos = cleannames(config.items())
31 self.repos.sort()
32 self.repos.sort()
32 else:
33 else:
33 if isinstance(config, util.configparser):
34 if isinstance(config, util.configparser):
34 cp = config
35 cp = config
35 else:
36 else:
36 cp = util.configparser()
37 cp = util.configparser()
37 cp.read(config)
38 cp.read(config)
38 self.repos = []
39 self.repos = []
39 if cp.has_section('web'):
40 if cp.has_section('web'):
40 if cp.has_option('web', 'motd'):
41 if cp.has_option('web', 'motd'):
41 self.motd = cp.get('web', 'motd')
42 self.motd = cp.get('web', 'motd')
42 if cp.has_option('web', 'style'):
43 if cp.has_option('web', 'style'):
43 self.style = cp.get('web', 'style')
44 self.style = cp.get('web', 'style')
45 if cp.has_option('web', 'stripes'):
46 self.stripecount = int(cp.get('web', 'stripes'))
44 if cp.has_section('paths'):
47 if cp.has_section('paths'):
45 self.repos.extend(cleannames(cp.items('paths')))
48 self.repos.extend(cleannames(cp.items('paths')))
46 if cp.has_section('collections'):
49 if cp.has_section('collections'):
47 for prefix, root in cp.items('collections'):
50 for prefix, root in cp.items('collections'):
48 for path in util.walkrepos(root):
51 for path in util.walkrepos(root):
49 repo = os.path.normpath(path)
52 repo = os.path.normpath(path)
50 name = repo
53 name = repo
51 if name.startswith(prefix):
54 if name.startswith(prefix):
52 name = name[len(prefix):]
55 name = name[len(prefix):]
53 self.repos.append((name.lstrip(os.sep), repo))
56 self.repos.append((name.lstrip(os.sep), repo))
54 self.repos.sort()
57 self.repos.sort()
55
58
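The constructor above accepts either a list/tuple, a dict, or an ini-style config file; in the file case the new web.stripes option joins motd and style, [paths] maps URL names to repositories, and [collections] names directories to scan. A rough illustration of such a file, parsed here with Python 2's stock ConfigParser (the section contents are invented, and the real code goes through util.configparser):

from ConfigParser import SafeConfigParser
from StringIO import StringIO

sample = """\
[web]
motd = welcome to the patch farm
style = gitweb
stripes = 2

[paths]
mercurial = /srv/hg/mercurial

[collections]
/srv/hg/projects = /srv/hg/projects
"""

cp = SafeConfigParser()
cp.readfp(StringIO(sample))
stripecount = int(cp.get('web', 'stripes'))   # -> 2, used for row striping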
56 def run(self):
59 def run(self):
57 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
60 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
58 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
61 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
59 import mercurial.hgweb.wsgicgi as wsgicgi
62 import mercurial.hgweb.wsgicgi as wsgicgi
60 from request import wsgiapplication
63 from request import wsgiapplication
61 def make_web_app():
64 def make_web_app():
62 return self
65 return self
63 wsgicgi.launch(wsgiapplication(make_web_app))
66 wsgicgi.launch(wsgiapplication(make_web_app))
64
67
65 def run_wsgi(self, req):
68 def run_wsgi(self, req):
66 def header(**map):
69 def header(**map):
67 header_file = cStringIO.StringIO(
70 header_file = cStringIO.StringIO(
68 ''.join(tmpl("header", encoding=util._encoding, **map)))
71 ''.join(tmpl("header", encoding=util._encoding, **map)))
69 msg = mimetools.Message(header_file, 0)
72 msg = mimetools.Message(header_file, 0)
70 req.header(msg.items())
73 req.header(msg.items())
71 yield header_file.read()
74 yield header_file.read()
72
75
73 def footer(**map):
76 def footer(**map):
74 yield tmpl("footer", **map)
77 yield tmpl("footer", **map)
75
78
76 def motd(**map):
79 def motd(**map):
77 if self.motd is not None:
80 if self.motd is not None:
78 yield self.motd
81 yield self.motd
79 else:
82 else:
80 yield config('web', 'motd', '')
83 yield config('web', 'motd', '')
81
84
82 parentui = self.parentui or ui.ui(report_untrusted=False)
85 parentui = self.parentui or ui.ui(report_untrusted=False)
83
86
84 def config(section, name, default=None, untrusted=True):
87 def config(section, name, default=None, untrusted=True):
85 return parentui.config(section, name, default, untrusted)
88 return parentui.config(section, name, default, untrusted)
86
89
87 url = req.env['REQUEST_URI'].split('?')[0]
90 url = req.env['REQUEST_URI'].split('?')[0]
88 if not url.endswith('/'):
91 if not url.endswith('/'):
89 url += '/'
92 url += '/'
90
93
91 staticurl = config('web', 'staticurl') or url + 'static/'
94 staticurl = config('web', 'staticurl') or url + 'static/'
92 if not staticurl.endswith('/'):
95 if not staticurl.endswith('/'):
93 staticurl += '/'
96 staticurl += '/'
94
97
95 style = self.style
98 style = self.style
96 if style is None:
99 if style is None:
97 style = config('web', 'style', '')
100 style = config('web', 'style', '')
98 if req.form.has_key('style'):
101 if req.form.has_key('style'):
99 style = req.form['style'][0]
102 style = req.form['style'][0]
103 if self.stripecount is None:
104 self.stripecount = int(config('web', 'stripes', 1))
100 mapfile = style_map(templater.templatepath(), style)
105 mapfile = style_map(templater.templatepath(), style)
101 tmpl = templater.templater(mapfile, templater.common_filters,
106 tmpl = templater.templater(mapfile, templater.common_filters,
102 defaults={"header": header,
107 defaults={"header": header,
103 "footer": footer,
108 "footer": footer,
104 "motd": motd,
109 "motd": motd,
105 "url": url,
110 "url": url,
106 "staticurl": staticurl})
111 "staticurl": staticurl})
107
112
108 def archivelist(ui, nodeid, url):
113 def archivelist(ui, nodeid, url):
109 allowed = ui.configlist("web", "allow_archive", untrusted=True)
114 allowed = ui.configlist("web", "allow_archive", untrusted=True)
110 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
115 for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
111 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
116 if i[0] in allowed or ui.configbool("web", "allow" + i[0],
112 untrusted=True):
117 untrusted=True):
113 yield {"type" : i[0], "extension": i[1],
118 yield {"type" : i[0], "extension": i[1],
114 "node": nodeid, "url": url}
119 "node": nodeid, "url": url}
115
120
116 def entries(sortcolumn="", descending=False, **map):
121 def entries(sortcolumn="", descending=False, **map):
117 def sessionvars(**map):
122 def sessionvars(**map):
118 fields = []
123 fields = []
119 if req.form.has_key('style'):
124 if req.form.has_key('style'):
120 style = req.form['style'][0]
125 style = req.form['style'][0]
121 if style != get('web', 'style', ''):
126 if style != get('web', 'style', ''):
122 fields.append(('style', style))
127 fields.append(('style', style))
123
128
124 separator = url[-1] == '?' and ';' or '?'
129 separator = url[-1] == '?' and ';' or '?'
125 for name, value in fields:
130 for name, value in fields:
126 yield dict(name=name, value=value, separator=separator)
131 yield dict(name=name, value=value, separator=separator)
127 separator = ';'
132 separator = ';'
128
133
129 rows = []
134 rows = []
130 parity = 0
135 parity = paritygen(self.stripecount)
131 for name, path in self.repos:
136 for name, path in self.repos:
132 u = ui.ui(parentui=parentui)
137 u = ui.ui(parentui=parentui)
133 try:
138 try:
134 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
139 u.readconfig(os.path.join(path, '.hg', 'hgrc'))
135 except IOError:
140 except IOError:
136 pass
141 pass
137 def get(section, name, default=None):
142 def get(section, name, default=None):
138 return u.config(section, name, default, untrusted=True)
143 return u.config(section, name, default, untrusted=True)
139
144
140 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
145 url = ('/'.join([req.env["REQUEST_URI"].split('?')[0], name])
141 .replace("//", "/")) + '/'
146 .replace("//", "/")) + '/'
142
147
143 # update time with local timezone
148 # update time with local timezone
144 try:
149 try:
145 d = (get_mtime(path), util.makedate()[1])
150 d = (get_mtime(path), util.makedate()[1])
146 except OSError:
151 except OSError:
147 continue
152 continue
148
153
149 contact = (get("ui", "username") or # preferred
154 contact = (get("ui", "username") or # preferred
150 get("web", "contact") or # deprecated
155 get("web", "contact") or # deprecated
151 get("web", "author", "")) # also
156 get("web", "author", "")) # also
152 description = get("web", "description", "")
157 description = get("web", "description", "")
153 name = get("web", "name", name)
158 name = get("web", "name", name)
154 row = dict(contact=contact or "unknown",
159 row = dict(contact=contact or "unknown",
155 contact_sort=contact.upper() or "unknown",
160 contact_sort=contact.upper() or "unknown",
156 name=name,
161 name=name,
157 name_sort=name,
162 name_sort=name,
158 url=url,
163 url=url,
159 description=description or "unknown",
164 description=description or "unknown",
160 description_sort=description.upper() or "unknown",
165 description_sort=description.upper() or "unknown",
161 lastchange=d,
166 lastchange=d,
162 lastchange_sort=d[1]-d[0],
167 lastchange_sort=d[1]-d[0],
163 sessionvars=sessionvars,
168 sessionvars=sessionvars,
164 archives=archivelist(u, "tip", url))
169 archives=archivelist(u, "tip", url))
165 if (not sortcolumn
170 if (not sortcolumn
166 or (sortcolumn, descending) == self.repos_sorted):
171 or (sortcolumn, descending) == self.repos_sorted):
167 # fast path for unsorted output
172 # fast path for unsorted output
168 row['parity'] = parity
173 row['parity'] = parity.next()
169 parity = 1 - parity
170 yield row
174 yield row
171 else:
175 else:
172 rows.append((row["%s_sort" % sortcolumn], row))
176 rows.append((row["%s_sort" % sortcolumn], row))
173 if rows:
177 if rows:
174 rows.sort()
178 rows.sort()
175 if descending:
179 if descending:
176 rows.reverse()
180 rows.reverse()
177 for key, row in rows:
181 for key, row in rows:
178 row['parity'] = parity
182 row['parity'] = parity.next()
179 parity = 1 - parity
180 yield row
183 yield row
181
184
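The change above replaces the old alternate-every-row parity flag with a generator produced by paritygen(self.stripecount), imported from hgweb's common module, so index rows can be striped in blocks of stripecount instead of strictly alternating. A minimal generator with the same observable behaviour (the real paritygen is a little more general, e.g. it also accepts an offset) might look like:

def simple_paritygen(stripecount):
    parity, count = 0, 0
    while True:
        yield parity
        count += 1
        if stripecount and count >= stripecount:
            parity = 1 - parity
            count = 0

# stripecount=1 reproduces the old 0,1,0,1,... alternation; stripecount=2
# yields 0,0,1,1,0,0,... for two-row stripes; stripecount=0 never flips.
# The code above consumes the generator with parity.next(), Python 2's API.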
182 try:
185 try:
183 virtual = req.env.get("PATH_INFO", "").strip('/')
186 virtual = req.env.get("PATH_INFO", "").strip('/')
184 if virtual.startswith('static/'):
187 if virtual.startswith('static/'):
185 static = os.path.join(templater.templatepath(), 'static')
188 static = os.path.join(templater.templatepath(), 'static')
186 fname = virtual[7:]
189 fname = virtual[7:]
187 req.write(staticfile(static, fname, req) or
190 req.write(staticfile(static, fname, req) or
188 tmpl('error', error='%r not found' % fname))
191 tmpl('error', error='%r not found' % fname))
189 elif virtual:
192 elif virtual:
190 while virtual:
193 while virtual:
191 real = dict(self.repos).get(virtual)
194 real = dict(self.repos).get(virtual)
192 if real:
195 if real:
193 break
196 break
194 up = virtual.rfind('/')
197 up = virtual.rfind('/')
195 if up < 0:
198 if up < 0:
196 break
199 break
197 virtual = virtual[:up]
200 virtual = virtual[:up]
198 if real:
201 if real:
199 req.env['REPO_NAME'] = virtual
202 req.env['REPO_NAME'] = virtual
200 try:
203 try:
201 repo = hg.repository(parentui, real)
204 repo = hg.repository(parentui, real)
202 hgweb(repo).run_wsgi(req)
205 hgweb(repo).run_wsgi(req)
203 except IOError, inst:
206 except IOError, inst:
204 req.write(tmpl("error", error=inst.strerror))
207 req.write(tmpl("error", error=inst.strerror))
205 except hg.RepoError, inst:
208 except hg.RepoError, inst:
206 req.write(tmpl("error", error=str(inst)))
209 req.write(tmpl("error", error=str(inst)))
207 else:
210 else:
208 req.write(tmpl("notfound", repo=virtual))
211 req.write(tmpl("notfound", repo=virtual))
209 else:
212 else:
210 if req.form.has_key('static'):
213 if req.form.has_key('static'):
211 static = os.path.join(templater.templatepath(), "static")
214 static = os.path.join(templater.templatepath(), "static")
212 fname = req.form['static'][0]
215 fname = req.form['static'][0]
213 req.write(staticfile(static, fname, req)
216 req.write(staticfile(static, fname, req)
214 or tmpl("error", error="%r not found" % fname))
217 or tmpl("error", error="%r not found" % fname))
215 else:
218 else:
216 sortable = ["name", "description", "contact", "lastchange"]
219 sortable = ["name", "description", "contact", "lastchange"]
217 sortcolumn, descending = self.repos_sorted
220 sortcolumn, descending = self.repos_sorted
218 if req.form.has_key('sort'):
221 if req.form.has_key('sort'):
219 sortcolumn = req.form['sort'][0]
222 sortcolumn = req.form['sort'][0]
220 descending = sortcolumn.startswith('-')
223 descending = sortcolumn.startswith('-')
221 if descending:
224 if descending:
222 sortcolumn = sortcolumn[1:]
225 sortcolumn = sortcolumn[1:]
223 if sortcolumn not in sortable:
226 if sortcolumn not in sortable:
224 sortcolumn = ""
227 sortcolumn = ""
225
228
226 sort = [("sort_%s" % column,
229 sort = [("sort_%s" % column,
227 "%s%s" % ((not descending and column == sortcolumn)
230 "%s%s" % ((not descending and column == sortcolumn)
228 and "-" or "", column))
231 and "-" or "", column))
229 for column in sortable]
232 for column in sortable]
230 req.write(tmpl("index", entries=entries,
233 req.write(tmpl("index", entries=entries,
231 sortcolumn=sortcolumn, descending=descending,
234 sortcolumn=sortcolumn, descending=descending,
232 **dict(sort)))
235 **dict(sort)))
233 finally:
236 finally:
234 tmpl = None
237 tmpl = None
@@ -1,1944 +1,1950 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import _
9 from i18n import _
10 import repo, changegroup
10 import repo, changegroup
11 import changelog, dirstate, filelog, manifest, context
11 import changelog, dirstate, filelog, manifest, context
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
12 import re, lock, transaction, tempfile, stat, mdiff, errno, ui
13 import os, revlog, time, util
13 import os, revlog, time, util
14
14
15 class localrepository(repo.repository):
15 class localrepository(repo.repository):
16 capabilities = ('lookup', 'changegroupsubset')
16 capabilities = ('lookup', 'changegroupsubset')
17 supported = ('revlogv1', 'store')
17 supported = ('revlogv1', 'store')
18
18
19 def __del__(self):
19 def __del__(self):
20 self.transhandle = None
20 self.transhandle = None
21 def __init__(self, parentui, path=None, create=0):
21 def __init__(self, parentui, path=None, create=0):
22 repo.repository.__init__(self)
22 repo.repository.__init__(self)
23 if not path:
23 if not path:
24 p = os.getcwd()
24 p = os.getcwd()
25 while not os.path.isdir(os.path.join(p, ".hg")):
25 while not os.path.isdir(os.path.join(p, ".hg")):
26 oldp = p
26 oldp = p
27 p = os.path.dirname(p)
27 p = os.path.dirname(p)
28 if p == oldp:
28 if p == oldp:
29 raise repo.RepoError(_("There is no Mercurial repository"
29 raise repo.RepoError(_("There is no Mercurial repository"
30 " here (.hg not found)"))
30 " here (.hg not found)"))
31 path = p
31 path = p
32
32
33 self.root = os.path.realpath(path)
33 self.root = os.path.realpath(path)
34 self.path = os.path.join(self.root, ".hg")
34 self.path = os.path.join(self.root, ".hg")
35 self.origroot = path
35 self.origroot = path
36 self.opener = util.opener(self.path)
36 self.opener = util.opener(self.path)
37 self.wopener = util.opener(self.root)
37 self.wopener = util.opener(self.root)
38
38
39 if not os.path.isdir(self.path):
39 if not os.path.isdir(self.path):
40 if create:
40 if create:
41 if not os.path.exists(path):
41 if not os.path.exists(path):
42 os.mkdir(path)
42 os.mkdir(path)
43 os.mkdir(self.path)
43 os.mkdir(self.path)
44 requirements = ["revlogv1"]
44 requirements = ["revlogv1"]
45 if parentui.configbool('format', 'usestore', True):
45 if parentui.configbool('format', 'usestore', True):
46 os.mkdir(os.path.join(self.path, "store"))
46 os.mkdir(os.path.join(self.path, "store"))
47 requirements.append("store")
47 requirements.append("store")
48 # create an invalid changelog
48 # create an invalid changelog
49 self.opener("00changelog.i", "a").write(
49 self.opener("00changelog.i", "a").write(
50 '\0\0\0\2' # represents revlogv2
50 '\0\0\0\2' # represents revlogv2
51 ' dummy changelog to prevent using the old repo layout'
51 ' dummy changelog to prevent using the old repo layout'
52 )
52 )
53 reqfile = self.opener("requires", "w")
53 reqfile = self.opener("requires", "w")
54 for r in requirements:
54 for r in requirements:
55 reqfile.write("%s\n" % r)
55 reqfile.write("%s\n" % r)
56 reqfile.close()
56 reqfile.close()
57 else:
57 else:
58 raise repo.RepoError(_("repository %s not found") % path)
58 raise repo.RepoError(_("repository %s not found") % path)
59 elif create:
59 elif create:
60 raise repo.RepoError(_("repository %s already exists") % path)
60 raise repo.RepoError(_("repository %s already exists") % path)
61 else:
61 else:
62 # find requirements
62 # find requirements
63 try:
63 try:
64 requirements = self.opener("requires").read().splitlines()
64 requirements = self.opener("requires").read().splitlines()
65 except IOError, inst:
65 except IOError, inst:
66 if inst.errno != errno.ENOENT:
66 if inst.errno != errno.ENOENT:
67 raise
67 raise
68 requirements = []
68 requirements = []
69 # check them
69 # check them
70 for r in requirements:
70 for r in requirements:
71 if r not in self.supported:
71 if r not in self.supported:
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
72 raise repo.RepoError(_("requirement '%s' not supported") % r)
73
73
74 # setup store
74 # setup store
75 if "store" in requirements:
75 if "store" in requirements:
76 self.encodefn = util.encodefilename
76 self.encodefn = util.encodefilename
77 self.decodefn = util.decodefilename
77 self.decodefn = util.decodefilename
78 self.spath = os.path.join(self.path, "store")
78 self.spath = os.path.join(self.path, "store")
79 else:
79 else:
80 self.encodefn = lambda x: x
80 self.encodefn = lambda x: x
81 self.decodefn = lambda x: x
81 self.decodefn = lambda x: x
82 self.spath = self.path
82 self.spath = self.path
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
83 self.sopener = util.encodedopener(util.opener(self.spath), self.encodefn)
84
84
85 self.ui = ui.ui(parentui=parentui)
85 self.ui = ui.ui(parentui=parentui)
86 try:
86 try:
87 self.ui.readconfig(self.join("hgrc"), self.root)
87 self.ui.readconfig(self.join("hgrc"), self.root)
88 except IOError:
88 except IOError:
89 pass
89 pass
90
90
91 self.changelog = changelog.changelog(self.sopener)
91 self.changelog = changelog.changelog(self.sopener)
92 self.sopener.defversion = self.changelog.version
92 self.sopener.defversion = self.changelog.version
93 self.manifest = manifest.manifest(self.sopener)
93 self.manifest = manifest.manifest(self.sopener)
94
94
95 fallback = self.ui.config('ui', 'fallbackencoding')
95 fallback = self.ui.config('ui', 'fallbackencoding')
96 if fallback:
96 if fallback:
97 util._fallbackencoding = fallback
97 util._fallbackencoding = fallback
98
98
99 self.tagscache = None
99 self.tagscache = None
100 self.branchcache = None
100 self.branchcache = None
101 self.nodetagscache = None
101 self.nodetagscache = None
102 self.filterpats = {}
102 self.filterpats = {}
103 self.transhandle = None
103 self.transhandle = None
104
104
105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
105 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
106
106
107 def url(self):
107 def url(self):
108 return 'file:' + self.root
108 return 'file:' + self.root
109
109
110 def hook(self, name, throw=False, **args):
110 def hook(self, name, throw=False, **args):
111 def callhook(hname, funcname):
111 def callhook(hname, funcname):
112 '''call python hook. hook is callable object, looked up as
112 '''call python hook. hook is callable object, looked up as
113 name in python module. if callable returns "true", hook
113 name in python module. if callable returns "true", hook
114 fails, else passes. if hook raises exception, treated as
114 fails, else passes. if hook raises exception, treated as
115 hook failure. exception propagates if throw is "true".
115 hook failure. exception propagates if throw is "true".
116
116
117 reason for "true" meaning "hook failed" is so that
117 reason for "true" meaning "hook failed" is so that
118 unmodified commands (e.g. mercurial.commands.update) can
118 unmodified commands (e.g. mercurial.commands.update) can
119 be run as hooks without wrappers to convert return values.'''
119 be run as hooks without wrappers to convert return values.'''
120
120
121 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
121 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
122 obj = funcname
122 obj = funcname
123 if not callable(obj):
123 if not callable(obj):
124 d = funcname.rfind('.')
124 d = funcname.rfind('.')
125 if d == -1:
125 if d == -1:
126 raise util.Abort(_('%s hook is invalid ("%s" not in '
126 raise util.Abort(_('%s hook is invalid ("%s" not in '
127 'a module)') % (hname, funcname))
127 'a module)') % (hname, funcname))
128 modname = funcname[:d]
128 modname = funcname[:d]
129 try:
129 try:
130 obj = __import__(modname)
130 obj = __import__(modname)
131 except ImportError:
131 except ImportError:
132 try:
132 try:
133 # extensions are loaded with hgext_ prefix
133 # extensions are loaded with hgext_ prefix
134 obj = __import__("hgext_%s" % modname)
134 obj = __import__("hgext_%s" % modname)
135 except ImportError:
135 except ImportError:
136 raise util.Abort(_('%s hook is invalid '
136 raise util.Abort(_('%s hook is invalid '
137 '(import of "%s" failed)') %
137 '(import of "%s" failed)') %
138 (hname, modname))
138 (hname, modname))
139 try:
139 try:
140 for p in funcname.split('.')[1:]:
140 for p in funcname.split('.')[1:]:
141 obj = getattr(obj, p)
141 obj = getattr(obj, p)
142 except AttributeError, err:
142 except AttributeError, err:
143 raise util.Abort(_('%s hook is invalid '
143 raise util.Abort(_('%s hook is invalid '
144 '("%s" is not defined)') %
144 '("%s" is not defined)') %
145 (hname, funcname))
145 (hname, funcname))
146 if not callable(obj):
146 if not callable(obj):
147 raise util.Abort(_('%s hook is invalid '
147 raise util.Abort(_('%s hook is invalid '
148 '("%s" is not callable)') %
148 '("%s" is not callable)') %
149 (hname, funcname))
149 (hname, funcname))
150 try:
150 try:
151 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
151 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
152 except (KeyboardInterrupt, util.SignalInterrupt):
152 except (KeyboardInterrupt, util.SignalInterrupt):
153 raise
153 raise
154 except Exception, exc:
154 except Exception, exc:
155 if isinstance(exc, util.Abort):
155 if isinstance(exc, util.Abort):
156 self.ui.warn(_('error: %s hook failed: %s\n') %
156 self.ui.warn(_('error: %s hook failed: %s\n') %
157 (hname, exc.args[0]))
157 (hname, exc.args[0]))
158 else:
158 else:
159 self.ui.warn(_('error: %s hook raised an exception: '
159 self.ui.warn(_('error: %s hook raised an exception: '
160 '%s\n') % (hname, exc))
160 '%s\n') % (hname, exc))
161 if throw:
161 if throw:
162 raise
162 raise
163 self.ui.print_exc()
163 self.ui.print_exc()
164 return True
164 return True
165 if r:
165 if r:
166 if throw:
166 if throw:
167 raise util.Abort(_('%s hook failed') % hname)
167 raise util.Abort(_('%s hook failed') % hname)
168 self.ui.warn(_('warning: %s hook failed\n') % hname)
168 self.ui.warn(_('warning: %s hook failed\n') % hname)
169 return r
169 return r
170
170
171 def runhook(name, cmd):
171 def runhook(name, cmd):
172 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
172 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
173 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
173 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
174 r = util.system(cmd, environ=env, cwd=self.root)
174 r = util.system(cmd, environ=env, cwd=self.root)
175 if r:
175 if r:
176 desc, r = util.explain_exit(r)
176 desc, r = util.explain_exit(r)
177 if throw:
177 if throw:
178 raise util.Abort(_('%s hook %s') % (name, desc))
178 raise util.Abort(_('%s hook %s') % (name, desc))
179 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
179 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
180 return r
180 return r
181
181
182 r = False
182 r = False
183 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
183 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
184 if hname.split(".", 1)[0] == name and cmd]
184 if hname.split(".", 1)[0] == name and cmd]
185 hooks.sort()
185 hooks.sort()
186 for hname, cmd in hooks:
186 for hname, cmd in hooks:
187 if callable(cmd):
187 if callable(cmd):
188 r = callhook(hname, cmd) or r
188 r = callhook(hname, cmd) or r
189 elif cmd.startswith('python:'):
189 elif cmd.startswith('python:'):
190 r = callhook(hname, cmd[7:].strip()) or r
190 r = callhook(hname, cmd[7:].strip()) or r
191 else:
191 else:
192 r = runhook(hname, cmd) or r
192 r = runhook(hname, cmd) or r
193 return r
193 return r
194
194
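callhook above runs in-process hooks: the configured value is either a callable or a "python:module.attribute" path that gets imported and resolved, and the callable is invoked with ui, repo, hooktype plus the hook's keyword arguments; a truthy return value means the hook failed. A sketch of such a hook (the module and function names here are invented):

def log_changeset(ui, repo, hooktype, node=None, **kwargs):
    # Returning a true value would make callhook() treat the hook as failed.
    ui.status("%s hook saw changeset %s\n" % (hooktype, node))
    return False

# wired up in hgrc, using the python: prefix handled above:
#   [hooks]
#   commit.log = python:myhooks.log_changeset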
195 tag_disallowed = ':\r\n'
195 tag_disallowed = ':\r\n'
196
196
197 def _tag(self, name, node, message, local, user, date, parent=None):
197 def _tag(self, name, node, message, local, user, date, parent=None):
198 use_dirstate = parent is None
198 use_dirstate = parent is None
199
199
200 for c in self.tag_disallowed:
200 for c in self.tag_disallowed:
201 if c in name:
201 if c in name:
202 raise util.Abort(_('%r cannot be used in a tag name') % c)
202 raise util.Abort(_('%r cannot be used in a tag name') % c)
203
203
204 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
204 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
205
205
206 if local:
206 if local:
207 # local tags are stored in the current charset
207 # local tags are stored in the current charset
208 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
208 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
209 self.hook('tag', node=hex(node), tag=name, local=local)
209 self.hook('tag', node=hex(node), tag=name, local=local)
210 return
210 return
211
211
212 # committed tags are stored in UTF-8
212 # committed tags are stored in UTF-8
213 line = '%s %s\n' % (hex(node), util.fromlocal(name))
213 line = '%s %s\n' % (hex(node), util.fromlocal(name))
214 if use_dirstate:
214 if use_dirstate:
215 self.wfile('.hgtags', 'ab').write(line)
215 self.wfile('.hgtags', 'ab').write(line)
216 else:
216 else:
217 ntags = self.filectx('.hgtags', parent).data()
217 ntags = self.filectx('.hgtags', parent).data()
218 self.wfile('.hgtags', 'ab').write(ntags + line)
218 self.wfile('.hgtags', 'ab').write(ntags + line)
219 if use_dirstate and self.dirstate.state('.hgtags') == '?':
219 if use_dirstate and self.dirstate.state('.hgtags') == '?':
220 self.add(['.hgtags'])
220 self.add(['.hgtags'])
221
221
222 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
222 tagnode = self.commit(['.hgtags'], message, user, date, p1=parent)
223
223
224 self.hook('tag', node=hex(node), tag=name, local=local)
224 self.hook('tag', node=hex(node), tag=name, local=local)
225
225
226 return tagnode
226 return tagnode
227
227
228 def tag(self, name, node, message, local, user, date):
228 def tag(self, name, node, message, local, user, date):
229 '''tag a revision with a symbolic name.
229 '''tag a revision with a symbolic name.
230
230
231 if local is True, the tag is stored in a per-repository file.
231 if local is True, the tag is stored in a per-repository file.
232 otherwise, it is stored in the .hgtags file, and a new
232 otherwise, it is stored in the .hgtags file, and a new
233 changeset is committed with the change.
233 changeset is committed with the change.
234
234
235 keyword arguments:
235 keyword arguments:
236
236
237 local: whether to store tag in non-version-controlled file
237 local: whether to store tag in non-version-controlled file
238 (default False)
238 (default False)
239
239
240 message: commit message to use if committing
240 message: commit message to use if committing
241
241
242 user: name of user to use if committing
242 user: name of user to use if committing
243
243
244 date: date tuple to use if committing'''
244 date: date tuple to use if committing'''
245
245
246 for x in self.status()[:5]:
246 for x in self.status()[:5]:
247 if '.hgtags' in x:
247 if '.hgtags' in x:
248 raise util.Abort(_('working copy of .hgtags is changed '
248 raise util.Abort(_('working copy of .hgtags is changed '
249 '(please commit .hgtags manually)'))
249 '(please commit .hgtags manually)'))
250
250
251
251
252 self._tag(name, node, message, local, user, date)
252 self._tag(name, node, message, local, user, date)
253
253
254 def tags(self):
254 def tags(self):
255 '''return a mapping of tag to node'''
255 '''return a mapping of tag to node'''
256 if self.tagscache:
256 if self.tagscache:
257 return self.tagscache
257 return self.tagscache
258
258
259 globaltags = {}
259 globaltags = {}
260
260
261 def readtags(lines, fn):
261 def readtags(lines, fn):
262 filetags = {}
262 filetags = {}
263 count = 0
263 count = 0
264
264
265 def warn(msg):
265 def warn(msg):
266 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
266 self.ui.warn(_("%s, line %s: %s\n") % (fn, count, msg))
267
267
268 for l in lines:
268 for l in lines:
269 count += 1
269 count += 1
270 if not l:
270 if not l:
271 continue
271 continue
272 s = l.split(" ", 1)
272 s = l.split(" ", 1)
273 if len(s) != 2:
273 if len(s) != 2:
274 warn(_("cannot parse entry"))
274 warn(_("cannot parse entry"))
275 continue
275 continue
276 node, key = s
276 node, key = s
277 key = util.tolocal(key.strip()) # stored in UTF-8
277 key = util.tolocal(key.strip()) # stored in UTF-8
278 try:
278 try:
279 bin_n = bin(node)
279 bin_n = bin(node)
280 except TypeError:
280 except TypeError:
281 warn(_("node '%s' is not well formed") % node)
281 warn(_("node '%s' is not well formed") % node)
282 continue
282 continue
283 if bin_n not in self.changelog.nodemap:
283 if bin_n not in self.changelog.nodemap:
284 warn(_("tag '%s' refers to unknown node") % key)
284 warn(_("tag '%s' refers to unknown node") % key)
285 continue
285 continue
286
286
287 h = []
287 h = []
288 if key in filetags:
288 if key in filetags:
289 n, h = filetags[key]
289 n, h = filetags[key]
290 h.append(n)
290 h.append(n)
291 filetags[key] = (bin_n, h)
291 filetags[key] = (bin_n, h)
292
292
293 for k,nh in filetags.items():
293 for k,nh in filetags.items():
294 if k not in globaltags:
294 if k not in globaltags:
295 globaltags[k] = nh
295 globaltags[k] = nh
296 continue
296 continue
297 # we prefer the global tag if:
297 # we prefer the global tag if:
298 # it supersedes us OR
298 # it supersedes us OR
299 # mutual supersedes and it has a higher rank
299 # mutual supersedes and it has a higher rank
300 # otherwise we win because we're tip-most
300 # otherwise we win because we're tip-most
301 an, ah = nh
301 an, ah = nh
302 bn, bh = globaltags[k]
302 bn, bh = globaltags[k]
303 if bn != an and an in bh and \
303 if bn != an and an in bh and \
304 (bn not in ah or len(bh) > len(ah)):
304 (bn not in ah or len(bh) > len(ah)):
305 an = bn
305 an = bn
306 ah.append([n for n in bh if n not in ah])
306 ah.append([n for n in bh if n not in ah])
307 globaltags[k] = an, ah
307 globaltags[k] = an, ah
308
308
309 # read the tags file from each head, ending with the tip
309 # read the tags file from each head, ending with the tip
310 f = None
310 f = None
311 for rev, node, fnode in self._hgtagsnodes():
311 for rev, node, fnode in self._hgtagsnodes():
312 f = (f and f.filectx(fnode) or
312 f = (f and f.filectx(fnode) or
313 self.filectx('.hgtags', fileid=fnode))
313 self.filectx('.hgtags', fileid=fnode))
314 readtags(f.data().splitlines(), f)
314 readtags(f.data().splitlines(), f)
315
315
316 try:
316 try:
317 data = util.fromlocal(self.opener("localtags").read())
317 data = util.fromlocal(self.opener("localtags").read())
318 # localtags are stored in the local character set
318 # localtags are stored in the local character set
319 # while the internal tag table is stored in UTF-8
319 # while the internal tag table is stored in UTF-8
320 readtags(data.splitlines(), "localtags")
320 readtags(data.splitlines(), "localtags")
321 except IOError:
321 except IOError:
322 pass
322 pass
323
323
324 self.tagscache = {}
324 self.tagscache = {}
325 for k,nh in globaltags.items():
325 for k,nh in globaltags.items():
326 n = nh[0]
326 n = nh[0]
327 if n != nullid:
327 if n != nullid:
328 self.tagscache[k] = n
328 self.tagscache[k] = n
329 self.tagscache['tip'] = self.changelog.tip()
329 self.tagscache['tip'] = self.changelog.tip()
330
330
331 return self.tagscache
331 return self.tagscache
332
332
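readtags above consumes a simple line format, the same one _tag writes earlier: a 40-character hex node, one space, then the tag name (UTF-8 in .hgtags, local encoding in .hg/localtags), with malformed or unknown entries warned about and skipped. A standalone sketch of parsing such lines into a name-to-hex-node map:

def parse_tag_lines(lines):
    tags = {}
    for line in lines:
        line = line.strip()
        if not line:
            continue
        parts = line.split(" ", 1)
        if len(parts) != 2:
            continue          # readtags() emits a warning here instead
        node, name = parts
        tags[name.strip()] = node
    return tags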
333 def _hgtagsnodes(self):
333 def _hgtagsnodes(self):
334 heads = self.heads()
334 heads = self.heads()
335 heads.reverse()
335 heads.reverse()
336 last = {}
336 last = {}
337 ret = []
337 ret = []
338 for node in heads:
338 for node in heads:
339 c = self.changectx(node)
339 c = self.changectx(node)
340 rev = c.rev()
340 rev = c.rev()
341 try:
341 try:
342 fnode = c.filenode('.hgtags')
342 fnode = c.filenode('.hgtags')
343 except revlog.LookupError:
343 except revlog.LookupError:
344 continue
344 continue
345 ret.append((rev, node, fnode))
345 ret.append((rev, node, fnode))
346 if fnode in last:
346 if fnode in last:
347 ret[last[fnode]] = None
347 ret[last[fnode]] = None
348 last[fnode] = len(ret) - 1
348 last[fnode] = len(ret) - 1
349 return [item for item in ret if item]
349 return [item for item in ret if item]
350
350
351 def tagslist(self):
351 def tagslist(self):
352 '''return a list of tags ordered by revision'''
352 '''return a list of tags ordered by revision'''
353 l = []
353 l = []
354 for t, n in self.tags().items():
354 for t, n in self.tags().items():
355 try:
355 try:
356 r = self.changelog.rev(n)
356 r = self.changelog.rev(n)
357 except:
357 except:
358 r = -2 # sort to the beginning of the list if unknown
358 r = -2 # sort to the beginning of the list if unknown
359 l.append((r, t, n))
359 l.append((r, t, n))
360 l.sort()
360 l.sort()
361 return [(t, n) for r, t, n in l]
361 return [(t, n) for r, t, n in l]
362
362
363 def nodetags(self, node):
363 def nodetags(self, node):
364 '''return the tags associated with a node'''
364 '''return the tags associated with a node'''
365 if not self.nodetagscache:
365 if not self.nodetagscache:
366 self.nodetagscache = {}
366 self.nodetagscache = {}
367 for t, n in self.tags().items():
367 for t, n in self.tags().items():
368 self.nodetagscache.setdefault(n, []).append(t)
368 self.nodetagscache.setdefault(n, []).append(t)
369 return self.nodetagscache.get(node, [])
369 return self.nodetagscache.get(node, [])
370
370
371 def _branchtags(self):
371 def _branchtags(self):
372 partial, last, lrev = self._readbranchcache()
372 partial, last, lrev = self._readbranchcache()
373
373
374 tiprev = self.changelog.count() - 1
374 tiprev = self.changelog.count() - 1
375 if lrev != tiprev:
375 if lrev != tiprev:
376 self._updatebranchcache(partial, lrev+1, tiprev+1)
376 self._updatebranchcache(partial, lrev+1, tiprev+1)
377 self._writebranchcache(partial, self.changelog.tip(), tiprev)
377 self._writebranchcache(partial, self.changelog.tip(), tiprev)
378
378
379 return partial
379 return partial
380
380
381 def branchtags(self):
381 def branchtags(self):
382 if self.branchcache is not None:
382 if self.branchcache is not None:
383 return self.branchcache
383 return self.branchcache
384
384
385 self.branchcache = {} # avoid recursion in changectx
385 self.branchcache = {} # avoid recursion in changectx
386 partial = self._branchtags()
386 partial = self._branchtags()
387
387
388 # the branch cache is stored on disk as UTF-8, but in the local
388 # the branch cache is stored on disk as UTF-8, but in the local
389 # charset internally
389 # charset internally
390 for k, v in partial.items():
390 for k, v in partial.items():
391 self.branchcache[util.tolocal(k)] = v
391 self.branchcache[util.tolocal(k)] = v
392 return self.branchcache
392 return self.branchcache
393
393
394 def _readbranchcache(self):
394 def _readbranchcache(self):
395 partial = {}
395 partial = {}
396 try:
396 try:
397 f = self.opener("branch.cache")
397 f = self.opener("branch.cache")
398 lines = f.read().split('\n')
398 lines = f.read().split('\n')
399 f.close()
399 f.close()
400 except (IOError, OSError):
400 except (IOError, OSError):
401 return {}, nullid, nullrev
401 return {}, nullid, nullrev
402
402
403 try:
403 try:
404 last, lrev = lines.pop(0).split(" ", 1)
404 last, lrev = lines.pop(0).split(" ", 1)
405 last, lrev = bin(last), int(lrev)
405 last, lrev = bin(last), int(lrev)
406 if not (lrev < self.changelog.count() and
406 if not (lrev < self.changelog.count() and
407 self.changelog.node(lrev) == last): # sanity check
407 self.changelog.node(lrev) == last): # sanity check
408 # invalidate the cache
408 # invalidate the cache
409 raise ValueError('Invalid branch cache: unknown tip')
409 raise ValueError('Invalid branch cache: unknown tip')
410 for l in lines:
410 for l in lines:
411 if not l: continue
411 if not l: continue
412 node, label = l.split(" ", 1)
412 node, label = l.split(" ", 1)
413 partial[label.strip()] = bin(node)
413 partial[label.strip()] = bin(node)
414 except (KeyboardInterrupt, util.SignalInterrupt):
414 except (KeyboardInterrupt, util.SignalInterrupt):
415 raise
415 raise
416 except Exception, inst:
416 except Exception, inst:
417 if self.ui.debugflag:
417 if self.ui.debugflag:
418 self.ui.warn(str(inst), '\n')
418 self.ui.warn(str(inst), '\n')
419 partial, last, lrev = {}, nullid, nullrev
419 partial, last, lrev = {}, nullid, nullrev
420 return partial, last, lrev
420 return partial, last, lrev
421
421
422 def _writebranchcache(self, branches, tip, tiprev):
422 def _writebranchcache(self, branches, tip, tiprev):
423 try:
423 try:
424 f = self.opener("branch.cache", "w", atomictemp=True)
424 f = self.opener("branch.cache", "w", atomictemp=True)
425 f.write("%s %s\n" % (hex(tip), tiprev))
425 f.write("%s %s\n" % (hex(tip), tiprev))
426 for label, node in branches.iteritems():
426 for label, node in branches.iteritems():
427 f.write("%s %s\n" % (hex(node), label))
427 f.write("%s %s\n" % (hex(node), label))
428 f.rename()
428 f.rename()
429 except (IOError, OSError):
429 except (IOError, OSError):
430 pass
430 pass
431
431
432 def _updatebranchcache(self, partial, start, end):
432 def _updatebranchcache(self, partial, start, end):
433 for r in xrange(start, end):
433 for r in xrange(start, end):
434 c = self.changectx(r)
434 c = self.changectx(r)
435 b = c.branch()
435 b = c.branch()
436 partial[b] = c.node()
436 partial[b] = c.node()
437
437
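The three branch-cache helpers above agree on a small text format: the first line of branch.cache is "<hex tip node> <tip rev>", and every following line is "<hex node> <branch label>". A standalone sketch of writing and reading that layout, with hex strings standing in for binary nodes:

def format_branchcache(tip_hex, tiprev, branches):
    lines = ["%s %s" % (tip_hex, tiprev)]
    for label, node_hex in branches.items():
        lines.append("%s %s" % (node_hex, label))
    return "\n".join(lines) + "\n"

def parse_branchcache(text):
    lines = text.splitlines()
    tip_hex, tiprev = lines[0].split(" ", 1)
    heads = {}
    for line in lines[1:]:
        if not line:
            continue
        node_hex, label = line.split(" ", 1)
        heads[label.strip()] = node_hex
    return tip_hex, int(tiprev), heads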
438 def lookup(self, key):
438 def lookup(self, key):
439 if key == '.':
439 if key == '.':
440 key = self.dirstate.parents()[0]
440 key = self.dirstate.parents()[0]
441 if key == nullid:
441 if key == nullid:
442 raise repo.RepoError(_("no revision checked out"))
442 raise repo.RepoError(_("no revision checked out"))
443 elif key == 'null':
443 elif key == 'null':
444 return nullid
444 return nullid
445 n = self.changelog._match(key)
445 n = self.changelog._match(key)
446 if n:
446 if n:
447 return n
447 return n
448 if key in self.tags():
448 if key in self.tags():
449 return self.tags()[key]
449 return self.tags()[key]
450 if key in self.branchtags():
450 if key in self.branchtags():
451 return self.branchtags()[key]
451 return self.branchtags()[key]
452 n = self.changelog._partialmatch(key)
452 n = self.changelog._partialmatch(key)
453 if n:
453 if n:
454 return n
454 return n
455 raise repo.RepoError(_("unknown revision '%s'") % key)
455 raise repo.RepoError(_("unknown revision '%s'") % key)
456
456
457 def dev(self):
457 def dev(self):
458 return os.lstat(self.path).st_dev
458 return os.lstat(self.path).st_dev
459
459
460 def local(self):
460 def local(self):
461 return True
461 return True
462
462
463 def join(self, f):
463 def join(self, f):
464 return os.path.join(self.path, f)
464 return os.path.join(self.path, f)
465
465
466 def sjoin(self, f):
466 def sjoin(self, f):
467 f = self.encodefn(f)
467 f = self.encodefn(f)
468 return os.path.join(self.spath, f)
468 return os.path.join(self.spath, f)
469
469
470 def wjoin(self, f):
470 def wjoin(self, f):
471 return os.path.join(self.root, f)
471 return os.path.join(self.root, f)
472
472
473 def file(self, f):
473 def file(self, f):
474 if f[0] == '/':
474 if f[0] == '/':
475 f = f[1:]
475 f = f[1:]
476 return filelog.filelog(self.sopener, f)
476 return filelog.filelog(self.sopener, f)
477
477
478 def changectx(self, changeid=None):
478 def changectx(self, changeid=None):
479 return context.changectx(self, changeid)
479 return context.changectx(self, changeid)
480
480
481 def workingctx(self):
481 def workingctx(self):
482 return context.workingctx(self)
482 return context.workingctx(self)
483
483
484 def parents(self, changeid=None):
484 def parents(self, changeid=None):
485 '''
485 '''
486 get list of changectxs for parents of changeid or working directory
486 get list of changectxs for parents of changeid or working directory
487 '''
487 '''
488 if changeid is None:
488 if changeid is None:
489 pl = self.dirstate.parents()
489 pl = self.dirstate.parents()
490 else:
490 else:
491 n = self.changelog.lookup(changeid)
491 n = self.changelog.lookup(changeid)
492 pl = self.changelog.parents(n)
492 pl = self.changelog.parents(n)
493 if pl[1] == nullid:
493 if pl[1] == nullid:
494 return [self.changectx(pl[0])]
494 return [self.changectx(pl[0])]
495 return [self.changectx(pl[0]), self.changectx(pl[1])]
495 return [self.changectx(pl[0]), self.changectx(pl[1])]
496
496
497 def filectx(self, path, changeid=None, fileid=None):
497 def filectx(self, path, changeid=None, fileid=None):
498 """changeid can be a changeset revision, node, or tag.
498 """changeid can be a changeset revision, node, or tag.
499 fileid can be a file revision or node."""
499 fileid can be a file revision or node."""
500 return context.filectx(self, path, changeid, fileid)
500 return context.filectx(self, path, changeid, fileid)
501
501
502 def getcwd(self):
502 def getcwd(self):
503 return self.dirstate.getcwd()
503 return self.dirstate.getcwd()
504
504
505 def wfile(self, f, mode='r'):
505 def wfile(self, f, mode='r'):
506 return self.wopener(f, mode)
506 return self.wopener(f, mode)
507
507
508 def _link(self, f):
508 def _link(self, f):
509 return os.path.islink(self.wjoin(f))
509 return os.path.islink(self.wjoin(f))
510
510
511 def _filter(self, filter, filename, data):
511 def _filter(self, filter, filename, data):
512 if filter not in self.filterpats:
512 if filter not in self.filterpats:
513 l = []
513 l = []
514 for pat, cmd in self.ui.configitems(filter):
514 for pat, cmd in self.ui.configitems(filter):
515 mf = util.matcher(self.root, "", [pat], [], [])[1]
515 mf = util.matcher(self.root, "", [pat], [], [])[1]
516 l.append((mf, cmd))
516 l.append((mf, cmd))
517 self.filterpats[filter] = l
517 self.filterpats[filter] = l
518
518
519 for mf, cmd in self.filterpats[filter]:
519 for mf, cmd in self.filterpats[filter]:
520 if mf(filename):
520 if mf(filename):
521 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
521 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
522 data = util.filter(data, cmd)
522 data = util.filter(data, cmd)
523 break
523 break
524
524
525 return data
525 return data
526
526
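_filter above lazily compiles the [encode]/[decode] config sections into (matcher, command) pairs and pipes file data through the first command whose pattern matches. A very loose standalone sketch of that idea; Mercurial's util.matcher understands its own pattern syntax and util.filter has its own command handling, so plain fnmatch and subprocess are used here purely for illustration:

import fnmatch
import subprocess

def run_filters(filename, data, filterpats):
    # filterpats: list of (glob_pattern, shell_command) pairs
    for pat, cmd in filterpats:
        if fnmatch.fnmatch(filename, pat):
            proc = subprocess.Popen(cmd, shell=True,
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)
            data = proc.communicate(data)[0]
            break
    return data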
527 def wread(self, filename):
527 def wread(self, filename):
528 if self._link(filename):
528 if self._link(filename):
529 data = os.readlink(self.wjoin(filename))
529 data = os.readlink(self.wjoin(filename))
530 else:
530 else:
531 data = self.wopener(filename, 'r').read()
531 data = self.wopener(filename, 'r').read()
532 return self._filter("encode", filename, data)
532 return self._filter("encode", filename, data)
533
533
534 def wwrite(self, filename, data, flags):
534 def wwrite(self, filename, data, flags):
535 data = self._filter("decode", filename, data)
535 data = self._filter("decode", filename, data)
536 if "l" in flags:
536 if "l" in flags:
537 f = self.wjoin(filename)
537 f = self.wjoin(filename)
538 try:
538 try:
539 os.unlink(f)
539 os.unlink(f)
540 except OSError:
540 except OSError:
541 pass
541 pass
542 d = os.path.dirname(f)
542 d = os.path.dirname(f)
543 if not os.path.exists(d):
543 if not os.path.exists(d):
544 os.makedirs(d)
544 os.makedirs(d)
545 os.symlink(data, f)
545 os.symlink(data, f)
546 else:
546 else:
547 try:
547 try:
548 if self._link(filename):
548 if self._link(filename):
549 os.unlink(self.wjoin(filename))
549 os.unlink(self.wjoin(filename))
550 except OSError:
550 except OSError:
551 pass
551 pass
552 self.wopener(filename, 'w').write(data)
552 self.wopener(filename, 'w').write(data)
553 util.set_exec(self.wjoin(filename), "x" in flags)
553 util.set_exec(self.wjoin(filename), "x" in flags)
554
554
555 def wwritedata(self, filename, data):
555 def wwritedata(self, filename, data):
556 return self._filter("decode", filename, data)
556 return self._filter("decode", filename, data)
557
557
558 def transaction(self):
558 def transaction(self):
559 tr = self.transhandle
559 tr = self.transhandle
560 if tr != None and tr.running():
560 if tr != None and tr.running():
561 return tr.nest()
561 return tr.nest()
562
562
563 # save dirstate for rollback
563 # save dirstate for rollback
564 try:
564 try:
565 ds = self.opener("dirstate").read()
565 ds = self.opener("dirstate").read()
566 except IOError:
566 except IOError:
567 ds = ""
567 ds = ""
568 self.opener("journal.dirstate", "w").write(ds)
568 self.opener("journal.dirstate", "w").write(ds)
569
569
570 renames = [(self.sjoin("journal"), self.sjoin("undo")),
570 renames = [(self.sjoin("journal"), self.sjoin("undo")),
571 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
571 (self.join("journal.dirstate"), self.join("undo.dirstate"))]
572 tr = transaction.transaction(self.ui.warn, self.sopener,
572 tr = transaction.transaction(self.ui.warn, self.sopener,
573 self.sjoin("journal"),
573 self.sjoin("journal"),
574 aftertrans(renames))
574 aftertrans(renames))
575 self.transhandle = tr
575 self.transhandle = tr
576 return tr
576 return tr

    def recover(self):
        l = self.lock()
        if os.path.exists(self.sjoin("journal")):
            self.ui.status(_("rolling back interrupted transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("journal"))
            self.reload()
            return True
        else:
            self.ui.warn(_("no interrupted transaction available\n"))
            return False

    def rollback(self, wlock=None, lock=None):
        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        if os.path.exists(self.sjoin("undo")):
            self.ui.status(_("rolling back last transaction\n"))
            transaction.rollback(self.sopener, self.sjoin("undo"))
            util.rename(self.join("undo.dirstate"), self.join("dirstate"))
            self.reload()
            self.wreload()
        else:
            self.ui.warn(_("no rollback information available\n"))

    def wreload(self):
        self.dirstate.reload()

    def reload(self):
        self.changelog.load()
        self.manifest.load()
        self.tagscache = None
        self.nodetagscache = None

    def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
                desc=None):
        try:
            l = lock.lock(lockname, 0, releasefn, desc=desc)
        except lock.LockHeld, inst:
            if not wait:
                raise
            self.ui.warn(_("waiting for lock on %s held by %r\n") %
                         (desc, inst.locker))
            # default to 600 seconds timeout
            l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
                          releasefn, desc=desc)
        if acquirefn:
            acquirefn()
        return l

    def lock(self, wait=1):
        return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
                            desc=_('repository %s') % self.origroot)

    def wlock(self, wait=1):
        return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
                            self.wreload,
                            desc=_('working directory of %s') % self.origroot)

    def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
        """
        commit an individual file as part of a larger transaction
        """

        t = self.wread(fn)
        fl = self.file(fn)
        fp1 = manifest1.get(fn, nullid)
        fp2 = manifest2.get(fn, nullid)

        meta = {}
        cp = self.dirstate.copied(fn)
        if cp:
            # Mark the new revision of this file as a copy of another
            # file. This copy data will effectively act as a parent
            # of this new revision. If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent. For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4        as the merge base
            #
            meta["copy"] = cp
            if not manifest2: # not a branch merge
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
                fp2 = nullid
            elif fp2 != nullid: # copied on remote side
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            elif fp1 != nullid: # copied on local side, reversed
                meta["copyrev"] = hex(manifest2.get(cp))
                fp2 = fp1
            else: # directory rename
                meta["copyrev"] = hex(manifest1.get(cp, nullid))
            self.ui.debug(_(" %s: copy %s:%s\n") %
                          (fn, cp, meta["copyrev"]))
            fp1 = nullid
        elif fp2 != nullid:
            # is one parent an ancestor of the other?
            fpa = fl.ancestor(fp1, fp2)
            if fpa == fp1:
                fp1, fp2 = fp2, nullid
            elif fpa == fp2:
                fp2 = nullid

        # is the file unmodified from the parent? report existing entry
        if fp2 == nullid and not fl.cmp(fp1, t):
            return fp1

        changelist.append(fn)
        return fl.add(t, meta, transaction, linkrev, fp1, fp2)

    def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None, extra={}):
        if p1 is None:
            p1, p2 = self.dirstate.parents()
        return self.commit(files=files, text=text, user=user, date=date,
                           p1=p1, p2=p2, wlock=wlock, extra=extra)

    def commit(self, files=None, text="", user=None, date=None,
               match=util.always, force=False, lock=None, wlock=None,
               force_editor=False, p1=None, p2=None, extra={}):

        commit = []
        remove = []
        changed = []
        use_dirstate = (p1 is None) # not rawcommit
        extra = extra.copy()

        if use_dirstate:
            if files:
                for f in files:
                    s = self.dirstate.state(f)
                    if s in 'nmai':
                        commit.append(f)
                    elif s == 'r':
                        remove.append(f)
                    else:
                        self.ui.warn(_("%s not tracked!\n") % f)
            else:
                changes = self.status(match=match)[:5]
                modified, added, removed, deleted, unknown = changes
                commit = modified + added
                remove = removed
        else:
            commit = files

        if use_dirstate:
            p1, p2 = self.dirstate.parents()
            update_dirstate = True
        else:
            p1, p2 = p1, p2 or nullid
            update_dirstate = (self.dirstate.parents()[0] == p1)

        c1 = self.changelog.read(p1)
        c2 = self.changelog.read(p2)
        m1 = self.manifest.read(c1[0]).copy()
        m2 = self.manifest.read(c2[0])

        if use_dirstate:
            branchname = self.workingctx().branch()
            try:
                branchname = branchname.decode('UTF-8').encode('UTF-8')
            except UnicodeDecodeError:
                raise util.Abort(_('branch name not in UTF-8!'))
        else:
            branchname = ""

        if use_dirstate:
            oldname = c1[5].get("branch") # stored in UTF-8
            if not commit and not remove and not force and p2 == nullid and \
                   branchname == oldname:
                self.ui.status(_("nothing changed\n"))
                return None

        xp1 = hex(p1)
        if p2 == nullid: xp2 = ''
        else: xp2 = hex(p2)

        self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)

        if not wlock:
            wlock = self.wlock()
        if not lock:
            lock = self.lock()
        tr = self.transaction()

        # check in files
        new = {}
        linkrev = self.changelog.count()
        commit.sort()
        is_exec = util.execfunc(self.root, m1.execf)
        is_link = util.linkfunc(self.root, m1.linkf)
        for f in commit:
            self.ui.note(f + "\n")
            try:
                new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
                m1.set(f, is_exec(f), is_link(f))
            except (OSError, IOError):
                if use_dirstate:
                    self.ui.warn(_("trouble committing %s!\n") % f)
                    raise
                else:
                    remove.append(f)

        # update manifest
        m1.update(new)
        remove.sort()
        removed = []

        for f in remove:
            if f in m1:
                del m1[f]
                removed.append(f)
        mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, removed))

        # add changeset
        new = new.keys()
        new.sort()

        user = user or self.ui.username()
        if not text or force_editor:
            edittext = []
            if text:
                edittext.append(text)
            edittext.append("")
            edittext.append("HG: user: %s" % user)
            if p2 != nullid:
                edittext.append("HG: branch merge")
            if branchname:
                edittext.append("HG: branch %s" % util.tolocal(branchname))
            edittext.extend(["HG: changed %s" % f for f in changed])
            edittext.extend(["HG: removed %s" % f for f in removed])
            if not changed and not remove:
                edittext.append("HG: no files changed")
            edittext.append("")
            # run editor in the repository root
            olddir = os.getcwd()
            os.chdir(self.root)
            text = self.ui.edit("\n".join(edittext), user)
            os.chdir(olddir)

        lines = [line.rstrip() for line in text.rstrip().splitlines()]
        while lines and not lines[0]:
            del lines[0]
        if not lines:
            return None
        text = '\n'.join(lines)
        if branchname:
            extra["branch"] = branchname
        n = self.changelog.add(mn, changed + removed, text, tr, p1, p2,
                               user, date, extra)
        self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
                  parent2=xp2)
        tr.close()

        if self.branchcache and "branch" in extra:
            self.branchcache[util.tolocal(extra["branch"])] = n

        if use_dirstate or update_dirstate:
            self.dirstate.setparents(n)
            if use_dirstate:
                self.dirstate.update(new, "n")
                self.dirstate.forget(removed)

        self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
        return n

    def walk(self, node=None, files=[], match=util.always, badmatch=None):
        '''
        walk recursively through the directory tree or a given
        changeset, finding all files matched by the match
        function

        results are yielded in a tuple (src, filename), where src
        is one of:
          'f' the file was found in the directory tree
          'm' the file was only in the dirstate and not in the tree
          'b' file was not found and matched badmatch
        '''

        if node:
            fdict = dict.fromkeys(files)
            # for dirstate.walk, files=['.'] means "walk the whole tree".
            # follow that here, too
            fdict.pop('.', None)
            mdict = self.manifest.read(self.changelog.read(node)[0])
            mfiles = mdict.keys()
            mfiles.sort()
            for fn in mfiles:
                for ffn in fdict:
                    # match if the file is the exact name or a directory
                    if ffn == fn or fn.startswith("%s/" % ffn):
                        del fdict[ffn]
                        break
                if match(fn):
                    yield 'm', fn
            ffiles = fdict.keys()
            ffiles.sort()
            for fn in ffiles:
                if badmatch and badmatch(fn):
                    if match(fn):
                        yield 'b', fn
                else:
                    self.ui.warn(_('%s: No such file in rev %s\n') % (
                        util.pathto(self.root, self.getcwd(), fn), short(node)))
        else:
            for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
                yield src, fn

    def status(self, node1=None, node2=None, files=[], match=util.always,
               wlock=None, list_ignored=False, list_clean=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.
        """

        def fcmp(fn, getnode):
            t1 = self.wread(fn)
            return self.file(fn).cmp(getnode(fn), t1)

        def mfmatches(node):
            change = self.changelog.read(node)
            mf = self.manifest.read(change[0]).copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        modified, added, removed, deleted, unknown = [], [], [], [], []
        ignored, clean = [], []

        compareworking = False
        if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
            compareworking = True

        if not compareworking:
            # read the manifest from node1 before the manifest from node2,
            # so that we'll hit the manifest cache if we're going through
            # all the revisions in parent->child order.
            mf1 = mfmatches(node1)

        mywlock = False

        # are we comparing the working directory?
        if not node2:
            (lookup, modified, added, removed, deleted, unknown,
             ignored, clean) = self.dirstate.status(files, match,
                                                    list_ignored, list_clean)

            # are we comparing working dir against its parent?
            if compareworking:
                if lookup:
                    # do a full compare of any files that might have changed
                    mnode = self.changelog.read(self.dirstate.parents()[0])[0]
                    getnode = lambda fn: (self.manifest.find(mnode, fn)[0] or
                                          nullid)
                    for f in lookup:
                        if fcmp(f, getnode):
                            modified.append(f)
                        else:
                            clean.append(f)
                            if not wlock and not mywlock:
                                mywlock = True
                                try:
                                    wlock = self.wlock(wait=0)
                                except lock.LockException:
                                    pass
                            if wlock:
                                self.dirstate.update([f], "n")
            else:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                # XXX: create it in dirstate.py ?
                mf2 = mfmatches(self.dirstate.parents()[0])
                is_exec = util.execfunc(self.root, mf2.execf)
                is_link = util.linkfunc(self.root, mf2.linkf)
                for f in lookup + modified + added:
                    mf2[f] = ""
                    mf2.set(f, is_exec(f), is_link(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]

            if mywlock and wlock:
                wlock.release()
        else:
            # we are comparing two revisions
            mf2 = mfmatches(node2)

        if not compareworking:
            # flush lists from dirstate before comparing manifests
            modified, added, clean = [], [], []

            # make sure to sort the files so we talk to the disk in a
            # reasonable order
            mf2keys = mf2.keys()
            mf2keys.sort()
            getnode = lambda fn: mf1.get(fn, nullid)
            for fn in mf2keys:
                if mf1.has_key(fn):
                    if mf1.flags(fn) != mf2.flags(fn) or \
                       (mf1[fn] != mf2[fn] and (mf2[fn] != "" or
                                                fcmp(fn, getnode))):
                        modified.append(fn)
                    elif list_clean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)

            removed = mf1.keys()

        # sort and return results:
        for l in modified, added, removed, deleted, unknown, ignored, clean:
            l.sort()
        return (modified, added, removed, deleted, unknown, ignored, clean)

    def add(self, list, wlock=None):
        if not wlock:
            wlock = self.wlock()
        for f in list:
            p = self.wjoin(f)
            islink = os.path.islink(p)
            size = os.lstat(p).st_size
            if size > 10000000:
                self.ui.warn(_("%s: files over 10MB may cause memory and"
                               " performance problems\n"
                               "(use 'hg revert %s' to unadd the file)\n")
                               % (f, f))
            if not islink and not os.path.exists(p):
                self.ui.warn(_("%s does not exist!\n") % f)
            elif not islink and not os.path.isfile(p):
                self.ui.warn(_("%s not added: only files and symlinks "
                               "supported currently\n") % f)
            elif self.dirstate.state(f) in 'an':
                self.ui.warn(_("%s already tracked!\n") % f)
            else:
                self.dirstate.update([f], "a")

    def forget(self, list, wlock=None):
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if self.dirstate.state(f) not in 'ai':
                self.ui.warn(_("%s not added!\n") % f)
            else:
                self.dirstate.forget([f])

    def remove(self, list, unlink=False, wlock=None):
        if unlink:
            for f in list:
                try:
                    util.unlink(self.wjoin(f))
                except OSError, inst:
                    if inst.errno != errno.ENOENT:
                        raise
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if unlink and os.path.exists(self.wjoin(f)):
                self.ui.warn(_("%s still exists!\n") % f)
            elif self.dirstate.state(f) == 'a':
                self.dirstate.forget([f])
            elif f not in self.dirstate:
                self.ui.warn(_("%s not tracked!\n") % f)
            else:
                self.dirstate.update([f], "r")

    def undelete(self, list, wlock=None):
        p = self.dirstate.parents()[0]
        mn = self.changelog.read(p)[0]
        m = self.manifest.read(mn)
        if not wlock:
            wlock = self.wlock()
        for f in list:
            if self.dirstate.state(f) not in "r":
                self.ui.warn("%s not removed!\n" % f)
            else:
                t = self.file(f).read(m[f])
                self.wwrite(f, t, m.flags(f))
                self.dirstate.update([f], "n")

    def copy(self, source, dest, wlock=None):
        p = self.wjoin(dest)
        if not (os.path.exists(p) or os.path.islink(p)):
            self.ui.warn(_("%s does not exist!\n") % dest)
        elif not (os.path.isfile(p) or os.path.islink(p)):
            self.ui.warn(_("copy failed: %s is not a file or a "
                           "symbolic link\n") % dest)
        else:
            if not wlock:
                wlock = self.wlock()
            if self.dirstate.state(dest) == '?':
                self.dirstate.update([dest], "a")
            self.dirstate.copy(source, dest)

    def heads(self, start=None):
        heads = self.changelog.heads(start)
        # sort the output in rev descending order
        heads = [(-self.changelog.rev(h), h) for h in heads]
        heads.sort()
        return [n for (r, n) in heads]

    def branches(self, nodes):
        if not nodes:
            nodes = [self.changelog.tip()]
        b = []
        for n in nodes:
            t = n
            while 1:
                p = self.changelog.parents(n)
                if p[1] != nullid or p[0] == nullid:
                    b.append((t, n, p[0], p[1]))
                    break
                n = p[0]
        return b

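    # between() serves the wire-protocol discovery step: for each (top, bottom)
    # pair it walks first parents from top towards bottom and records nodes at
    # exponentially growing distances (1, 2, 4, ...), giving the caller a
    # sparse skeleton of the branch to narrow down (see findincoming below).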
    def between(self, pairs):
        r = []

        for top, bottom in pairs:
            n, l, i = top, [], 0
            f = 1

            while n != bottom:
                p = self.changelog.parents(n)[0]
                if i == f:
                    l.append(n)
                    f = f * 2
                n = p
                i += 1

            r.append(l)

        return r

    def findincoming(self, remote, base=None, heads=None, force=False):
        """Return list of roots of the subsets of missing nodes from remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side and that no child of a node of base exists
        in both remote and self.
        Furthermore, base will be updated to include the nodes that exist
        in both self and remote but whose children are not in both.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads.

        All the ancestors of base are in self and in remote.
        All the descendants of the list returned are missing in self.
        (and so we know that the rest of the nodes are missing in remote, see
        outgoing)
        """
        m = self.changelog.nodemap
        search = []
        fetch = {}
        seen = {}
        seenbranch = {}
        if base == None:
            base = {}

        if not heads:
            heads = remote.heads()

        if self.changelog.tip() == nullid:
            base[nullid] = 1
            if heads != [nullid]:
                return [nullid]
            return []

        # assume we're closer to the tip than the root
        # and start by examining the heads
        self.ui.status(_("searching for changes\n"))

        unknown = []
        for h in heads:
            if h not in m:
                unknown.append(h)
            else:
                base[h] = 1

        if not unknown:
            return []

        req = dict.fromkeys(unknown)
        reqcnt = 0

        # search through remote branches
        # a 'branch' here is a linear segment of history, with four parts:
        # head, root, first parent, second parent
        # (a branch always has two parents (or none) by definition)
        unknown = remote.branches(unknown)
        while unknown:
            r = []
            while unknown:
                n = unknown.pop(0)
                if n[0] in seen:
                    continue

                self.ui.debug(_("examining %s:%s\n")
                              % (short(n[0]), short(n[1])))
                if n[0] == nullid: # found the end of the branch
                    pass
                elif n in seenbranch:
                    self.ui.debug(_("branch already found\n"))
                    continue
                elif n[1] and n[1] in m: # do we know the base?
                    self.ui.debug(_("found incomplete branch %s:%s\n")
                                  % (short(n[0]), short(n[1])))
                    search.append(n) # schedule branch range for scanning
                    seenbranch[n] = 1
                else:
                    if n[1] not in seen and n[1] not in fetch:
                        if n[2] in m and n[3] in m:
                            self.ui.debug(_("found new changeset %s\n") %
                                          short(n[1]))
                            fetch[n[1]] = 1 # earliest unknown
                        for p in n[2:4]:
                            if p in m:
                                base[p] = 1 # latest known

                    for p in n[2:4]:
                        if p not in req and p not in m:
                            r.append(p)
                            req[p] = 1
                seen[n[0]] = 1

            if r:
                reqcnt += 1
                self.ui.debug(_("request %d: %s\n") %
                              (reqcnt, " ".join(map(short, r))))
                for p in xrange(0, len(r), 10):
                    for b in remote.branches(r[p:p+10]):
                        self.ui.debug(_("received %s:%s\n") %
                                      (short(b[0]), short(b[1])))
                        unknown.append(b)

        # do binary search on the branches we found
        while search:
            n = search.pop(0)
            reqcnt += 1
            l = remote.between([(n[0], n[1])])[0]
            l.append(n[1])
            p = n[0]
            f = 1
            for i in l:
                self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
                if i in m:
                    if f <= 2:
                        self.ui.debug(_("found new branch changeset %s\n") %
                                      short(p))
                        fetch[p] = 1
                        base[i] = 1
                    else:
                        self.ui.debug(_("narrowed branch search to %s:%s\n")
                                      % (short(p), short(i)))
                        search.append((p, i))
                    break
                p, f = i, f * 2

        # sanity check our fetch list
        for f in fetch.keys():
            if f in m:
                raise repo.RepoError(_("already have changeset ") + short(f[:4]))

        if base.keys() == [nullid]:
            if force:
                self.ui.warn(_("warning: repository is unrelated\n"))
            else:
                raise util.Abort(_("repository is unrelated"))

        self.ui.debug(_("found new changesets starting at ") +
                      " ".join([short(f) for f in fetch]) + "\n")

        self.ui.debug(_("%d total queries\n") % reqcnt)

        return fetch.keys()

    def findoutgoing(self, remote, base=None, heads=None, force=False):
        """Return list of nodes that are roots of subsets not in remote

        If base dict is specified, assume that these nodes and their parents
        exist on the remote side.
        If a list of heads is specified, return only nodes which are heads
        or ancestors of these heads, and return a second element which
        contains all remote heads which get new children.
        """
        if base == None:
            base = {}
            self.findincoming(remote, base, heads, force=force)

        self.ui.debug(_("common changesets up to ")
                      + " ".join(map(short, base.keys())) + "\n")

        remain = dict.fromkeys(self.changelog.nodemap)

        # prune everything remote has from the tree
        del remain[nullid]
        remove = base.keys()
        while remove:
            n = remove.pop(0)
            if n in remain:
                del remain[n]
                for p in self.changelog.parents(n):
                    remove.append(p)

        # find every node whose parents have been pruned
        subset = []
        # find every remote head that will get new children
        updated_heads = {}
        for n in remain:
            p1, p2 = self.changelog.parents(n)
            if p1 not in remain and p2 not in remain:
                subset.append(n)
            if heads:
                if p1 in heads:
                    updated_heads[p1] = True
                if p2 in heads:
                    updated_heads[p2] = True

        # this is the set of all roots we have to push
        if heads:
            return subset, updated_heads.keys()
        else:
            return subset

    def pull(self, remote, heads=None, force=False, lock=None):
        mylock = False
        if not lock:
            lock = self.lock()
            mylock = True

        try:
            fetch = self.findincoming(remote, force=force)
            if fetch == [nullid]:
                self.ui.status(_("requesting all changes\n"))

            if not fetch:
                self.ui.status(_("no changes found\n"))
                return 0

            if heads is None:
                cg = remote.changegroup(fetch, 'pull')
            else:
                if 'changegroupsubset' not in remote.capabilities:
                    raise util.Abort(_("Partial pull cannot be done because other repository doesn't support changegroupsubset."))
                cg = remote.changegroupsubset(fetch, heads, 'pull')
            return self.addchangegroup(cg, 'pull', remote.url())
        finally:
            if mylock:
                lock.release()

    def push(self, remote, force=False, revs=None):
        # there are two ways to push to remote repo:
        #
        # addchangegroup assumes local user can lock remote
        # repo (local filesystem, old ssh servers).
        #
        # unbundle assumes local user cannot lock remote repo (new ssh
        # servers, http servers).

        if remote.capable('unbundle'):
            return self.push_unbundle(remote, force, revs)
        return self.push_addchangegroup(remote, force, revs)

    def prepush(self, remote, force, revs):
        base = {}
        remote_heads = remote.heads()
        inc = self.findincoming(remote, base, remote_heads, force=force)

        update, updated_heads = self.findoutgoing(remote, base, remote_heads)
        if revs is not None:
            msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
        else:
            bases, heads = update, self.changelog.heads()

        if not bases:
            self.ui.status(_("no changes found\n"))
            return None, 1
        elif not force:
            # check if we're creating new remote heads
            # to be a remote head after push, node must be either
            # - unknown locally
            # - a local outgoing head descended from update
            # - a remote head that's known locally and not
            #   ancestral to an outgoing head

            warn = 0

            if remote_heads == [nullid]:
                warn = 0
            elif not revs and len(heads) > len(remote_heads):
                warn = 1
            else:
                newheads = list(heads)
                for r in remote_heads:
                    if r in self.changelog.nodemap:
                        desc = self.changelog.heads(r, heads)
                        l = [h for h in heads if h in desc]
                        if not l:
                            newheads.append(r)
                    else:
                        newheads.append(r)
                if len(newheads) > len(remote_heads):
                    warn = 1

            if warn:
                self.ui.warn(_("abort: push creates new remote branches!\n"))
                self.ui.status(_("(did you forget to merge?"
                                 " use push -f to force)\n"))
                return None, 1
        elif inc:
            self.ui.warn(_("note: unsynced remote changes!\n"))


        if revs is None:
            cg = self.changegroup(update, 'push')
        else:
            cg = self.changegroupsubset(update, revs, 'push')
        return cg, remote_heads

    def push_addchangegroup(self, remote, force, revs):
        lock = remote.lock()

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            return remote.addchangegroup(cg, 'push', self.url())
        return ret[1]

    def push_unbundle(self, remote, force, revs):
        # local repo finds heads on server, finds out what revs it
        # must push. once revs transferred, if server finds it has
        # different heads (someone else won commit/push race), server
        # aborts.

        ret = self.prepush(remote, force, revs)
        if ret[0] is not None:
            cg, remote_heads = ret
            if force: remote_heads = ['force']
            return remote.unbundle(cg, remote_heads, 'push')
        return ret[1]

    def changegroupinfo(self, nodes):
        self.ui.note(_("%d changesets found\n") % len(nodes))
        if self.ui.debugflag:
            self.ui.debug(_("List of changesets:\n"))
            for node in nodes:
                self.ui.debug("%s\n" % hex(node))

    def changegroupsubset(self, bases, heads, source):
        """This function generates a changegroup consisting of all the nodes
        that are descendants of any of the bases, and ancestors of any of
        the heads.

        It is fairly complex as determining which filenodes and which
        manifest nodes need to be included for the changeset to be complete
        is non-trivial.

        Another wrinkle is doing the reverse, figuring out which changeset in
        the changegroup a particular filenode or manifestnode belongs to."""
1440
1446
1441 self.hook('preoutgoing', throw=True, source=source)
1447 self.hook('preoutgoing', throw=True, source=source)
1442
1448
1443 # Set up some initial variables
1449 # Set up some initial variables
1444 # Make it easy to refer to self.changelog
1450 # Make it easy to refer to self.changelog
1445 cl = self.changelog
1451 cl = self.changelog
1446 # msng is short for missing - compute the list of changesets in this
1452 # msng is short for missing - compute the list of changesets in this
1447 # changegroup.
1453 # changegroup.
1448 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1454 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1449 self.changegroupinfo(msng_cl_lst)
1455 self.changegroupinfo(msng_cl_lst)
1450 # Some bases may turn out to be superfluous, and some heads may be
1456 # Some bases may turn out to be superfluous, and some heads may be
1451 # too. nodesbetween will return the minimal set of bases and heads
1457 # too. nodesbetween will return the minimal set of bases and heads
1452 # necessary to re-create the changegroup.
1458 # necessary to re-create the changegroup.
1453
1459
1454 # Known heads are the list of heads that it is assumed the recipient
1460 # Known heads are the list of heads that it is assumed the recipient
1455 # of this changegroup will know about.
1461 # of this changegroup will know about.
1456 knownheads = {}
1462 knownheads = {}
1457 # We assume that all parents of bases are known heads.
1463 # We assume that all parents of bases are known heads.
1458 for n in bases:
1464 for n in bases:
1459 for p in cl.parents(n):
1465 for p in cl.parents(n):
1460 if p != nullid:
1466 if p != nullid:
1461 knownheads[p] = 1
1467 knownheads[p] = 1
1462 knownheads = knownheads.keys()
1468 knownheads = knownheads.keys()
1463 if knownheads:
1469 if knownheads:
1464 # Now that we know what heads are known, we can compute which
1470 # Now that we know what heads are known, we can compute which
1465 # changesets are known. The recipient must know about all
1471 # changesets are known. The recipient must know about all
1466 # changesets required to reach the known heads from the null
1472 # changesets required to reach the known heads from the null
1467 # changeset.
1473 # changeset.
1468 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1474 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1469 junk = None
1475 junk = None
1470 # Transform the list into an ersatz set.
1476 # Transform the list into an ersatz set.
1471 has_cl_set = dict.fromkeys(has_cl_set)
1477 has_cl_set = dict.fromkeys(has_cl_set)
1472 else:
1478 else:
1473 # If there were no known heads, the recipient cannot be assumed to
1479 # If there were no known heads, the recipient cannot be assumed to
1474 # know about any changesets.
1480 # know about any changesets.
1475 has_cl_set = {}
1481 has_cl_set = {}
1476
1482
1477 # Make it easy to refer to self.manifest
1483 # Make it easy to refer to self.manifest
1478 mnfst = self.manifest
1484 mnfst = self.manifest
1479 # We don't know which manifests are missing yet
1485 # We don't know which manifests are missing yet
1480 msng_mnfst_set = {}
1486 msng_mnfst_set = {}
1481 # Nor do we know which filenodes are missing.
1487 # Nor do we know which filenodes are missing.
1482 msng_filenode_set = {}
1488 msng_filenode_set = {}
1483
1489
1484 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1490 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1485 junk = None
1491 junk = None
1486
1492
1487 # A changeset always belongs to itself, so the changenode lookup
1493 # A changeset always belongs to itself, so the changenode lookup
1488 # function for a changenode is identity.
1494 # function for a changenode is identity.
1489 def identity(x):
1495 def identity(x):
1490 return x
1496 return x
1491
1497
1492 # A function generating function. Sets up an environment for the
1498 # A function generating function. Sets up an environment for the
1493 # inner function.
1499 # inner function.
1494 def cmp_by_rev_func(revlog):
1500 def cmp_by_rev_func(revlog):
1495 # Compare two nodes by their revision number in the environment's
1501 # Compare two nodes by their revision number in the environment's
1496 # revision history. Since the revision number both represents the
1502 # revision history. Since the revision number both represents the
1497 # most efficient order to read the nodes in, and represents a
1503 # most efficient order to read the nodes in, and represents a
1498 # topological sorting of the nodes, this function is often useful.
1504 # topological sorting of the nodes, this function is often useful.
1499 def cmp_by_rev(a, b):
1505 def cmp_by_rev(a, b):
1500 return cmp(revlog.rev(a), revlog.rev(b))
1506 return cmp(revlog.rev(a), revlog.rev(b))
1501 return cmp_by_rev
1507 return cmp_by_rev
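# Illustrative sketch only: the factory above closes over a revlog and
# returns a Python 2 cmp-style comparator, so a node list can be sorted
# into revision (topological) order.  '_toyrevlog' is a stand-in, not a
# real Mercurial class.
class _toyrevlog(object):
    def __init__(self, order):
        self._revs = dict((n, i) for i, n in enumerate(order))
    def rev(self, node):
        return self._revs[node]
# e.g. nodes = ['b', 'c', 'a']
#      nodes.sort(cmp_by_rev_func(_toyrevlog(['a', 'b', 'c'])))
#      -> nodes == ['a', 'b', 'c']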
1502
1508
1503 # If we determine that a particular file or manifest node must be a
1509 # If we determine that a particular file or manifest node must be a
1504 # node that the recipient of the changegroup will already have, we can
1510 # node that the recipient of the changegroup will already have, we can
1505 # also assume the recipient will have all the parents. This function
1511 # also assume the recipient will have all the parents. This function
1506 # prunes them from the set of missing nodes.
1512 # prunes them from the set of missing nodes.
1507 def prune_parents(revlog, hasset, msngset):
1513 def prune_parents(revlog, hasset, msngset):
1508 haslst = hasset.keys()
1514 haslst = hasset.keys()
1509 haslst.sort(cmp_by_rev_func(revlog))
1515 haslst.sort(cmp_by_rev_func(revlog))
1510 for node in haslst:
1516 for node in haslst:
1511 parentlst = [p for p in revlog.parents(node) if p != nullid]
1517 parentlst = [p for p in revlog.parents(node) if p != nullid]
1512 while parentlst:
1518 while parentlst:
1513 n = parentlst.pop()
1519 n = parentlst.pop()
1514 if n not in hasset:
1520 if n not in hasset:
1515 hasset[n] = 1
1521 hasset[n] = 1
1516 p = [p for p in revlog.parents(n) if p != nullid]
1522 p = [p for p in revlog.parents(n) if p != nullid]
1517 parentlst.extend(p)
1523 parentlst.extend(p)
1518 for n in hasset:
1524 for n in hasset:
1519 msngset.pop(n, None)
1525 msngset.pop(n, None)
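# Illustrative sketch only, mirroring prune_parents on plain dicts: once a
# node is known to be present on the recipient, all of its ancestors must
# be present too, so they are dropped from the missing set.  Toy names.
def _toy_prune_parents(parents, hasset, msngset):
    stack = list(hasset)
    while stack:
        n = stack.pop()
        for p in parents.get(n, []):
            if p not in hasset:
                hasset[p] = 1
                stack.append(p)
    for n in hasset:
        msngset.pop(n, None)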
1520
1526
1521 # This is a function generating function used to set up an environment
1527 # This is a function generating function used to set up an environment
1522 # for the inner function to execute in.
1528 # for the inner function to execute in.
1523 def manifest_and_file_collector(changedfileset):
1529 def manifest_and_file_collector(changedfileset):
1524 # This is an information gathering function that gathers
1530 # This is an information gathering function that gathers
1525 # information from each changeset node that goes out as part of
1531 # information from each changeset node that goes out as part of
1526 # the changegroup. The information gathered is a list of which
1532 # the changegroup. The information gathered is a list of which
1527 # manifest nodes are potentially required (the recipient may
1533 # manifest nodes are potentially required (the recipient may
1528 # already have them) and the total list of all files which were
1534 # already have them) and the total list of all files which were
1529 # changed in any changeset in the changegroup.
1535 # changed in any changeset in the changegroup.
1530 #
1536 #
1531 # We also remember the first changenode we saw each manifest
1537 # We also remember the first changenode we saw each manifest
1532 # referenced by, so we can later determine which changenode 'owns'
1538 # referenced by, so we can later determine which changenode 'owns'
1533 # the manifest.
1539 # the manifest.
1534 def collect_manifests_and_files(clnode):
1540 def collect_manifests_and_files(clnode):
1535 c = cl.read(clnode)
1541 c = cl.read(clnode)
1536 for f in c[3]:
1542 for f in c[3]:
1537 # This is to make sure we only have one instance of each
1543 # This is to make sure we only have one instance of each
1538 # filename string for each filename.
1544 # filename string for each filename.
1539 changedfileset.setdefault(f, f)
1545 changedfileset.setdefault(f, f)
1540 msng_mnfst_set.setdefault(c[0], clnode)
1546 msng_mnfst_set.setdefault(c[0], clnode)
1541 return collect_manifests_and_files
1547 return collect_manifests_and_files
1542
1548
1543 # Figure out which manifest nodes (of the ones we think might be part
1549 # Figure out which manifest nodes (of the ones we think might be part
1544 # of the changegroup) the recipient must know about and remove them
1550 # of the changegroup) the recipient must know about and remove them
1545 # from the changegroup.
1551 # from the changegroup.
1546 def prune_manifests():
1552 def prune_manifests():
1547 has_mnfst_set = {}
1553 has_mnfst_set = {}
1548 for n in msng_mnfst_set:
1554 for n in msng_mnfst_set:
1549 # If a 'missing' manifest thinks it belongs to a changenode
1555 # If a 'missing' manifest thinks it belongs to a changenode
1550 # the recipient is assumed to have, obviously the recipient
1556 # the recipient is assumed to have, obviously the recipient
1551 # must have that manifest.
1557 # must have that manifest.
1552 linknode = cl.node(mnfst.linkrev(n))
1558 linknode = cl.node(mnfst.linkrev(n))
1553 if linknode in has_cl_set:
1559 if linknode in has_cl_set:
1554 has_mnfst_set[n] = 1
1560 has_mnfst_set[n] = 1
1555 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1561 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1556
1562
1557 # Use the information collected in collect_manifests_and_files to say
1563 # Use the information collected in collect_manifests_and_files to say
1558 # which changenode any manifestnode belongs to.
1564 # which changenode any manifestnode belongs to.
1559 def lookup_manifest_link(mnfstnode):
1565 def lookup_manifest_link(mnfstnode):
1560 return msng_mnfst_set[mnfstnode]
1566 return msng_mnfst_set[mnfstnode]
1561
1567
1562 # A function generating function that sets up the initial environment
1568 # A function generating function that sets up the initial environment
1563 # for the inner function.
1569 # for the inner function.
1564 def filenode_collector(changedfiles):
1570 def filenode_collector(changedfiles):
1565 next_rev = [0]
1571 next_rev = [0]
1566 # This gathers information from each manifestnode included in the
1572 # This gathers information from each manifestnode included in the
1567 # changegroup about which filenodes the manifest node references
1573 # changegroup about which filenodes the manifest node references
1568 # so we can include those in the changegroup too.
1574 # so we can include those in the changegroup too.
1569 #
1575 #
1570 # It also remembers which changenode each filenode belongs to. It
1576 # It also remembers which changenode each filenode belongs to. It
1571 # does this by assuming the a filenode belongs to the changenode
1577 # does this by assuming the a filenode belongs to the changenode
1572 # the first manifest that references it belongs to.
1578 # the first manifest that references it belongs to.
1573 def collect_msng_filenodes(mnfstnode):
1579 def collect_msng_filenodes(mnfstnode):
1574 r = mnfst.rev(mnfstnode)
1580 r = mnfst.rev(mnfstnode)
1575 if r == next_rev[0]:
1581 if r == next_rev[0]:
1576 # If the last rev we looked at was the one just previous,
1582 # If the last rev we looked at was the one just previous,
1577 # we only need to see a diff.
1583 # we only need to see a diff.
1578 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1584 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1579 # For each line in the delta
1585 # For each line in the delta
1580 for dline in delta.splitlines():
1586 for dline in delta.splitlines():
1581 # get the filename and filenode for that line
1587 # get the filename and filenode for that line
1582 f, fnode = dline.split('\0')
1588 f, fnode = dline.split('\0')
1583 fnode = bin(fnode[:40])
1589 fnode = bin(fnode[:40])
1584 f = changedfiles.get(f, None)
1590 f = changedfiles.get(f, None)
1585 # And if the file is in the list of files we care
1591 # And if the file is in the list of files we care
1586 # about.
1592 # about.
1587 if f is not None:
1593 if f is not None:
1588 # Get the changenode this manifest belongs to
1594 # Get the changenode this manifest belongs to
1589 clnode = msng_mnfst_set[mnfstnode]
1595 clnode = msng_mnfst_set[mnfstnode]
1590 # Create the set of filenodes for the file if
1596 # Create the set of filenodes for the file if
1591 # there isn't one already.
1597 # there isn't one already.
1592 ndset = msng_filenode_set.setdefault(f, {})
1598 ndset = msng_filenode_set.setdefault(f, {})
1593 # And set the filenode's changelog node to the
1599 # And set the filenode's changelog node to the
1594 # manifest's if it hasn't been set already.
1600 # manifest's if it hasn't been set already.
1595 ndset.setdefault(fnode, clnode)
1601 ndset.setdefault(fnode, clnode)
1596 else:
1602 else:
1597 # Otherwise we need a full manifest.
1603 # Otherwise we need a full manifest.
1598 m = mnfst.read(mnfstnode)
1604 m = mnfst.read(mnfstnode)
1599 # For every file we care about.
1605 # For every file we care about.
1600 for f in changedfiles:
1606 for f in changedfiles:
1601 fnode = m.get(f, None)
1607 fnode = m.get(f, None)
1602 # If it's in the manifest
1608 # If it's in the manifest
1603 if fnode is not None:
1609 if fnode is not None:
1604 # See comments above.
1610 # See comments above.
1605 clnode = msng_mnfst_set[mnfstnode]
1611 clnode = msng_mnfst_set[mnfstnode]
1606 ndset = msng_filenode_set.setdefault(f, {})
1612 ndset = msng_filenode_set.setdefault(f, {})
1607 ndset.setdefault(fnode, clnode)
1613 ndset.setdefault(fnode, clnode)
1608 # Remember the revision we hope to see next.
1614 # Remember the revision we hope to see next.
1609 next_rev[0] = r + 1
1615 next_rev[0] = r + 1
1610 return collect_msng_filenodes
1616 return collect_msng_filenodes
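# Illustrative sketch only: a manifest (or manifest delta) line has the
# form "<filename>\0<40 hex chars><optional flags>", which is why the code
# above splits on NUL and binarizes just the first 40 characters.
def _toy_parse_manifest_line(dline):
    fname, rest = dline.split('\0', 1)
    filenode = rest[:40].decode('hex')   # Python 2 spelling of bin()
    return fname, filenode
# e.g. _toy_parse_manifest_line('foo/bar.txt\0' + 'ab' * 20)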
1611
1617
1612 # We have a list of filenodes we think we need for a file; let's remove
1618 # We have a list of filenodes we think we need for a file; let's remove
1613 # all those we know the recipient must have.
1619 # all those we know the recipient must have.
1614 def prune_filenodes(f, filerevlog):
1620 def prune_filenodes(f, filerevlog):
1615 msngset = msng_filenode_set[f]
1621 msngset = msng_filenode_set[f]
1616 hasset = {}
1622 hasset = {}
1617 # If a 'missing' filenode thinks it belongs to a changenode we
1623 # If a 'missing' filenode thinks it belongs to a changenode we
1618 # assume the recipient must have, then the recipient must have
1624 # assume the recipient must have, then the recipient must have
1619 # that filenode.
1625 # that filenode.
1620 for n in msngset:
1626 for n in msngset:
1621 clnode = cl.node(filerevlog.linkrev(n))
1627 clnode = cl.node(filerevlog.linkrev(n))
1622 if clnode in has_cl_set:
1628 if clnode in has_cl_set:
1623 hasset[n] = 1
1629 hasset[n] = 1
1624 prune_parents(filerevlog, hasset, msngset)
1630 prune_parents(filerevlog, hasset, msngset)
1625
1631
1626 # A function generating function that sets up a context for the
1632 # A function generating function that sets up a context for the
1627 # inner function.
1633 # inner function.
1628 def lookup_filenode_link_func(fname):
1634 def lookup_filenode_link_func(fname):
1629 msngset = msng_filenode_set[fname]
1635 msngset = msng_filenode_set[fname]
1630 # Lookup the changenode the filenode belongs to.
1636 # Lookup the changenode the filenode belongs to.
1631 def lookup_filenode_link(fnode):
1637 def lookup_filenode_link(fnode):
1632 return msngset[fnode]
1638 return msngset[fnode]
1633 return lookup_filenode_link
1639 return lookup_filenode_link
1634
1640
1635 # Now that we have all these utility functions to help out and
1641 # Now that we have all these utility functions to help out and
1636 # logically divide up the task, generate the group.
1642 # logically divide up the task, generate the group.
1637 def gengroup():
1643 def gengroup():
1638 # The set of changed files starts empty.
1644 # The set of changed files starts empty.
1639 changedfiles = {}
1645 changedfiles = {}
1640 # Create a changenode group generator that will call our functions
1646 # Create a changenode group generator that will call our functions
1641 # back to lookup the owning changenode and collect information.
1647 # back to lookup the owning changenode and collect information.
1642 group = cl.group(msng_cl_lst, identity,
1648 group = cl.group(msng_cl_lst, identity,
1643 manifest_and_file_collector(changedfiles))
1649 manifest_and_file_collector(changedfiles))
1644 for chnk in group:
1650 for chnk in group:
1645 yield chnk
1651 yield chnk
1646
1652
1647 # The list of manifests has been collected by the generator
1653 # The list of manifests has been collected by the generator
1648 # calling our functions back.
1654 # calling our functions back.
1649 prune_manifests()
1655 prune_manifests()
1650 msng_mnfst_lst = msng_mnfst_set.keys()
1656 msng_mnfst_lst = msng_mnfst_set.keys()
1651 # Sort the manifestnodes by revision number.
1657 # Sort the manifestnodes by revision number.
1652 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1658 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1653 # Create a generator for the manifestnodes that calls our lookup
1659 # Create a generator for the manifestnodes that calls our lookup
1654 # and data collection functions back.
1660 # and data collection functions back.
1655 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1661 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1656 filenode_collector(changedfiles))
1662 filenode_collector(changedfiles))
1657 for chnk in group:
1663 for chnk in group:
1658 yield chnk
1664 yield chnk
1659
1665
1660 # These are no longer needed, dereference and toss the memory for
1666 # These are no longer needed, dereference and toss the memory for
1661 # them.
1667 # them.
1662 msng_mnfst_lst = None
1668 msng_mnfst_lst = None
1663 msng_mnfst_set.clear()
1669 msng_mnfst_set.clear()
1664
1670
1665 changedfiles = changedfiles.keys()
1671 changedfiles = changedfiles.keys()
1666 changedfiles.sort()
1672 changedfiles.sort()
1667 # Go through all our files in order sorted by name.
1673 # Go through all our files in order sorted by name.
1668 for fname in changedfiles:
1674 for fname in changedfiles:
1669 filerevlog = self.file(fname)
1675 filerevlog = self.file(fname)
1670 # Toss out the filenodes that the recipient isn't really
1676 # Toss out the filenodes that the recipient isn't really
1671 # missing.
1677 # missing.
1672 if msng_filenode_set.has_key(fname):
1678 if msng_filenode_set.has_key(fname):
1673 prune_filenodes(fname, filerevlog)
1679 prune_filenodes(fname, filerevlog)
1674 msng_filenode_lst = msng_filenode_set[fname].keys()
1680 msng_filenode_lst = msng_filenode_set[fname].keys()
1675 else:
1681 else:
1676 msng_filenode_lst = []
1682 msng_filenode_lst = []
1677 # If any filenodes are left, generate the group for them,
1683 # If any filenodes are left, generate the group for them,
1678 # otherwise don't bother.
1684 # otherwise don't bother.
1679 if len(msng_filenode_lst) > 0:
1685 if len(msng_filenode_lst) > 0:
1680 yield changegroup.genchunk(fname)
1686 yield changegroup.genchunk(fname)
1681 # Sort the filenodes by their revision #
1687 # Sort the filenodes by their revision #
1682 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1688 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1683 # Create a group generator and only pass in a changenode
1689 # Create a group generator and only pass in a changenode
1684 # lookup function as we need to collect no information
1690 # lookup function as we need to collect no information
1685 # from filenodes.
1691 # from filenodes.
1686 group = filerevlog.group(msng_filenode_lst,
1692 group = filerevlog.group(msng_filenode_lst,
1687 lookup_filenode_link_func(fname))
1693 lookup_filenode_link_func(fname))
1688 for chnk in group:
1694 for chnk in group:
1689 yield chnk
1695 yield chnk
1690 if msng_filenode_set.has_key(fname):
1696 if msng_filenode_set.has_key(fname):
1691 # Don't need this anymore, toss it to free memory.
1697 # Don't need this anymore, toss it to free memory.
1692 del msng_filenode_set[fname]
1698 del msng_filenode_set[fname]
1693 # Signal that no more groups are left.
1699 # Signal that no more groups are left.
1694 yield changegroup.closechunk()
1700 yield changegroup.closechunk()
1695
1701
1696 if msng_cl_lst:
1702 if msng_cl_lst:
1697 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1703 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1698
1704
1699 return util.chunkbuffer(gengroup())
1705 return util.chunkbuffer(gengroup())
1700
1706
1701 def changegroup(self, basenodes, source):
1707 def changegroup(self, basenodes, source):
1702 """Generate a changegroup of all nodes that we have that a recipient
1708 """Generate a changegroup of all nodes that we have that a recipient
1703 doesn't.
1709 doesn't.
1704
1710
1705 This is much easier than the previous function as we can assume that
1711 This is much easier than the previous function as we can assume that
1706 the recipient has any changenode we aren't sending them."""
1712 the recipient has any changenode we aren't sending them."""
1707
1713
1708 self.hook('preoutgoing', throw=True, source=source)
1714 self.hook('preoutgoing', throw=True, source=source)
1709
1715
1710 cl = self.changelog
1716 cl = self.changelog
1711 nodes = cl.nodesbetween(basenodes, None)[0]
1717 nodes = cl.nodesbetween(basenodes, None)[0]
1712 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1718 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1713 self.changegroupinfo(nodes)
1719 self.changegroupinfo(nodes)
1714
1720
1715 def identity(x):
1721 def identity(x):
1716 return x
1722 return x
1717
1723
1718 def gennodelst(revlog):
1724 def gennodelst(revlog):
1719 for r in xrange(0, revlog.count()):
1725 for r in xrange(0, revlog.count()):
1720 n = revlog.node(r)
1726 n = revlog.node(r)
1721 if revlog.linkrev(n) in revset:
1727 if revlog.linkrev(n) in revset:
1722 yield n
1728 yield n
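# Illustrative sketch only: linkrev(n) is the changelog revision that
# introduced revlog entry n, so keeping the nodes whose linkrev falls in
# 'revset' selects exactly the manifest/file revisions brought in by the
# outgoing changesets.  Toy equivalent over a plain list:
def _toy_gennodelst(entries, revset):
    # entries: (node, linkrev) pairs in revlog order
    return [node for node, linkrev in entries if linkrev in revset]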
1723
1729
1724 def changed_file_collector(changedfileset):
1730 def changed_file_collector(changedfileset):
1725 def collect_changed_files(clnode):
1731 def collect_changed_files(clnode):
1726 c = cl.read(clnode)
1732 c = cl.read(clnode)
1727 for fname in c[3]:
1733 for fname in c[3]:
1728 changedfileset[fname] = 1
1734 changedfileset[fname] = 1
1729 return collect_changed_files
1735 return collect_changed_files
1730
1736
1731 def lookuprevlink_func(revlog):
1737 def lookuprevlink_func(revlog):
1732 def lookuprevlink(n):
1738 def lookuprevlink(n):
1733 return cl.node(revlog.linkrev(n))
1739 return cl.node(revlog.linkrev(n))
1734 return lookuprevlink
1740 return lookuprevlink
1735
1741
1736 def gengroup():
1742 def gengroup():
1737 # construct a list of all changed files
1743 # construct a list of all changed files
1738 changedfiles = {}
1744 changedfiles = {}
1739
1745
1740 for chnk in cl.group(nodes, identity,
1746 for chnk in cl.group(nodes, identity,
1741 changed_file_collector(changedfiles)):
1747 changed_file_collector(changedfiles)):
1742 yield chnk
1748 yield chnk
1743 changedfiles = changedfiles.keys()
1749 changedfiles = changedfiles.keys()
1744 changedfiles.sort()
1750 changedfiles.sort()
1745
1751
1746 mnfst = self.manifest
1752 mnfst = self.manifest
1747 nodeiter = gennodelst(mnfst)
1753 nodeiter = gennodelst(mnfst)
1748 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1754 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1749 yield chnk
1755 yield chnk
1750
1756
1751 for fname in changedfiles:
1757 for fname in changedfiles:
1752 filerevlog = self.file(fname)
1758 filerevlog = self.file(fname)
1753 nodeiter = gennodelst(filerevlog)
1759 nodeiter = gennodelst(filerevlog)
1754 nodeiter = list(nodeiter)
1760 nodeiter = list(nodeiter)
1755 if nodeiter:
1761 if nodeiter:
1756 yield changegroup.genchunk(fname)
1762 yield changegroup.genchunk(fname)
1757 lookup = lookuprevlink_func(filerevlog)
1763 lookup = lookuprevlink_func(filerevlog)
1758 for chnk in filerevlog.group(nodeiter, lookup):
1764 for chnk in filerevlog.group(nodeiter, lookup):
1759 yield chnk
1765 yield chnk
1760
1766
1761 yield changegroup.closechunk()
1767 yield changegroup.closechunk()
1762
1768
1763 if nodes:
1769 if nodes:
1764 self.hook('outgoing', node=hex(nodes[0]), source=source)
1770 self.hook('outgoing', node=hex(nodes[0]), source=source)
1765
1771
1766 return util.chunkbuffer(gengroup())
1772 return util.chunkbuffer(gengroup())
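# Illustrative sketch only: both changegroup() and changegroupsubset()
# return a file-like chunk buffer, so a caller can stream it out with
# ordinary fixed-size reads ('outfile' and the block size are placeholders).
def _toy_copy_changegroup(cg, outfile, blocksize=4096):
    while True:
        chunk = cg.read(blocksize)
        if not chunk:
            break
        outfile.write(chunk)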
1767
1773
1768 def addchangegroup(self, source, srctype, url):
1774 def addchangegroup(self, source, srctype, url):
1769 """add changegroup to repo.
1775 """add changegroup to repo.
1770
1776
1771 return values:
1777 return values:
1772 - nothing changed or no source: 0
1778 - nothing changed or no source: 0
1773 - more heads than before: 1+added heads (2..n)
1779 - more heads than before: 1+added heads (2..n)
1774 - fewer heads than before: -1-removed heads (-2..-n)
1780 - fewer heads than before: -1-removed heads (-2..-n)
1775 - number of heads stays the same: 1
1781 - number of heads stays the same: 1
1776 """
1782 """
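# Illustrative sketch only, decoding the return value according to the
# rules in the docstring above (helper name is hypothetical):
def _toy_describe_result(ret):
    if ret == 0:
        return 'nothing changed'
    if ret > 1:
        return '%d new head(s)' % (ret - 1)
    if ret < 0:
        return '%d head(s) removed' % (-ret - 1)
    return 'heads unchanged'            # ret == 1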
1777 def csmap(x):
1783 def csmap(x):
1778 self.ui.debug(_("add changeset %s\n") % short(x))
1784 self.ui.debug(_("add changeset %s\n") % short(x))
1779 return cl.count()
1785 return cl.count()
1780
1786
1781 def revmap(x):
1787 def revmap(x):
1782 return cl.rev(x)
1788 return cl.rev(x)
1783
1789
1784 if not source:
1790 if not source:
1785 return 0
1791 return 0
1786
1792
1787 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1793 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1788
1794
1789 changesets = files = revisions = 0
1795 changesets = files = revisions = 0
1790
1796
1791 tr = self.transaction()
1797 tr = self.transaction()
1792
1798
1793 # write changelog data to temp files so concurrent readers will not see
1799 # write changelog data to temp files so concurrent readers will not see
1794 # inconsistent view
1800 # inconsistent view
1795 cl = self.changelog
1801 cl = self.changelog
1796 cl.delayupdate()
1802 cl.delayupdate()
1797 oldheads = len(cl.heads())
1803 oldheads = len(cl.heads())
1798
1804
1799 # pull off the changeset group
1805 # pull off the changeset group
1800 self.ui.status(_("adding changesets\n"))
1806 self.ui.status(_("adding changesets\n"))
1801 cor = cl.count() - 1
1807 cor = cl.count() - 1
1802 chunkiter = changegroup.chunkiter(source)
1808 chunkiter = changegroup.chunkiter(source)
1803 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1809 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1804 raise util.Abort(_("received changelog group is empty"))
1810 raise util.Abort(_("received changelog group is empty"))
1805 cnr = cl.count() - 1
1811 cnr = cl.count() - 1
1806 changesets = cnr - cor
1812 changesets = cnr - cor
1807
1813
1808 # pull off the manifest group
1814 # pull off the manifest group
1809 self.ui.status(_("adding manifests\n"))
1815 self.ui.status(_("adding manifests\n"))
1810 chunkiter = changegroup.chunkiter(source)
1816 chunkiter = changegroup.chunkiter(source)
1811 # no need to check for empty manifest group here:
1817 # no need to check for empty manifest group here:
1812 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1818 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1813 # no new manifest will be created and the manifest group will
1819 # no new manifest will be created and the manifest group will
1814 # be empty during the pull
1820 # be empty during the pull
1815 self.manifest.addgroup(chunkiter, revmap, tr)
1821 self.manifest.addgroup(chunkiter, revmap, tr)
1816
1822
1817 # process the files
1823 # process the files
1818 self.ui.status(_("adding file changes\n"))
1824 self.ui.status(_("adding file changes\n"))
1819 while 1:
1825 while 1:
1820 f = changegroup.getchunk(source)
1826 f = changegroup.getchunk(source)
1821 if not f:
1827 if not f:
1822 break
1828 break
1823 self.ui.debug(_("adding %s revisions\n") % f)
1829 self.ui.debug(_("adding %s revisions\n") % f)
1824 fl = self.file(f)
1830 fl = self.file(f)
1825 o = fl.count()
1831 o = fl.count()
1826 chunkiter = changegroup.chunkiter(source)
1832 chunkiter = changegroup.chunkiter(source)
1827 if fl.addgroup(chunkiter, revmap, tr) is None:
1833 if fl.addgroup(chunkiter, revmap, tr) is None:
1828 raise util.Abort(_("received file revlog group is empty"))
1834 raise util.Abort(_("received file revlog group is empty"))
1829 revisions += fl.count() - o
1835 revisions += fl.count() - o
1830 files += 1
1836 files += 1
1831
1837
1832 # make changelog see real files again
1838 # make changelog see real files again
1833 cl.finalize(tr)
1839 cl.finalize(tr)
1834
1840
1835 newheads = len(self.changelog.heads())
1841 newheads = len(self.changelog.heads())
1836 heads = ""
1842 heads = ""
1837 if oldheads and newheads != oldheads:
1843 if oldheads and newheads != oldheads:
1838 heads = _(" (%+d heads)") % (newheads - oldheads)
1844 heads = _(" (%+d heads)") % (newheads - oldheads)
1839
1845
1840 self.ui.status(_("added %d changesets"
1846 self.ui.status(_("added %d changesets"
1841 " with %d changes to %d files%s\n")
1847 " with %d changes to %d files%s\n")
1842 % (changesets, revisions, files, heads))
1848 % (changesets, revisions, files, heads))
1843
1849
1844 if changesets > 0:
1850 if changesets > 0:
1845 self.hook('pretxnchangegroup', throw=True,
1851 self.hook('pretxnchangegroup', throw=True,
1846 node=hex(self.changelog.node(cor+1)), source=srctype,
1852 node=hex(self.changelog.node(cor+1)), source=srctype,
1847 url=url)
1853 url=url)
1848
1854
1849 tr.close()
1855 tr.close()
1850
1856
1851 if changesets > 0:
1857 if changesets > 0:
1852 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1858 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1853 source=srctype, url=url)
1859 source=srctype, url=url)
1854
1860
1855 for i in xrange(cor + 1, cnr + 1):
1861 for i in xrange(cor + 1, cnr + 1):
1856 self.hook("incoming", node=hex(self.changelog.node(i)),
1862 self.hook("incoming", node=hex(self.changelog.node(i)),
1857 source=srctype, url=url)
1863 source=srctype, url=url)
1858
1864
1859 # never return 0 here:
1865 # never return 0 here:
1860 if newheads < oldheads:
1866 if newheads < oldheads:
1861 return newheads - oldheads - 1
1867 return newheads - oldheads - 1
1862 else:
1868 else:
1863 return newheads - oldheads + 1
1869 return newheads - oldheads + 1
1864
1870
1865
1871
1866 def stream_in(self, remote):
1872 def stream_in(self, remote):
1867 fp = remote.stream_out()
1873 fp = remote.stream_out()
1868 l = fp.readline()
1874 l = fp.readline()
1869 try:
1875 try:
1870 resp = int(l)
1876 resp = int(l)
1871 except ValueError:
1877 except ValueError:
1872 raise util.UnexpectedOutput(
1878 raise util.UnexpectedOutput(
1873 _('Unexpected response from remote server:'), l)
1879 _('Unexpected response from remote server:'), l)
1874 if resp == 1:
1880 if resp == 1:
1875 raise util.Abort(_('operation forbidden by server'))
1881 raise util.Abort(_('operation forbidden by server'))
1876 elif resp == 2:
1882 elif resp == 2:
1877 raise util.Abort(_('locking the remote repository failed'))
1883 raise util.Abort(_('locking the remote repository failed'))
1878 elif resp != 0:
1884 elif resp != 0:
1879 raise util.Abort(_('the server sent an unknown error code'))
1885 raise util.Abort(_('the server sent an unknown error code'))
1880 self.ui.status(_('streaming all changes\n'))
1886 self.ui.status(_('streaming all changes\n'))
1881 l = fp.readline()
1887 l = fp.readline()
1882 try:
1888 try:
1883 total_files, total_bytes = map(int, l.split(' ', 1))
1889 total_files, total_bytes = map(int, l.split(' ', 1))
1884 except (ValueError, TypeError):
1890 except (ValueError, TypeError):
1885 raise util.UnexpectedOutput(
1891 raise util.UnexpectedOutput(
1886 _('Unexpected response from remote server:'), l)
1892 _('Unexpected response from remote server:'), l)
1887 self.ui.status(_('%d files to transfer, %s of data\n') %
1893 self.ui.status(_('%d files to transfer, %s of data\n') %
1888 (total_files, util.bytecount(total_bytes)))
1894 (total_files, util.bytecount(total_bytes)))
1889 start = time.time()
1895 start = time.time()
1890 for i in xrange(total_files):
1896 for i in xrange(total_files):
1891 # XXX doesn't support '\n' or '\r' in filenames
1897 # XXX doesn't support '\n' or '\r' in filenames
1892 l = fp.readline()
1898 l = fp.readline()
1893 try:
1899 try:
1894 name, size = l.split('\0', 1)
1900 name, size = l.split('\0', 1)
1895 size = int(size)
1901 size = int(size)
1896 except (ValueError, TypeError):
1902 except (ValueError, TypeError):
1897 raise util.UnexpectedOutput(
1903 raise util.UnexpectedOutput(
1898 _('Unexpected response from remote server:'), l)
1904 _('Unexpected response from remote server:'), l)
1899 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1905 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1900 ofp = self.sopener(name, 'w')
1906 ofp = self.sopener(name, 'w')
1901 for chunk in util.filechunkiter(fp, limit=size):
1907 for chunk in util.filechunkiter(fp, limit=size):
1902 ofp.write(chunk)
1908 ofp.write(chunk)
1903 ofp.close()
1909 ofp.close()
1904 elapsed = time.time() - start
1910 elapsed = time.time() - start
1905 if elapsed <= 0:
1911 if elapsed <= 0:
1906 elapsed = 0.001
1912 elapsed = 0.001
1907 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1913 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1908 (util.bytecount(total_bytes), elapsed,
1914 (util.bytecount(total_bytes), elapsed,
1909 util.bytecount(total_bytes / elapsed)))
1915 util.bytecount(total_bytes / elapsed)))
1910 self.reload()
1916 self.reload()
1911 return len(self.heads()) + 1
1917 return len(self.heads()) + 1
1912
1918
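# Illustrative sketch only: the per-file framing parsed by stream_in above,
# seen from the sending side -- a header line "name\0size\n" followed by
# exactly 'size' bytes of raw file data (helper name is hypothetical).
def _toy_stream_entry(name, data):
    return '%s\0%d\n%s' % (name, len(data), data)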
1913 def clone(self, remote, heads=[], stream=False):
1919 def clone(self, remote, heads=[], stream=False):
1914 '''clone remote repository.
1920 '''clone remote repository.
1915
1921
1916 keyword arguments:
1922 keyword arguments:
1917 heads: list of revs to clone (forces use of pull)
1923 heads: list of revs to clone (forces use of pull)
1918 stream: use streaming clone if possible'''
1924 stream: use streaming clone if possible'''
1919
1925
1920 # now, all clients that can request uncompressed clones can
1926 # now, all clients that can request uncompressed clones can
1921 # read repo formats supported by all servers that can serve
1927 # read repo formats supported by all servers that can serve
1922 # them.
1928 # them.
1923
1929
1924 # if revlog format changes, client will have to check version
1930 # if revlog format changes, client will have to check version
1925 # and format flags on "stream" capability, and use
1931 # and format flags on "stream" capability, and use
1926 # uncompressed only if compatible.
1932 # uncompressed only if compatible.
1927
1933
1928 if stream and not heads and remote.capable('stream'):
1934 if stream and not heads and remote.capable('stream'):
1929 return self.stream_in(remote)
1935 return self.stream_in(remote)
1930 return self.pull(remote, heads)
1936 return self.pull(remote, heads)
1931
1937
1932 # used to avoid circular references so destructors work
1938 # used to avoid circular references so destructors work
1933 def aftertrans(files):
1939 def aftertrans(files):
1934 renamefiles = [tuple(t) for t in files]
1940 renamefiles = [tuple(t) for t in files]
1935 def a():
1941 def a():
1936 for src, dest in renamefiles:
1942 for src, dest in renamefiles:
1937 util.rename(src, dest)
1943 util.rename(src, dest)
1938 return a
1944 return a
1939
1945
1940 def instance(ui, path, create):
1946 def instance(ui, path, create):
1941 return localrepository(ui, util.drop_scheme('file', path), create)
1947 return localrepository(ui, util.drop_scheme('file', path), create)
1942
1948
1943 def islocal(path):
1949 def islocal(path):
1944 return True
1950 return True
@@ -1,32 +1,32 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Changelog</title>
2 <title>#repo|escape#: Changelog</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / changelog
10 </div>
10 </div>
11
11
12 <form action="{url}log">
12 <form action="{url}log">
13 {sessionvars%hiddenformentry}
13 {sessionvars%hiddenformentry}
14 <div class="search">
14 <div class="search">
15 <input type="text" name="rev" />
15 <input type="text" name="rev" />
16 </div>
16 </div>
17 </form>
17 </form>
18 </div>
18 </div>
19
19
20 <div class="page_nav">
20 <div class="page_nav">
21 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#<br/>
21 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | changelog | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
22 <br/>
22 <br/>
23 #changenav%naventry#<br/>
23 #changenav%naventry#<br/>
24 </div>
24 </div>
25
25
26 #entries%changelogentry#
26 #entries%changelogentry#
27
27
28 <div class="page_nav">
28 <div class="page_nav">
29 #changenav%naventry#<br/>
29 #changenav%naventry#<br/>
30 </div>
30 </div>
31
31
32 #footer#
32 #footer#
@@ -1,41 +1,40 b''
1 #header#
1 #header#
2 <title>{repo|escape}: changeset {rev}:{node|short}</title>
2 <title>{repo|escape}: changeset {rev}:{node|short}</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="#url#summary{sessionvars%urlparameter}">#repo|escape#</a> / changeset
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a> | changeset | <a href="{url}raw-rev/#node|short#">raw</a> #archives%archiveentry#<br/>
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog/#rev#{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a> | changeset | <a href="{url}raw-rev/#node|short#">raw</a> #archives%archiveentry#<br/>
14 </div>
14 </div>
15
15
16 <div>
16 <div>
17 <a class="title" href="{url}raw-rev/#node|short#">#desc|strip|escape|firstline#</a>
17 <a class="title" href="{url}raw-rev/#node|short#">#desc|strip|escape|firstline#</a>
18 </div>
18 </div>
19 <div class="title_text">
19 <div class="title_text">
20 <table cellspacing="0">
20 <table cellspacing="0">
21 <tr><td>author</td><td>#author|obfuscate#</td></tr>
21 <tr><td>author</td><td>#author|obfuscate#</td></tr>
22 <tr><td></td><td>#date|date# (#date|age# ago)</td></tr>
22 <tr><td></td><td>#date|date# (#date|age# ago)</td></tr>
23 <tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr>
23 <tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr>
24 <tr><td>manifest</td><td style="font-family:monospace"><a class="list" href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
25 #parent%changesetparent#
24 #parent%changesetparent#
26 #child%changesetchild#
25 #child%changesetchild#
27 #changesettag#
26 #changesettag#
28 </table></div>
27 </table></div>
29
28
30 <div class="page_body">
29 <div class="page_body">
31 #desc|strip|escape|addbreaks#
30 #desc|strip|escape|addbreaks#
32 </div>
31 </div>
33 <div class="list_head"></div>
32 <div class="list_head"></div>
34 <div class="title_text">
33 <div class="title_text">
35 <table cellspacing="0">
34 <table cellspacing="0">
36 #files#
35 #files#
37 </table></div>
36 </table></div>
38
37
39 <div class="page_body">#diff#</div>
38 <div class="page_body">#diff#</div>
40
39
41 #footer#
40 #footer#
@@ -1,59 +1,56 b''
1 #header#
1 #header#
2 <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title>
2 <title>{repo|escape}: {file|escape}@{node|short} (annotated)</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / annotate
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
19 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
19 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
21 annotate |
21 annotate |
22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
23 <a href="{url}raw-annotate/{node|short}/#file|urlescape#">raw</a><br/>
23 <a href="{url}raw-annotate/{node|short}/#file|urlescape#">raw</a><br/>
24 </div>
24 </div>
25
25
26 <div class="title">#file|escape#</div>
26 <div class="title">#file|escape#</div>
27
27
28 <div class="title_text">
28 <div class="title_text">
29 <table>
29 <table cellspacing="0">
30 <tr>
31 <td>author</td>
32 <td>#author|obfuscate#</td></tr>
30 <tr>
33 <tr>
31 <td class="metatag">changeset #rev#:</td>
34 <td></td>
32 <td><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
35 <td>#date|date# (#date|age# ago)</td></tr>
36 <tr>
37 <td>changeset {rev}</td>
38 <td style="font-family:monospace"><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
33 #parent%fileannotateparent#
39 #parent%fileannotateparent#
34 #child%fileannotatechild#
40 #child%fileannotatechild#
35 <tr>
41 <tr>
36 <td class="metatag">manifest:</td>
42 <td>permissions</td>
37 <td><a href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
43 <td style="font-family:monospace">#permissions|permissions#</td></tr>
38 <tr>
39 <td class="metatag">author:</td>
40 <td>#author|obfuscate#</td></tr>
41 <tr>
42 <td class="metatag">date:</td>
43 <td>#date|date# (#date|age# ago)</td></tr>
44 <tr>
45 <td class="metatag">permissions:</td>
46 <td>#permissions|permissions#</td></tr>
47 </table>
44 </table>
48 </div>
45 </div>
49
46
50 <div class="page_path">
47 <div class="page_path">
51 {desc|strip|escape|addbreaks}
48 {desc|strip|escape|addbreaks}
52 </div>
49 </div>
53 <div class="page_body">
50 <div class="page_body">
54 <table>
51 <table>
55 #annotate%annotateline#
52 #annotate%annotateline#
56 </table>
53 </table>
57 </div>
54 </div>
58
55
59 #footer#
56 #footer#
@@ -1,47 +1,42 b''
1 {header}
1 {header}
2 <title>{repo|escape}: diff {file|escape}</title>
2 <title>{repo|escape}: diff {file|escape}</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for {repo|escape}">
4 href="{url}rss-log" title="RSS feed for {repo|escape}">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / annotate
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">manifest</a> |
17 <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">manifest</a> |
18 <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
18 <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
19 <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
19 <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
20 <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
20 <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
21 <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
21 <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
22 diff |
22 diff |
23 <a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a><br/>
23 <a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a><br/>
24 </div>
24 </div>
25
25
26 <div class="title">{file|escape}</div>
26 <div class="title">{file|escape}</div>
27
27
28 <table>
28 <table>
29 <tr>
29 <tr>
30 <td class="metatag">changeset {rev}:</td>
30 <td>changeset {rev}</td>
31 <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
31 <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>
32 </tr>
33 {parent%filediffparent}
32 {parent%filediffparent}
34 {child%filediffchild}
33 {child%filediffchild}
35 <tr>
36 <td class="metatag">manifest:</td>
37 <td><a href="{url}file/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
38 </tr>
39 </table>
34 </table>
40
35
36 <div class="list_head"></div>
37
41 <div class="page_body">
38 <div class="page_body">
42 <table>
43 {diff}
39 {diff}
44 </table>
45 </div>
40 </div>
46
41
47 {footer}
42 {footer}
@@ -1,33 +1,36 b''
1 #header#
1 #header#
2 <title>#repo|escape#: File revisions</title>
2 <title>#repo|escape#: File revisions</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revisions
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
17 <a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">file</a> |
18 revisions |
18 revisions |
19 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
19 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
20 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
20 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
21 <a href="{url}rss-log/#node|short#/#file|urlescape#">rss</a><br/>
21 <a href="{url}rss-log/#node|short#/#file|urlescape#">rss</a>
22
23 <br/>
22 <br/>
24 {nav%filenaventry}<br/>
23 {nav%filenaventry}
25 </div>
24 </div>
26
25
27 <div class="title" >#file|urlescape#</div>
26 <div class="title" >#file|urlescape#</div>
28
27
29 <table>
28 <table>
30 #entries%filelogentry#
29 #entries%filelogentry#
31 </table>
30 </table>
32
31
32 <div class="page_nav">
33 {nav%filenaventry}
34 </div>
35
33 #footer#
36 #footer#
@@ -1,58 +1,55 b''
1 #header#
1 #header#
2 <title>{repo|escape}: {file|escape}@{node|short}</title>
2 <title>{repo|escape}: {file|escape}@{node|short}</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / file revision
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
17 <a href="{url}file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a> |
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> |
19 file |
19 file |
20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
20 <a href="{url}log/{node|short}/#file|urlescape#{sessionvars%urlparameter}">revisions</a> |
21 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
21 <a href="{url}annotate/{node|short}/#file|urlescape#{sessionvars%urlparameter}">annotate</a> |
22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
22 <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
23 <a href="{url}raw-file/{node|short}/#file|urlescape#">raw</a><br/>
23 <a href="{url}raw-file/{node|short}/#file|urlescape#">raw</a><br/>
24 </div>
24 </div>
25
25
26 <div class="title">#file|escape#</div>
26 <div class="title">#file|escape#</div>
27
27
28 <div class="title_text">
28 <div class="title_text">
29 <table>
29 <table cellspacing="0">
30 <tr>
31 <td>author</td>
32 <td>#author|obfuscate#</td></tr>
30 <tr>
33 <tr>
31 <td class="metatag">changeset #rev#:</td>
34 <td></td>
32 <td><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
35 <td>#date|date# (#date|age# ago)</td></tr>
36 <tr>
37 <td>changeset {rev}</td>
38 <td style="font-family:monospace"><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
33 #parent%filerevparent#
39 #parent%filerevparent#
34 #child%filerevchild#
40 #child%filerevchild#
35 <tr>
41 <tr>
36 <td class="metatag">manifest:</td>
42 <td>permissions</td>
37 <td><a href="{url}file/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>
43 <td style="font-family:monospace">#permissions|permissions#</td></tr>
38 <tr>
39 <td class="metatag">author:</td>
40 <td>#author|obfuscate#</td></tr>
41 <tr>
42 <td class="metatag">date:</td>
43 <td>#date|date# (#date|age# ago)</td></tr>
44 <tr>
45 <td class="metatag">permissions:</td>
46 <td>#permissions|permissions#</td></tr>
47 </table>
44 </table>
48 </div>
45 </div>
49
46
50 <div class="page_path">
47 <div class="page_path">
51 {desc|strip|escape|addbreaks}
48 {desc|strip|escape|addbreaks}
52 </div>
49 </div>
53
50
54 <div class="page_body">
51 <div class="page_body">
55 #text%fileline#
52 #text%fileline#
56 </div>
53 </div>
57
54
58 #footer#
55 #footer#
@@ -1,33 +1,33 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Manifest</title>
2 <title>#repo|escape#: Manifest</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / manifest
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
17 manifest |
17 manifest |
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> #archives%archiveentry#<br/>
18 <a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> #archives%archiveentry#<br/>
19 </div>
19 </div>
20
20
21 <div class="title" >#path|escape#</div>
21 <div class="title" >#path|escape#</div>
22 <div class="page_body">
23 <table cellspacing="0">
22 <table cellspacing="0">
24 <tr class="light">
23 <tr class="parity#upparity#">
25 <td style="font-family:monospace">drwxr-xr-x</td>
24 <td style="font-family:monospace">drwxr-xr-x</td>
26 <td style="font-family:monospace"></td>
25 <td style="font-family:monospace"></td>
27 <td><a href="{url}file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a></td>
26 <td><a href="{url}file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a></td>
28 <td class="link">&nbsp;</td>
27 <td class="link">&nbsp;</td>
29 </tr>
28 </tr>
30 #dentries%manifestdirentry#
29 #dentries%manifestdirentry#
31 #fentries%manifestfileentry#
30 #fentries%manifestfileentry#
32 </table>
31 </table>
32
33 #footer#
33 #footer#
@@ -1,57 +1,57 b''
1 default = 'summary'
1 default = 'summary'
2 header = header.tmpl
2 header = header.tmpl
3 footer = footer.tmpl
3 footer = footer.tmpl
4 search = search.tmpl
4 search = search.tmpl
5 changelog = changelog.tmpl
5 changelog = changelog.tmpl
6 summary = summary.tmpl
6 summary = summary.tmpl
7 error = error.tmpl
7 error = error.tmpl
8 naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
8 naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
9 navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
9 navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> '
10 filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
10 filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> '
11 filedifflink = '<a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
11 filedifflink = '<a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#file|escape#</a> '
12 filenodelink = '<tr class="parity#parity#"><td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">#file|escape#</a></td><td></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> | <a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a></td></tr>'
12 filenodelink = '<tr class="parity#parity#"><td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">#file|escape#</a></td><td></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> | <a href="#url#diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a></td></tr>'
13 fileellipses = '...'
13 fileellipses = '...'
14 changelogentry = changelogentry.tmpl
14 changelogentry = changelogentry.tmpl
15 searchentry = changelogentry.tmpl
15 searchentry = changelogentry.tmpl
16 changeset = changeset.tmpl
16 changeset = changeset.tmpl
17 manifest = manifest.tmpl
17 manifest = manifest.tmpl
18 manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#/</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
18 manifestdirentry = '<tr class="parity#parity#"><td style="font-family:monospace">drwxr-xr-x</td><td style="font-family:monospace"></td><td><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short##path|urlescape#{sessionvars%urlparameter}">manifest</a></td></tr>'
19 manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
19 manifestfileentry = '<tr class="parity#parity#"><td style="font-family:monospace">#permissions|permissions#</td><td style="font-family:monospace" align=right>#size#</td><td class="list"><a class="list" href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#basename|escape#</a></td><td class="link"><a href="#url#file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a> | <a href="#url#log/#node|short#/#file|urlescape#{sessionvars%urlparameter}">revisions</a> | <a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a></td></tr>'
20 filerevision = filerevision.tmpl
20 filerevision = filerevision.tmpl
21 fileannotate = fileannotate.tmpl
21 fileannotate = fileannotate.tmpl
22 filediff = filediff.tmpl
22 filediff = filediff.tmpl
23 filelog = filelog.tmpl
23 filelog = filelog.tmpl
24 fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><span class="linenr"> #linenumber#</span> #line|escape#</pre></div>'
24 fileline = '<div style="font-family:monospace" class="parity#parity#"><pre><span class="linenr"> #linenumber#</span> #line|escape#</pre></div>'
25 annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>'
25 annotateline = '<tr style="font-family:monospace" class="parity#parity#"><td class="linenr" style="text-align: right;"><a href="#url#annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">#author|obfuscate#@#rev#</a></td><td><pre>#line|escape#</pre></td></tr>'
26 difflineplus = '<div style="color:#008800;">#line|escape#</div>'
26 difflineplus = '<div style="color:#008800;">#line|escape#</div>'
27 difflineminus = '<div style="color:#cc0000;">#line|escape#</div>'
27 difflineminus = '<div style="color:#cc0000;">#line|escape#</div>'
28 difflineat = '<div style="color:#990099;">#line|escape#</div>'
28 difflineat = '<div style="color:#990099;">#line|escape#</div>'
29 diffline = '<div>#line|escape#</div>'
29 diffline = '<div>#line|escape#</div>'
30 changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
30 changelogparent = '<tr><th class="parent">parent #rev#:</th><td class="parent"><a href="#url#rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
31 changesetparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
31 changesetparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
32 filerevparent = '<tr><td class="metatag">parent {rev}:</td><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
32 filerevparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
33 filerename = '{file|escape}@'
33 filerename = '{file|escape}@'
34 filelogrename = '| <a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">base</a>'
34 filelogrename = '| <a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">base</a>'
35 fileannotateparent = '<tr><td class="metatag">parent {rev}:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
35 fileannotateparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a></td></tr>'
36 changelogchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
36 changelogchild = '<tr><th class="child">child #rev#:</th><td class="child"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
37 changesetchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
37 changesetchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
38 filerevchild = '<tr><td class="metatag">child {rev}:</td><td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
38 filerevchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
39 fileannotatechild = '<tr><td class="metatag">child {rev}:</td><td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
39 fileannotatechild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
40 tags = tags.tmpl
40 tags = tags.tmpl
41 tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>#tag|escape#</b></a></td><td class="link"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/#node|short#{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
41 tagentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>#tag|escape#</b></a></td><td class="link"><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/#node|short#{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
42 branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>'
42 branchentry = '<tr class="parity{parity}"><td class="age"><i>{date|age} ago</i></td><td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></td><td>{branch|escape}</td><td class="link"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">manifest</a></td></tr>'
43 diffblock = '<pre>#lines#</pre>'
43 diffblock = '<pre>#lines#</pre>'
44 changelogtag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
44 changelogtag = '<tr><th class="tag">tag:</th><td class="tag">#tag|escape#</td></tr>'
45 changesettag = '<tr><td>tag</td><td>#tag|escape#</td></tr>'
45 changesettag = '<tr><td>tag</td><td>#tag|escape#</td></tr>'
46 filediffparent = '<tr><th class="parent">parent {rev}:</th><td class="parent"><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
46 filediffparent = '<tr><td>parent {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
47 filelogparent = '<tr><td align="right">parent #rev#:&nbsp;</td><td><a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
47 filelogparent = '<tr><td align="right">parent #rev#:&nbsp;</td><td><a href="{url}file/{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
48 filediffchild = '<tr><th class="child">child {rev}:</th><td class="child"><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
48 filediffchild = '<tr><td>child {rev}</td><td style="font-family:monospace"><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td></tr>'
49 filelogchild = '<tr><td align="right">child #rev#:&nbsp;</td><td><a href="{url}file{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
49 filelogchild = '<tr><td align="right">child #rev#:&nbsp;</td><td><a href="{url}file{node|short}/#file|urlescape#{sessionvars%urlparameter}">#node|short#</a></td></tr>'
50 shortlog = shortlog.tmpl
50 shortlog = shortlog.tmpl
51 shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author#</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link" nowrap><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
51 shortlogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><i>#author#</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link" nowrap><a href="{url}rev/#node|short#{sessionvars%urlparameter}">changeset</a> | <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a></td></tr>'
52 filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> #rename%filelogrename#</td></tr>'
52 filelogentry = '<tr class="parity#parity#"><td class="age"><i>#date|age# ago</i></td><td><a class="list" href="{url}rev/#node|short#{sessionvars%urlparameter}"><b>#desc|strip|firstline|escape#</b></a></td><td class="link"><a href="{url}file/#node|short#/#file|urlescape#{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/#node|short#/#file|urlescape#{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/#node|short#/#file|urlescape#{sessionvars%urlparameter}">annotate</a> #rename%filelogrename#</td></tr>'
53 archiveentry = ' | <a href="{url}archive/{node|short}{extension}">#type|escape#</a> '
53 archiveentry = ' | <a href="{url}archive/{node|short}{extension}">#type|escape#</a> '
54 indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a class="rss_logo" href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
54 indexentry = '<tr class="parity#parity#"><td><a class="list" href="#url#{sessionvars%urlparameter}"><b>#name|escape#</b></a></td><td>#description#</td><td>#contact|obfuscate#</td><td class="age">#lastchange|age# ago</td><td class="indexlinks"><a class="rss_logo" href="#url#rss-log">RSS</a> #archives%archiveentry#</td></tr>'
55 index = index.tmpl
55 index = index.tmpl
56 urlparameter = '#separator##name#=#value|urlescape#'
56 urlparameter = '#separator##name#=#value|urlescape#'
57 hiddenformentry = '<input type="hidden" name="#name#" value="#value|escape#" />'
57 hiddenformentry = '<input type="hidden" name="#name#" value="#value|escape#" />'
@@ -1,27 +1,32 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Search</title>
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
6 <body>
7
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / search
10
11 <form action="{url}log">
12 {sessionvars%hiddenformentry}
13 <div class="search">
14 <input type="text" name="rev" value="#query|escape#" />
15 </div>
16 </form>
17 </div>
18
2 <div class="page_nav">
19 <div class="page_nav">
3 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
20 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
4 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
21 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
5 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
22 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
6 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
7 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a><br/>
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
25 <br/>
8 </div>
26 </div>
9
27
10 <h2>searching for #query|escape#</h2>
28 <div class="title">searching for #query|escape#</div>
11
12 <form action="{url}log">
13 {sessionvars%hiddenformentry}
14 search:
15 <input name="rev" type="text" width="30" value="#query|escape#">
16 </form>
17
29
18 #entries#
30 #entries#
19
31
20 <form action="{url}log">
21 {sessionvars%hiddenformentry}
22 search:
23 <input type="hidden" name="style" value="gitweb">
24 <input name="rev" type="text" width="30">
25 </form>
26
27 #footer#
32 #footer#
@@ -1,34 +1,38 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Shortlog</title>
2 <title>#repo|escape#: Shortlog</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / shortlog
10 </div>
10 </div>
11
11
12 <form action="{url}log">
12 <form action="{url}log">
13 {sessionvars%hiddenformentry}
13 {sessionvars%hiddenformentry}
14 <div class="search">
14 <div class="search">
15 <input type="text" name="rev" />
15 <input type="text" name="rev" />
16 </div>
16 </div>
17 </form>
17 </form>
18 </div>
18 </div>
19 <div class="page_nav">
19 <div class="page_nav">
20 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
20 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
21 shortlog |
21 shortlog |
22 <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> |
22 <a href="{url}log/#rev#{sessionvars%urlparameter}">changelog</a> |
23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#<br/>
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
25 <br/>
25 <br/>
26
27 #changenav%navshortentry#<br/>
26 #changenav%navshortentry#<br/>
28 </div>
27 </div>
29
28
29 <div class="title">&nbsp;</div>
30 <table cellspacing="0">
30 <table cellspacing="0">
31 #entries%shortlogentry#
31 #entries%shortlogentry#
32 </table>
32 </table>
33
33
34 <div class="page_nav">
35 #changenav%navshortentry#
36 </div>
37
34 #footer#
38 #footer#
@@ -1,46 +1,54 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Summary</title>
2 <title>#repo|escape#: Summary</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / summary
10
11 <form action="{url}log">
12 {sessionvars%hiddenformentry}
13 <div class="search">
14 <input type="text" name="rev" />
10 </div>
15 </div>
16 </form>
17 </div>
18
11 <div class="page_nav">
19 <div class="page_nav">
12 summary |
20 summary |
13 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
21 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
22 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
23 <a href="{url}tags{sessionvars%urlparameter}">tags</a> |
16 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
24 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>#archives%archiveentry#
17 <br/>
25 <br/>
18 </div>
26 </div>
19
27
20 <div class="title">&nbsp;</div>
28 <div class="title">&nbsp;</div>
21 <table cellspacing="0">
29 <table cellspacing="0">
22 <tr><td>description</td><td>#desc#</td></tr>
30 <tr><td>description</td><td>#desc#</td></tr>
23 <tr><td>owner</td><td>#owner|escape#</td></tr>
31 <tr><td>owner</td><td>#owner|escape#</td></tr>
24 <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr>
32 <tr><td>last change</td><td>#lastchange|rfc822date#</td></tr>
25 </table>
33 </table>
26
34
27 <div><a class="title" href="{url}log{sessionvars%urlparameter}">changes</a></div>
35 <div><a class="title" href="{url}log{sessionvars%urlparameter}">changes</a></div>
28 <table cellspacing="0">
36 <table cellspacing="0">
29 #shortlog#
37 #shortlog#
30 <tr class="light"><td colspan="3"><a class="list" href="{url}log{sessionvars%urlparameter}">...</a></td></tr>
38 <tr class="light"><td colspan="4"><a class="list" href="{url}log{sessionvars%urlparameter}">...</a></td></tr>
31 </table>
39 </table>
32
40
33 <div><a class="title" href="{url}tags{sessionvars%urlparameter}">tags</a></div>
41 <div><a class="title" href="{url}tags{sessionvars%urlparameter}">tags</a></div>
34 <table cellspacing="0">
42 <table cellspacing="0">
35 #tags#
43 #tags#
36 <tr class="light"><td colspan="3"><a class="list" href="{url}tags{sessionvars%urlparameter}">...</a></td></tr>
44 <tr class="light"><td colspan="3"><a class="list" href="{url}tags{sessionvars%urlparameter}">...</a></td></tr>
37 </table>
45 </table>
38
46
39 <div><a class="title" href="#">branches</a></div>
47 <div><a class="title" href="#">branches</a></div>
40 <table cellspacing="0">
48 <table cellspacing="0">
41 {branches%branchentry}
49 {branches%branchentry}
42 <tr class="light">
50 <tr class="light">
43 <td colspan="3"><a class="list" href="#">...</a></td>
51 <td colspan="4"><a class="list" href="#">...</a></td>
44 </tr>
52 </tr>
45 </table>
53 </table>
46 #footer#
54 #footer#
@@ -1,25 +1,26 b''
1 #header#
1 #header#
2 <title>#repo|escape#: Tags</title>
2 <title>#repo|escape#: Tags</title>
3 <link rel="alternate" type="application/rss+xml"
3 <link rel="alternate" type="application/rss+xml"
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
4 href="{url}rss-log" title="RSS feed for #repo|escape#">
5 </head>
5 </head>
6 <body>
6 <body>
7
7
8 <div class="page_header">
8 <div class="page_header">
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags
9 <a href="http://www.selenic.com/mercurial/" title="Mercurial"><div style="float:right;">Mercurial</div></a><a href="{url}summary{sessionvars%urlparameter}">#repo|escape#</a> / tags
10 </div>
10 </div>
11
11
12 <div class="page_nav">
12 <div class="page_nav">
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
13 <a href="{url}summary{sessionvars%urlparameter}">summary</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
14 <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
15 <a href="{url}log{sessionvars%urlparameter}">changelog</a> |
16 tags |
16 tags |
17 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>
17 <a href="{url}file/#node|short#{sessionvars%urlparameter}">manifest</a>
18 <br/>
18 <br/>
19 </div>
19 </div>
20
20
21 <div class="title">&nbsp;</div>
21 <table cellspacing="0">
22 <table cellspacing="0">
22 #entries%tagentry#
23 #entries%tagentry#
23 </table>
24 </table>
24
25
25 #footer#
26 #footer#
@@ -1,25 +1,25 b''
1 #header#
1 #header#
2 <title>#repo|escape#: manifest for changeset #node|short#</title>
2 <title>#repo|escape#: manifest for changeset #node|short#</title>
3 </head>
3 </head>
4 <body>
4 <body>
5
5
6 <div class="buttons">
6 <div class="buttons">
7 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
7 <a href="#url#log/#rev#{sessionvars%urlparameter}">changelog</a>
8 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
8 <a href="#url#shortlog/#rev#{sessionvars%urlparameter}">shortlog</a>
9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
10 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
10 <a href="#url#rev/#node|short#{sessionvars%urlparameter}">changeset</a>
11 #archives%archiveentry#
11 #archives%archiveentry#
12 </div>
12 </div>
13
13
14 <h2>manifest for changeset #node|short#: #path|escape#</h2>
14 <h2>manifest for changeset #node|short#: #path|escape#</h2>
15
15
16 <table cellpadding="0" cellspacing="0">
16 <table cellpadding="0" cellspacing="0">
17 <tr class="parity1">
17 <tr class="parity#upparity#">
18 <td><tt>drwxr-xr-x</tt>&nbsp;
18 <td><tt>drwxr-xr-x</tt>&nbsp;
19 <td>&nbsp;
19 <td>&nbsp;
20 <td><a href="#url#file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a>
20 <td><a href="#url#file/#node|short##up|urlescape#{sessionvars%urlparameter}">[up]</a>
21 </tr>
21 </tr>
22 #dentries%manifestdirentry#
22 #dentries%manifestdirentry#
23 #fentries%manifestfileentry#
23 #fentries%manifestfileentry#
24 </table>
24 </table>
25 #footer#
25 #footer#
@@ -1,33 +1,34 b''
1 #header#
1 #header#
2 <title>#repo|escape#: searching for #query|escape#</title>
2 <title>#repo|escape#: searching for #query|escape#</title>
3 </head>
3 </head>
4 <body>
4 <body>
5
5
6 <div class="buttons">
6 <div class="buttons">
7 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
7 <a href="#url#log{sessionvars%urlparameter}">changelog</a>
8 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
8 <a href="#url#shortlog{sessionvars%urlparameter}">shortlog</a>
9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
9 <a href="#url#tags{sessionvars%urlparameter}">tags</a>
10 <a href="#url#file/#node|short#{sessionvars%urlparameter}">manifest</a>
10 <a href="#url#file/#node|short#{sessionvars%urlparameter}">manifest</a>
11 #archives%archiveentry#
11 </div>
12 </div>
12
13
13 <h2>searching for #query|escape#</h2>
14 <h2>searching for #query|escape#</h2>
14
15
15 <form>
16 <form>
16 {sessionvars%hiddenformentry}
17 {sessionvars%hiddenformentry}
17 <p>
18 <p>
18 search:
19 search:
19 <input name="rev" type="text" width="30" value="#query|escape#">
20 <input name="rev" type="text" width="30" value="#query|escape#">
20 </p>
21 </p>
21 </form>
22 </form>
22
23
23 #entries#
24 #entries#
24
25
25 <form>
26 <form>
26 {sessionvars%hiddenformentry}
27 {sessionvars%hiddenformentry}
27 <p>
28 <p>
28 search:
29 search:
29 <input name="rev" type="text" width="30" value="#query|escape#">
30 <input name="rev" type="text" width="30" value="#query|escape#">
30 </p>
31 </p>
31 </form>
32 </form>
32
33
33 #footer#
34 #footer#
@@ -1,26 +1,26 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init rep
3 hg init rep
4 cd rep
4 cd rep
5 mkdir dir
5 mkdir dir
6 touch foo dir/bar
6 touch foo dir/bar
7 hg -v addremove
7 hg -v addremove
8 hg -v commit -m "add 1" -d "1000000 0"
8 hg -v commit -m "add 1" -d "1000000 0"
9 cd dir/
9 cd dir/
10 touch ../foo_2 bar_2
10 touch ../foo_2 bar_2
11 hg -v addremove
11 hg -v addremove
12 hg -v commit -m "add 2" -d "1000000 0"
12 hg -v commit -m "add 2" -d "1000000 0"
13
13
14 cd ..
14 cd ..
15 hg init sim
15 hg init sim
16 cd sim
16 cd sim
17 echo a > a
17 echo a > a
18 echo a >> a
18 echo a >> a
19 echo a >> a
19 echo a >> a
20 echo c > c
20 echo c > c
21 hg commit -Ama
21 hg commit -Ama
22 mv a b
22 mv a b
23 rm c
23 rm c
24 echo d > d
24 echo d > d
25 hg addremove -s 0.5
25 hg addremove -s 50
26 hg commit -mb
26 hg commit -mb
@@ -1,37 +1,43 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 hg init rep; cd rep
3 hg init rep; cd rep
4
4
5 touch empty-file
5 touch empty-file
6 python -c 'for x in range(10000): print x' > large-file
6 python -c 'for x in range(10000): print x' > large-file
7
7
8 hg addremove
8 hg addremove
9
9
10 hg commit -m A
10 hg commit -m A
11
11
12 rm large-file empty-file
12 rm large-file empty-file
13 python -c 'for x in range(10,10000): print x' > another-file
13 python -c 'for x in range(10,10000): print x' > another-file
14
14
15 hg addremove -s50
15 hg addremove -s50
16
16
17 hg commit -m B
17 hg commit -m B
18
18
19 echo % comparing two empty files caused ZeroDivisionError in the past
20 hg update -C 0
21 rm empty-file
22 touch another-empty-file
23 hg addremove -s50
24
19 cd ..
25 cd ..
20
26
21 hg init rep2; cd rep2
27 hg init rep2; cd rep2
22
28
23 python -c 'for x in range(10000): print x' > large-file
29 python -c 'for x in range(10000): print x' > large-file
24 python -c 'for x in range(50): print x' > tiny-file
30 python -c 'for x in range(50): print x' > tiny-file
25
31
26 hg addremove
32 hg addremove
27
33
28 hg commit -m A
34 hg commit -m A
29
35
30 python -c 'for x in range(70): print x' > small-file
36 python -c 'for x in range(70): print x' > small-file
31 rm tiny-file
37 rm tiny-file
32 rm large-file
38 rm large-file
33
39
34 hg addremove -s50
40 hg addremove -s50
35
41
36 hg commit -m B
42 hg commit -m B
37
43
@@ -1,12 +1,16 b''
1 adding empty-file
1 adding empty-file
2 adding large-file
2 adding large-file
3 adding another-file
3 adding another-file
4 removing empty-file
4 removing empty-file
5 removing large-file
5 removing large-file
6 recording removal of large-file as rename to another-file (99% similar)
6 recording removal of large-file as rename to another-file (99% similar)
7 % comparing two empty files caused ZeroDivisionError in the past
8 2 files updated, 0 files merged, 1 files removed, 0 files unresolved
9 adding another-empty-file
10 removing empty-file
7 adding large-file
11 adding large-file
8 adding tiny-file
12 adding tiny-file
9 adding small-file
13 adding small-file
10 removing large-file
14 removing large-file
11 removing tiny-file
15 removing tiny-file
12 recording removal of tiny-file as rename to small-file (82% similar)
16 recording removal of tiny-file as rename to small-file (82% similar)
@@ -1,347 +1,348 b''
1 adding foo
1 adding foo
2 checking changesets
2 checking changesets
3 checking manifests
3 checking manifests
4 crosschecking files in changesets and manifests
4 crosschecking files in changesets and manifests
5 checking files
5 checking files
6 1 files, 9 changesets, 9 total revisions
6 1 files, 9 changesets, 9 total revisions
7 comparing with http://localhost:20059/
7 comparing with http://localhost:20059/
8 changeset: 0:9cb21d99fe27
8 changeset: 0:9cb21d99fe27
9 user: test
9 user: test
10 date: Mon Jan 12 13:46:40 1970 +0000
10 date: Mon Jan 12 13:46:40 1970 +0000
11 summary: 0
11 summary: 0
12
12
13 changeset: 1:d717f5dfad6a
13 changeset: 1:d717f5dfad6a
14 user: test
14 user: test
15 date: Mon Jan 12 13:46:40 1970 +0000
15 date: Mon Jan 12 13:46:40 1970 +0000
16 summary: 1
16 summary: 1
17
17
18 changeset: 2:c0d6b86da426
18 changeset: 2:c0d6b86da426
19 user: test
19 user: test
20 date: Mon Jan 12 13:46:40 1970 +0000
20 date: Mon Jan 12 13:46:40 1970 +0000
21 summary: 2
21 summary: 2
22
22
23 changeset: 3:dfacbd43b3fe
23 changeset: 3:dfacbd43b3fe
24 user: test
24 user: test
25 date: Mon Jan 12 13:46:40 1970 +0000
25 date: Mon Jan 12 13:46:40 1970 +0000
26 summary: 3
26 summary: 3
27
27
28 changeset: 4:1f3a964b6022
28 changeset: 4:1f3a964b6022
29 user: test
29 user: test
30 date: Mon Jan 12 13:46:40 1970 +0000
30 date: Mon Jan 12 13:46:40 1970 +0000
31 summary: 4
31 summary: 4
32
32
33 changeset: 5:c028bcc7a28a
33 changeset: 5:c028bcc7a28a
34 user: test
34 user: test
35 date: Mon Jan 12 13:46:40 1970 +0000
35 date: Mon Jan 12 13:46:40 1970 +0000
36 summary: 5
36 summary: 5
37
37
38 changeset: 6:a0c0095f3389
38 changeset: 6:a0c0095f3389
39 user: test
39 user: test
40 date: Mon Jan 12 13:46:40 1970 +0000
40 date: Mon Jan 12 13:46:40 1970 +0000
41 summary: 6
41 summary: 6
42
42
43 changeset: 7:d4be65f4e891
43 changeset: 7:d4be65f4e891
44 user: test
44 user: test
45 date: Mon Jan 12 13:46:40 1970 +0000
45 date: Mon Jan 12 13:46:40 1970 +0000
46 summary: 7
46 summary: 7
47
47
48 changeset: 8:92b83e334ef8
48 changeset: 8:92b83e334ef8
49 tag: tip
49 tag: tip
50 user: test
50 user: test
51 date: Mon Jan 12 13:46:40 1970 +0000
51 date: Mon Jan 12 13:46:40 1970 +0000
52 summary: 8
52 summary: 8
53
53
54 comparing with http://localhost:20059/
54 comparing with http://localhost:20059/
55 changeset: 0:9cb21d99fe27
55 changeset: 0:9cb21d99fe27
56 user: test
56 user: test
57 date: Mon Jan 12 13:46:40 1970 +0000
57 date: Mon Jan 12 13:46:40 1970 +0000
58 summary: 0
58 summary: 0
59
59
60 changeset: 1:d717f5dfad6a
60 changeset: 1:d717f5dfad6a
61 user: test
61 user: test
62 date: Mon Jan 12 13:46:40 1970 +0000
62 date: Mon Jan 12 13:46:40 1970 +0000
63 summary: 1
63 summary: 1
64
64
65 changeset: 2:c0d6b86da426
65 changeset: 2:c0d6b86da426
66 user: test
66 user: test
67 date: Mon Jan 12 13:46:40 1970 +0000
67 date: Mon Jan 12 13:46:40 1970 +0000
68 summary: 2
68 summary: 2
69
69
70 changeset: 3:dfacbd43b3fe
70 changeset: 3:dfacbd43b3fe
71 user: test
71 user: test
72 date: Mon Jan 12 13:46:40 1970 +0000
72 date: Mon Jan 12 13:46:40 1970 +0000
73 summary: 3
73 summary: 3
74
74
75 changeset: 4:1f3a964b6022
75 changeset: 4:1f3a964b6022
76 tag: tip
76 user: test
77 user: test
77 date: Mon Jan 12 13:46:40 1970 +0000
78 date: Mon Jan 12 13:46:40 1970 +0000
78 summary: 4
79 summary: 4
79
80
80 comparing with test
81 comparing with test
81 changeset: 0:9cb21d99fe27
82 changeset: 0:9cb21d99fe27
82 user: test
83 user: test
83 date: Mon Jan 12 13:46:40 1970 +0000
84 date: Mon Jan 12 13:46:40 1970 +0000
84 summary: 0
85 summary: 0
85
86
86 changeset: 1:d717f5dfad6a
87 changeset: 1:d717f5dfad6a
87 user: test
88 user: test
88 date: Mon Jan 12 13:46:40 1970 +0000
89 date: Mon Jan 12 13:46:40 1970 +0000
89 summary: 1
90 summary: 1
90
91
91 changeset: 2:c0d6b86da426
92 changeset: 2:c0d6b86da426
92 user: test
93 user: test
93 date: Mon Jan 12 13:46:40 1970 +0000
94 date: Mon Jan 12 13:46:40 1970 +0000
94 summary: 2
95 summary: 2
95
96
96 changeset: 3:dfacbd43b3fe
97 changeset: 3:dfacbd43b3fe
97 user: test
98 user: test
98 date: Mon Jan 12 13:46:40 1970 +0000
99 date: Mon Jan 12 13:46:40 1970 +0000
99 summary: 3
100 summary: 3
100
101
101 changeset: 4:1f3a964b6022
102 changeset: 4:1f3a964b6022
102 user: test
103 user: test
103 date: Mon Jan 12 13:46:40 1970 +0000
104 date: Mon Jan 12 13:46:40 1970 +0000
104 summary: 4
105 summary: 4
105
106
106 changeset: 5:c028bcc7a28a
107 changeset: 5:c028bcc7a28a
107 user: test
108 user: test
108 date: Mon Jan 12 13:46:40 1970 +0000
109 date: Mon Jan 12 13:46:40 1970 +0000
109 summary: 5
110 summary: 5
110
111
111 changeset: 6:a0c0095f3389
112 changeset: 6:a0c0095f3389
112 user: test
113 user: test
113 date: Mon Jan 12 13:46:40 1970 +0000
114 date: Mon Jan 12 13:46:40 1970 +0000
114 summary: 6
115 summary: 6
115
116
116 changeset: 7:d4be65f4e891
117 changeset: 7:d4be65f4e891
117 user: test
118 user: test
118 date: Mon Jan 12 13:46:40 1970 +0000
119 date: Mon Jan 12 13:46:40 1970 +0000
119 summary: 7
120 summary: 7
120
121
121 changeset: 8:92b83e334ef8
122 changeset: 8:92b83e334ef8
122 tag: tip
123 tag: tip
123 user: test
124 user: test
124 date: Mon Jan 12 13:46:40 1970 +0000
125 date: Mon Jan 12 13:46:40 1970 +0000
125 summary: 8
126 summary: 8
126
127
127 comparing with test
128 comparing with test
128 changeset: 0:9cb21d99fe27
129 changeset: 0:9cb21d99fe27
129 user: test
130 user: test
130 date: Mon Jan 12 13:46:40 1970 +0000
131 date: Mon Jan 12 13:46:40 1970 +0000
131 summary: 0
132 summary: 0
132
133
133 changeset: 1:d717f5dfad6a
134 changeset: 1:d717f5dfad6a
134 user: test
135 user: test
135 date: Mon Jan 12 13:46:40 1970 +0000
136 date: Mon Jan 12 13:46:40 1970 +0000
136 summary: 1
137 summary: 1
137
138
138 changeset: 2:c0d6b86da426
139 changeset: 2:c0d6b86da426
139 user: test
140 user: test
140 date: Mon Jan 12 13:46:40 1970 +0000
141 date: Mon Jan 12 13:46:40 1970 +0000
141 summary: 2
142 summary: 2
142
143
143 changeset: 3:dfacbd43b3fe
144 changeset: 3:dfacbd43b3fe
144 user: test
145 user: test
145 date: Mon Jan 12 13:46:40 1970 +0000
146 date: Mon Jan 12 13:46:40 1970 +0000
146 summary: 3
147 summary: 3
147
148
148 changeset: 4:1f3a964b6022
149 changeset: 4:1f3a964b6022
149 user: test
150 user: test
150 date: Mon Jan 12 13:46:40 1970 +0000
151 date: Mon Jan 12 13:46:40 1970 +0000
151 summary: 4
152 summary: 4
152
153
153 comparing with http://localhost:20059/
154 comparing with http://localhost:20059/
154 changeset: 0:9cb21d99fe27
155 changeset: 0:9cb21d99fe27
155 user: test
156 user: test
156 date: Mon Jan 12 13:46:40 1970 +0000
157 date: Mon Jan 12 13:46:40 1970 +0000
157 summary: 0
158 summary: 0
158
159
159 changeset: 1:d717f5dfad6a
160 changeset: 1:d717f5dfad6a
160 user: test
161 user: test
161 date: Mon Jan 12 13:46:40 1970 +0000
162 date: Mon Jan 12 13:46:40 1970 +0000
162 summary: 1
163 summary: 1
163
164
164 changeset: 2:c0d6b86da426
165 changeset: 2:c0d6b86da426
165 user: test
166 user: test
166 date: Mon Jan 12 13:46:40 1970 +0000
167 date: Mon Jan 12 13:46:40 1970 +0000
167 summary: 2
168 summary: 2
168
169
169 changeset: 3:dfacbd43b3fe
170 changeset: 3:dfacbd43b3fe
170 user: test
171 user: test
171 date: Mon Jan 12 13:46:40 1970 +0000
172 date: Mon Jan 12 13:46:40 1970 +0000
172 summary: 3
173 summary: 3
173
174
174 changeset: 4:1f3a964b6022
175 changeset: 4:1f3a964b6022
175 user: test
176 user: test
176 date: Mon Jan 12 13:46:40 1970 +0000
177 date: Mon Jan 12 13:46:40 1970 +0000
177 summary: 4
178 summary: 4
178
179
179 changeset: 5:c028bcc7a28a
180 changeset: 5:c028bcc7a28a
180 user: test
181 user: test
181 date: Mon Jan 12 13:46:40 1970 +0000
182 date: Mon Jan 12 13:46:40 1970 +0000
182 summary: 5
183 summary: 5
183
184
184 changeset: 6:a0c0095f3389
185 changeset: 6:a0c0095f3389
185 user: test
186 user: test
186 date: Mon Jan 12 13:46:40 1970 +0000
187 date: Mon Jan 12 13:46:40 1970 +0000
187 summary: 6
188 summary: 6
188
189
189 changeset: 7:d4be65f4e891
190 changeset: 7:d4be65f4e891
190 user: test
191 user: test
191 date: Mon Jan 12 13:46:40 1970 +0000
192 date: Mon Jan 12 13:46:40 1970 +0000
192 summary: 7
193 summary: 7
193
194
194 changeset: 8:92b83e334ef8
195 changeset: 8:92b83e334ef8
195 tag: tip
196 tag: tip
196 user: test
197 user: test
197 date: Mon Jan 12 13:46:40 1970 +0000
198 date: Mon Jan 12 13:46:40 1970 +0000
198 summary: 8
199 summary: 8
199
200
200 comparing with test
201 comparing with test
201 changeset: 0:9cb21d99fe27
202 changeset: 0:9cb21d99fe27
202 user: test
203 user: test
203 date: Mon Jan 12 13:46:40 1970 +0000
204 date: Mon Jan 12 13:46:40 1970 +0000
204 summary: 0
205 summary: 0
205
206
206 changeset: 1:d717f5dfad6a
207 changeset: 1:d717f5dfad6a
207 user: test
208 user: test
208 date: Mon Jan 12 13:46:40 1970 +0000
209 date: Mon Jan 12 13:46:40 1970 +0000
209 summary: 1
210 summary: 1
210
211
211 changeset: 2:c0d6b86da426
212 changeset: 2:c0d6b86da426
212 user: test
213 user: test
213 date: Mon Jan 12 13:46:40 1970 +0000
214 date: Mon Jan 12 13:46:40 1970 +0000
214 summary: 2
215 summary: 2
215
216
216 changeset: 3:dfacbd43b3fe
217 changeset: 3:dfacbd43b3fe
217 user: test
218 user: test
218 date: Mon Jan 12 13:46:40 1970 +0000
219 date: Mon Jan 12 13:46:40 1970 +0000
219 summary: 3
220 summary: 3
220
221
221 changeset: 4:1f3a964b6022
222 changeset: 4:1f3a964b6022
222 user: test
223 user: test
223 date: Mon Jan 12 13:46:40 1970 +0000
224 date: Mon Jan 12 13:46:40 1970 +0000
224 summary: 4
225 summary: 4
225
226
226 changeset: 5:c028bcc7a28a
227 changeset: 5:c028bcc7a28a
227 user: test
228 user: test
228 date: Mon Jan 12 13:46:40 1970 +0000
229 date: Mon Jan 12 13:46:40 1970 +0000
229 summary: 5
230 summary: 5
230
231
231 changeset: 6:a0c0095f3389
232 changeset: 6:a0c0095f3389
232 user: test
233 user: test
233 date: Mon Jan 12 13:46:40 1970 +0000
234 date: Mon Jan 12 13:46:40 1970 +0000
234 summary: 6
235 summary: 6
235
236
236 changeset: 7:d4be65f4e891
237 changeset: 7:d4be65f4e891
237 user: test
238 user: test
238 date: Mon Jan 12 13:46:40 1970 +0000
239 date: Mon Jan 12 13:46:40 1970 +0000
239 summary: 7
240 summary: 7
240
241
241 changeset: 8:92b83e334ef8
242 changeset: 8:92b83e334ef8
242 tag: tip
243 tag: tip
243 user: test
244 user: test
244 date: Mon Jan 12 13:46:40 1970 +0000
245 date: Mon Jan 12 13:46:40 1970 +0000
245 summary: 8
246 summary: 8
246
247
247 adding changesets
248 adding changesets
248 adding manifests
249 adding manifests
249 adding file changes
250 adding file changes
250 added 9 changesets with 9 changes to 1 files
251 added 9 changesets with 9 changes to 1 files
251 (run 'hg update' to get a working copy)
252 (run 'hg update' to get a working copy)
252 adding changesets
253 adding changesets
253 adding manifests
254 adding manifests
254 adding file changes
255 adding file changes
255 added 9 changesets with 9 changes to 1 files
256 added 9 changesets with 9 changes to 1 files
256 (run 'hg update' to get a working copy)
257 (run 'hg update' to get a working copy)
257 changeset: 8:92b83e334ef8
258 changeset: 8:92b83e334ef8
258 tag: tip
259 tag: tip
259 user: test
260 user: test
260 date: Mon Jan 12 13:46:40 1970 +0000
261 date: Mon Jan 12 13:46:40 1970 +0000
261 summary: 8
262 summary: 8
262
263
263 changeset: 8:92b83e334ef8
264 changeset: 8:92b83e334ef8
264 tag: tip
265 tag: tip
265 user: test
266 user: test
266 date: Mon Jan 12 13:46:40 1970 +0000
267 date: Mon Jan 12 13:46:40 1970 +0000
267 summary: 8
268 summary: 8
268
269
269 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
270 checking changesets
271 checking changesets
271 checking manifests
272 checking manifests
272 crosschecking files in changesets and manifests
273 crosschecking files in changesets and manifests
273 checking files
274 checking files
274 1 files, 14 changesets, 14 total revisions
275 1 files, 14 changesets, 14 total revisions
275 comparing with test
276 comparing with test
276 searching for changes
277 searching for changes
277 changeset: 9:3741c3ad1096
278 changeset: 9:3741c3ad1096
278 user: test
279 user: test
279 date: Mon Jan 12 13:46:40 1970 +0000
280 date: Mon Jan 12 13:46:40 1970 +0000
280 summary: 9
281 summary: 9
281
282
282 changeset: 10:de4143c8d9a5
283 changeset: 10:de4143c8d9a5
283 user: test
284 user: test
284 date: Mon Jan 12 13:46:40 1970 +0000
285 date: Mon Jan 12 13:46:40 1970 +0000
285 summary: 10
286 summary: 10
286
287
287 changeset: 11:0e1c188b9a7a
288 changeset: 11:0e1c188b9a7a
288 user: test
289 user: test
289 date: Mon Jan 12 13:46:40 1970 +0000
290 date: Mon Jan 12 13:46:40 1970 +0000
290 summary: 11
291 summary: 11
291
292
292 changeset: 12:251354d0fdd3
293 changeset: 12:251354d0fdd3
293 user: test
294 user: test
294 date: Mon Jan 12 13:46:40 1970 +0000
295 date: Mon Jan 12 13:46:40 1970 +0000
295 summary: 12
296 summary: 12
296
297
297 changeset: 13:bdaadd969642
298 changeset: 13:bdaadd969642
298 tag: tip
299 tag: tip
299 user: test
300 user: test
300 date: Mon Jan 12 13:46:40 1970 +0000
301 date: Mon Jan 12 13:46:40 1970 +0000
301 summary: 13
302 summary: 13
302
303
303 comparing with http://localhost:20059/
304 comparing with http://localhost:20059/
304 searching for changes
305 searching for changes
305 changeset: 9:3741c3ad1096
306 changeset: 9:3741c3ad1096
306 user: test
307 user: test
307 date: Mon Jan 12 13:46:40 1970 +0000
308 date: Mon Jan 12 13:46:40 1970 +0000
308 summary: 9
309 summary: 9
309
310
310 changeset: 10:de4143c8d9a5
311 changeset: 10:de4143c8d9a5
311 user: test
312 user: test
312 date: Mon Jan 12 13:46:40 1970 +0000
313 date: Mon Jan 12 13:46:40 1970 +0000
313 summary: 10
314 summary: 10
314
315
315 changeset: 11:0e1c188b9a7a
316 changeset: 11:0e1c188b9a7a
316 user: test
317 user: test
317 date: Mon Jan 12 13:46:40 1970 +0000
318 date: Mon Jan 12 13:46:40 1970 +0000
318 summary: 11
319 summary: 11
319
320
320 changeset: 12:251354d0fdd3
321 changeset: 12:251354d0fdd3
321 user: test
322 user: test
322 date: Mon Jan 12 13:46:40 1970 +0000
323 date: Mon Jan 12 13:46:40 1970 +0000
323 summary: 12
324 summary: 12
324
325
325 changeset: 13:bdaadd969642
326 changeset: 13:bdaadd969642
326 tag: tip
327 tag: tip
327 user: test
328 user: test
328 date: Mon Jan 12 13:46:40 1970 +0000
329 date: Mon Jan 12 13:46:40 1970 +0000
329 summary: 13
330 summary: 13
330
331
331 comparing with http://localhost:20059/
332 comparing with http://localhost:20059/
332 searching for changes
333 searching for changes
333 changeset: 9:3741c3ad1096
334 changeset: 9:3741c3ad1096
334 user: test
335 user: test
335 date: Mon Jan 12 13:46:40 1970 +0000
336 date: Mon Jan 12 13:46:40 1970 +0000
336 summary: 9
337 summary: 9
337
338
338 changeset: 10:de4143c8d9a5
339 changeset: 10:de4143c8d9a5
339 user: test
340 user: test
340 date: Mon Jan 12 13:46:40 1970 +0000
341 date: Mon Jan 12 13:46:40 1970 +0000
341 summary: 10
342 summary: 10
342
343
343 changeset: 11:0e1c188b9a7a
344 changeset: 11:0e1c188b9a7a
344 user: test
345 user: test
345 date: Mon Jan 12 13:46:40 1970 +0000
346 date: Mon Jan 12 13:46:40 1970 +0000
346 summary: 11
347 summary: 11
347
348
@@ -1,99 +1,130 b''
1 #!/bin/sh
1 #!/bin/sh
2
2
3 cat <<EOF >> $HGRCPATH
3 cat <<EOF >> $HGRCPATH
4 [extensions]
4 [extensions]
5 hgext.purge=
5 hgext.purge=
6 EOF
6 EOF
7
7
8 echo % init
8 echo % init
9 hg init t
9 hg init t
10 cd t
10 cd t
11
11
12 echo % setup
12 echo % setup
13 echo r1 > r1
13 echo r1 > r1
14 hg ci -qAmr1 -d'0 0'
14 hg ci -qAmr1 -d'0 0'
15 mkdir directory
15 mkdir directory
16 echo r2 > directory/r2
16 echo r2 > directory/r2
17 hg ci -qAmr2 -d'1 0'
17 hg ci -qAmr2 -d'1 0'
18 echo 'ignored' > .hgignore
18 echo 'ignored' > .hgignore
19 hg ci -qAmr3 -d'2 0'
19 hg ci -qAmr3 -d'2 0'
20
20
21 echo % delete an empty directory
21 echo % delete an empty directory
22 mkdir empty_dir
22 mkdir empty_dir
23 hg purge -p
23 hg purge -p
24 hg purge -v
24 hg purge -v
25 ls
25 ls
26
26
27 echo % delete an untracked directory
27 echo % delete an untracked directory
28 mkdir untracked_dir
28 mkdir untracked_dir
29 touch untracked_dir/untracked_file1
29 touch untracked_dir/untracked_file1
30 touch untracked_dir/untracked_file2
30 touch untracked_dir/untracked_file2
31 hg purge -p
31 hg purge -p
32 hg purge -v
32 hg purge -v
33 ls
33 ls
34
34
35 echo % delete an untracked file
35 echo % delete an untracked file
36 touch untracked_file
36 touch untracked_file
37 hg purge -p
37 hg purge -p
38 hg purge -v
38 hg purge -v
39 ls
39 ls
40
40
41 echo % delete an untracked file in a tracked directory
41 echo % delete an untracked file in a tracked directory
42 touch directory/untracked_file
42 touch directory/untracked_file
43 hg purge -p
43 hg purge -p
44 hg purge -v
44 hg purge -v
45 ls
45 ls
46
46
47 echo % delete nested directories
47 echo % delete nested directories
48 mkdir -p untracked_directory/nested_directory
48 mkdir -p untracked_directory/nested_directory
49 hg purge -p
49 hg purge -p
50 hg purge -v
50 hg purge -v
51 ls
51 ls
52
52
53 echo % delete nested directories from a subdir
53 echo % delete nested directories from a subdir
54 mkdir -p untracked_directory/nested_directory
54 mkdir -p untracked_directory/nested_directory
55 cd directory
55 cd directory
56 hg purge -p
56 hg purge -p
57 hg purge -v
57 hg purge -v
58 cd ..
58 cd ..
59 ls
59 ls
60
60
61 echo % delete only part of the tree
61 echo % delete only part of the tree
62 mkdir -p untracked_directory/nested_directory
62 mkdir -p untracked_directory/nested_directory
63 touch directory/untracked_file
63 touch directory/untracked_file
64 cd directory
64 cd directory
65 hg purge -p ../untracked_directory
65 hg purge -p ../untracked_directory
66 hg purge -v ../untracked_directory
66 hg purge -v ../untracked_directory
67 cd ..
67 cd ..
68 ls
68 ls
69 ls directory/untracked_file
69 ls directory/untracked_file
70 rm directory/untracked_file
70 rm directory/untracked_file
71
71
72 echo % delete ignored files
72 echo % delete ignored files
73 touch ignored
73 touch ignored
74 hg purge -p
74 hg purge -p
75 hg purge -v
75 hg purge -v
76 ls
76 ls
77
77
78 echo % abort with missing files until we support name mangling filesystems
78 echo % abort with missing files until we support name mangling filesystems
79 touch untracked_file
79 touch untracked_file
80 rm r1
80 rm r1
81 # hide error messages to avoid changing the output when the text changes
81 # hide error messages to avoid changing the output when the text changes
82 hg purge -p 2> /dev/null
82 hg purge -p 2> /dev/null
83 if [ $? -ne 0 ]; then
83 if [ $? -ne 0 ]; then
84 echo "refused to run"
84 echo "refused to run"
85 fi
85 fi
86 if [ -f untracked_file ]; then
86 if [ -f untracked_file ]; then
87 echo "untracked_file still around"
87 echo "untracked_file still around"
88 fi
88 fi
89 hg purge -p --force
89 hg purge -p --force
90 hg purge -v 2> /dev/null
90 hg purge -v 2> /dev/null
91 if [ $? -ne 0 ]; then
91 if [ $? -ne 0 ]; then
92 echo "refused to run"
92 echo "refused to run"
93 fi
93 fi
94 if [ -f untracked_file ]; then
94 if [ -f untracked_file ]; then
95 echo "untracked_file still around"
95 echo "untracked_file still around"
96 fi
96 fi
97 hg purge -v --force
97 hg purge -v --force
98 hg revert --all --quiet
98 hg revert --all --quiet
99 ls
99 ls
100
101 echo % skip excluded files
102 touch excluded_file
103 hg purge -p -X excluded_file
104 hg purge -v -X excluded_file
105 ls
106 rm excluded_file
107
108 echo % skip files in excluded dirs
109 mkdir excluded_dir
110 touch excluded_dir/file
111 hg purge -p -X excluded_dir
112 hg purge -v -X excluded_dir
113 ls
114 ls excluded_dir
115 rm -R excluded_dir
116
117 echo % skip excluded empty dirs
118 mkdir excluded_dir
119 hg purge -p -X excluded_dir
120 hg purge -v -X excluded_dir
121 ls
122 rmdir excluded_dir
123
124 echo % skip patterns
125 mkdir .svn
126 touch .svn/foo
127 mkdir directory/.svn
128 touch directory/.svn/foo
129 hg purge -p -X .svn -X '*/.svn'
130 hg purge -p -X re:.*.svn
@@ -1,58 +1,72 b''
1 % init
1 % init
2 % setup
2 % setup
3 % delete an empty directory
3 % delete an empty directory
4 empty_dir
4 empty_dir
5 Removing directory empty_dir
5 Removing directory empty_dir
6 directory
6 directory
7 r1
7 r1
8 % delete an untracked directory
8 % delete an untracked directory
9 untracked_dir/untracked_file1
9 untracked_dir/untracked_file1
10 untracked_dir/untracked_file2
10 untracked_dir/untracked_file2
11 Removing file untracked_dir/untracked_file1
11 Removing file untracked_dir/untracked_file1
12 Removing file untracked_dir/untracked_file2
12 Removing file untracked_dir/untracked_file2
13 Removing directory untracked_dir
13 Removing directory untracked_dir
14 directory
14 directory
15 r1
15 r1
16 % delete an untracked file
16 % delete an untracked file
17 untracked_file
17 untracked_file
18 Removing file untracked_file
18 Removing file untracked_file
19 directory
19 directory
20 r1
20 r1
21 % delete an untracked file in a tracked directory
21 % delete an untracked file in a tracked directory
22 directory/untracked_file
22 directory/untracked_file
23 Removing file directory/untracked_file
23 Removing file directory/untracked_file
24 directory
24 directory
25 r1
25 r1
26 % delete nested directories
26 % delete nested directories
27 untracked_directory/nested_directory
27 untracked_directory/nested_directory
28 Removing directory untracked_directory/nested_directory
28 Removing directory untracked_directory/nested_directory
29 Removing directory untracked_directory
29 Removing directory untracked_directory
30 directory
30 directory
31 r1
31 r1
32 % delete nested directories from a subdir
32 % delete nested directories from a subdir
33 untracked_directory/nested_directory
33 untracked_directory/nested_directory
34 Removing directory untracked_directory/nested_directory
34 Removing directory untracked_directory/nested_directory
35 Removing directory untracked_directory
35 Removing directory untracked_directory
36 directory
36 directory
37 r1
37 r1
38 % delete only part of the tree
38 % delete only part of the tree
39 untracked_directory/nested_directory
39 untracked_directory/nested_directory
40 Removing directory untracked_directory/nested_directory
40 Removing directory untracked_directory/nested_directory
41 Removing directory untracked_directory
41 Removing directory untracked_directory
42 directory
42 directory
43 r1
43 r1
44 directory/untracked_file
44 directory/untracked_file
45 % delete ignored files
45 % delete ignored files
46 ignored
46 ignored
47 Removing file ignored
47 Removing file ignored
48 directory
48 directory
49 r1
49 r1
50 % abort with missing files until we support name mangling filesystems
50 % abort with missing files until we support name mangling filesystems
51 refused to run
51 refused to run
52 untracked_file still around
52 untracked_file still around
53 untracked_file
53 untracked_file
54 refused to run
54 refused to run
55 untracked_file still around
55 untracked_file still around
56 Removing file untracked_file
56 Removing file untracked_file
57 directory
57 directory
58 r1
58 r1
59 % skip excluded files
60 directory
61 excluded_file
62 r1
63 % skip files in excluded dirs
64 directory
65 excluded_dir
66 r1
67 file
68 % skip excluded empty dirs
69 directory
70 excluded_dir
71 r1
72 % skip patterns