Change remaining users of manifest flags
Matt Mackall
r2840:046a8b03 default
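The one functional change in this diff sits in queue.pop's update branch: instead of reading the exec flags into a separate map with repo.manifest.readflags() and indexing that map per file, the flag is taken from the manifest returned by repo.manifest.read() via its execf mapping. The before/after below is recomposed from the hunk further down; repo, changes and f are locals of that method, so this is not a standalone snippet:

# Before this changeset: exec flags lived in a separate map keyed by filename.
mf1 = repo.manifest.readflags(changes[0])
mmap = repo.manifest.read(changes[0])
util.set_exec(repo.wjoin(f), mf1[f])

# After: the manifest read itself carries the flags, so readflags() is no longer needed.
mmap = repo.manifest.read(changes[0])
util.set_exec(repo.wjoin(f), mmap.execf[f])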
@@ -1,1926 +1,1925 @@
1
1
2 # queue.py - patch queues for mercurial
2 # queue.py - patch queues for mercurial
3 #
3 #
4 # Copyright 2005 Chris Mason <mason@suse.com>
4 # Copyright 2005 Chris Mason <mason@suse.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 '''patch management and development
9 '''patch management and development
10
10
11 This extension lets you work with a stack of patches in a Mercurial
11 This extension lets you work with a stack of patches in a Mercurial
12 repository. It manages two stacks of patches - all known patches, and
12 repository. It manages two stacks of patches - all known patches, and
13 applied patches (subset of known patches).
13 applied patches (subset of known patches).
14
14
15 Known patches are represented as patch files in the .hg/patches
15 Known patches are represented as patch files in the .hg/patches
16 directory. Applied patches are both patch files and changesets.
16 directory. Applied patches are both patch files and changesets.
17
17
18 Common tasks (use "hg help command" for more details):
18 Common tasks (use "hg help command" for more details):
19
19
20 prepare repository to work with patches qinit
20 prepare repository to work with patches qinit
21 create new patch qnew
21 create new patch qnew
22 import existing patch qimport
22 import existing patch qimport
23
23
24 print patch series qseries
24 print patch series qseries
25 print applied patches qapplied
25 print applied patches qapplied
26 print name of top applied patch qtop
26 print name of top applied patch qtop
27
27
28 add known patch to applied stack qpush
28 add known patch to applied stack qpush
29 remove patch from applied stack qpop
29 remove patch from applied stack qpop
30 refresh contents of top applied patch qrefresh
30 refresh contents of top applied patch qrefresh
31 '''
31 '''
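The two state files named above are plain text. As a concrete illustration of what the queue class below reads from .hg/patches, assuming invented patch names and a placeholder changeset hash (a simplification of parse_series() and statusentry, not the extension's API):

# series: one known patch per line; text after '#' holds optional guards.
series_text = "fix-readme.patch\nspeedup.patch #+experimental\n"
# status: one "rev:name" line per applied patch (see the statusentry class below).
status_text = "<node-hex>:fix-readme.patch\n"

series = [l.split('#')[0].strip() for l in series_text.splitlines()
          if l.split('#')[0].strip()]
applied = [tuple(l.split(':', 1)) for l in status_text.splitlines()]

print(series)          # ['fix-readme.patch', 'speedup.patch']
print(applied[0][1])   # 'fix-readme.patch' is the (only) applied patch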
32
32
33 from mercurial.demandload import *
33 from mercurial.demandload import *
34 demandload(globals(), "os sys re struct traceback errno bz2")
34 demandload(globals(), "os sys re struct traceback errno bz2")
35 from mercurial.i18n import gettext as _
35 from mercurial.i18n import gettext as _
36 from mercurial import ui, hg, revlog, commands, util
36 from mercurial import ui, hg, revlog, commands, util
37
37
38 commands.norepo += " qclone qversion"
38 commands.norepo += " qclone qversion"
39
39
40 class statusentry:
40 class statusentry:
41 def __init__(self, rev, name=None):
41 def __init__(self, rev, name=None):
42 if not name:
42 if not name:
43 fields = rev.split(':')
43 fields = rev.split(':')
44 if len(fields) == 2:
44 if len(fields) == 2:
45 self.rev, self.name = fields
45 self.rev, self.name = fields
46 else:
46 else:
47 self.rev, self.name = None, None
47 self.rev, self.name = None, None
48 else:
48 else:
49 self.rev, self.name = rev, name
49 self.rev, self.name = rev, name
50
50
51 def __str__(self):
51 def __str__(self):
52 return self.rev + ':' + self.name
52 return self.rev + ':' + self.name
53
53
54 class queue:
54 class queue:
55 def __init__(self, ui, path, patchdir=None):
55 def __init__(self, ui, path, patchdir=None):
56 self.basepath = path
56 self.basepath = path
57 self.path = patchdir or os.path.join(path, "patches")
57 self.path = patchdir or os.path.join(path, "patches")
58 self.opener = util.opener(self.path)
58 self.opener = util.opener(self.path)
59 self.ui = ui
59 self.ui = ui
60 self.applied = []
60 self.applied = []
61 self.full_series = []
61 self.full_series = []
62 self.applied_dirty = 0
62 self.applied_dirty = 0
63 self.series_dirty = 0
63 self.series_dirty = 0
64 self.series_path = "series"
64 self.series_path = "series"
65 self.status_path = "status"
65 self.status_path = "status"
66 self.guards_path = "guards"
66 self.guards_path = "guards"
67 self.active_guards = None
67 self.active_guards = None
68 self.guards_dirty = False
68 self.guards_dirty = False
69
69
70 if os.path.exists(self.join(self.series_path)):
70 if os.path.exists(self.join(self.series_path)):
71 self.full_series = self.opener(self.series_path).read().splitlines()
71 self.full_series = self.opener(self.series_path).read().splitlines()
72 self.parse_series()
72 self.parse_series()
73
73
74 if os.path.exists(self.join(self.status_path)):
74 if os.path.exists(self.join(self.status_path)):
75 lines = self.opener(self.status_path).read().splitlines()
75 lines = self.opener(self.status_path).read().splitlines()
76 self.applied = [statusentry(l) for l in lines]
76 self.applied = [statusentry(l) for l in lines]
77
77
78 def join(self, *p):
78 def join(self, *p):
79 return os.path.join(self.path, *p)
79 return os.path.join(self.path, *p)
80
80
81 def find_series(self, patch):
81 def find_series(self, patch):
82 pre = re.compile("(\s*)([^#]+)")
82 pre = re.compile("(\s*)([^#]+)")
83 index = 0
83 index = 0
84 for l in self.full_series:
84 for l in self.full_series:
85 m = pre.match(l)
85 m = pre.match(l)
86 if m:
86 if m:
87 s = m.group(2)
87 s = m.group(2)
88 s = s.rstrip()
88 s = s.rstrip()
89 if s == patch:
89 if s == patch:
90 return index
90 return index
91 index += 1
91 index += 1
92 return None
92 return None
93
93
94 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
94 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
95
95
96 def parse_series(self):
96 def parse_series(self):
97 self.series = []
97 self.series = []
98 self.series_guards = []
98 self.series_guards = []
99 for l in self.full_series:
99 for l in self.full_series:
100 h = l.find('#')
100 h = l.find('#')
101 if h == -1:
101 if h == -1:
102 patch = l
102 patch = l
103 comment = ''
103 comment = ''
104 elif h == 0:
104 elif h == 0:
105 continue
105 continue
106 else:
106 else:
107 patch = l[:h]
107 patch = l[:h]
108 comment = l[h:]
108 comment = l[h:]
109 patch = patch.strip()
109 patch = patch.strip()
110 if patch:
110 if patch:
111 self.series.append(patch)
111 self.series.append(patch)
112 self.series_guards.append(self.guard_re.findall(comment))
112 self.series_guards.append(self.guard_re.findall(comment))
113
113
114 def check_guard(self, guard):
114 def check_guard(self, guard):
115 bad_chars = '# \t\r\n\f'
115 bad_chars = '# \t\r\n\f'
116 first = guard[0]
116 first = guard[0]
117 for c in '-+':
117 for c in '-+':
118 if first == c:
118 if first == c:
119 return (_('guard %r starts with invalid character: %r') %
119 return (_('guard %r starts with invalid character: %r') %
120 (guard, c))
120 (guard, c))
121 for c in bad_chars:
121 for c in bad_chars:
122 if c in guard:
122 if c in guard:
123 return _('invalid character in guard %r: %r') % (guard, c)
123 return _('invalid character in guard %r: %r') % (guard, c)
124
124
125 def set_active(self, guards):
125 def set_active(self, guards):
126 for guard in guards:
126 for guard in guards:
127 bad = self.check_guard(guard)
127 bad = self.check_guard(guard)
128 if bad:
128 if bad:
129 raise util.Abort(bad)
129 raise util.Abort(bad)
130 guards = dict.fromkeys(guards).keys()
130 guards = dict.fromkeys(guards).keys()
131 guards.sort()
131 guards.sort()
132 self.ui.debug('active guards: %s\n' % ' '.join(guards))
132 self.ui.debug('active guards: %s\n' % ' '.join(guards))
133 self.active_guards = guards
133 self.active_guards = guards
134 self.guards_dirty = True
134 self.guards_dirty = True
135
135
136 def active(self):
136 def active(self):
137 if self.active_guards is None:
137 if self.active_guards is None:
138 self.active_guards = []
138 self.active_guards = []
139 try:
139 try:
140 guards = self.opener(self.guards_path).read().split()
140 guards = self.opener(self.guards_path).read().split()
141 except IOError, err:
141 except IOError, err:
142 if err.errno != errno.ENOENT: raise
142 if err.errno != errno.ENOENT: raise
143 guards = []
143 guards = []
144 for i, guard in enumerate(guards):
144 for i, guard in enumerate(guards):
145 bad = self.check_guard(guard)
145 bad = self.check_guard(guard)
146 if bad:
146 if bad:
147 self.ui.warn('%s:%d: %s\n' %
147 self.ui.warn('%s:%d: %s\n' %
148 (self.join(self.guards_path), i + 1, bad))
148 (self.join(self.guards_path), i + 1, bad))
149 else:
149 else:
150 self.active_guards.append(guard)
150 self.active_guards.append(guard)
151 return self.active_guards
151 return self.active_guards
152
152
153 def set_guards(self, idx, guards):
153 def set_guards(self, idx, guards):
154 for g in guards:
154 for g in guards:
155 if len(g) < 2:
155 if len(g) < 2:
156 raise util.Abort(_('guard %r too short') % g)
156 raise util.Abort(_('guard %r too short') % g)
157 if g[0] not in '-+':
157 if g[0] not in '-+':
158 raise util.Abort(_('guard %r starts with invalid char') % g)
158 raise util.Abort(_('guard %r starts with invalid char') % g)
159 bad = self.check_guard(g[1:])
159 bad = self.check_guard(g[1:])
160 if bad:
160 if bad:
161 raise util.Abort(bad)
161 raise util.Abort(bad)
162 drop = self.guard_re.sub('', self.full_series[idx])
162 drop = self.guard_re.sub('', self.full_series[idx])
163 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
163 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
164 self.parse_series()
164 self.parse_series()
165 self.series_dirty = True
165 self.series_dirty = True
166
166
167 def pushable(self, idx):
167 def pushable(self, idx):
168 if isinstance(idx, str):
168 if isinstance(idx, str):
169 idx = self.series.index(idx)
169 idx = self.series.index(idx)
170 patchguards = self.series_guards[idx]
170 patchguards = self.series_guards[idx]
171 if not patchguards:
171 if not patchguards:
172 return True, None
172 return True, None
173 default = False
173 default = False
174 guards = self.active()
174 guards = self.active()
175 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
175 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
176 if exactneg:
176 if exactneg:
177 return False, exactneg[0]
177 return False, exactneg[0]
178 pos = [g for g in patchguards if g[0] == '+']
178 pos = [g for g in patchguards if g[0] == '+']
179 nonpos = [g for g in pos if g[1:] not in guards]
179 nonpos = [g for g in pos if g[1:] not in guards]
180 if pos:
180 if pos:
181 if not nonpos:
181 if not nonpos:
182 return True, ''
182 return True, ''
183 return False, nonpos
183 return False, nonpos
184 return True, ''
184 return True, ''
185
185
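To make the guard rule concrete, here is a toy re-statement of pushable()'s decision with invented guard names; the real method also reports why a patch is skipped, which this sketch omits:

def guarded_ok(patch_guards, active):
    # Unguarded patches are always pushable.
    if not patch_guards:
        return True
    # Any active negative guard blocks the patch outright.
    if [g for g in patch_guards if g[0] == '-' and g[1:] in active]:
        return False
    # If there are positive guards, every one of them must be active.
    pos = [g for g in patch_guards if g[0] == '+']
    if pos:
        return not [g for g in pos if g[1:] not in active]
    return True

print(guarded_ok([], ['experimental']))                 # True
print(guarded_ok(['+experimental'], ['experimental']))  # True
print(guarded_ok(['+experimental'], []))                # False
print(guarded_ok(['-broken'], ['broken']))              # False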
186 def explain_pushable(self, idx, all_patches=False):
186 def explain_pushable(self, idx, all_patches=False):
187 write = all_patches and self.ui.write or self.ui.warn
187 write = all_patches and self.ui.write or self.ui.warn
188 if all_patches or self.ui.verbose:
188 if all_patches or self.ui.verbose:
189 if isinstance(idx, str):
189 if isinstance(idx, str):
190 idx = self.series.index(idx)
190 idx = self.series.index(idx)
191 pushable, why = self.pushable(idx)
191 pushable, why = self.pushable(idx)
192 if all_patches and pushable:
192 if all_patches and pushable:
193 if why is None:
193 if why is None:
194 write(_('allowing %s - no guards in effect\n') %
194 write(_('allowing %s - no guards in effect\n') %
195 self.series[idx])
195 self.series[idx])
196 else:
196 else:
197 if not why:
197 if not why:
198 write(_('allowing %s - no matching negative guards\n') %
198 write(_('allowing %s - no matching negative guards\n') %
199 self.series[idx])
199 self.series[idx])
200 else:
200 else:
201 write(_('allowing %s - guarded by %r\n') %
201 write(_('allowing %s - guarded by %r\n') %
202 (self.series[idx], why))
202 (self.series[idx], why))
203 if not pushable:
203 if not pushable:
204 if why:
204 if why:
205 write(_('skipping %s - guarded by %r\n') %
205 write(_('skipping %s - guarded by %r\n') %
206 (self.series[idx], ' '.join(why)))
206 (self.series[idx], ' '.join(why)))
207 else:
207 else:
208 write(_('skipping %s - no matching guards\n') %
208 write(_('skipping %s - no matching guards\n') %
209 self.series[idx])
209 self.series[idx])
210
210
211 def save_dirty(self):
211 def save_dirty(self):
212 def write_list(items, path):
212 def write_list(items, path):
213 fp = self.opener(path, 'w')
213 fp = self.opener(path, 'w')
214 for i in items:
214 for i in items:
215 print >> fp, i
215 print >> fp, i
216 fp.close()
216 fp.close()
217 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
217 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
218 if self.series_dirty: write_list(self.full_series, self.series_path)
218 if self.series_dirty: write_list(self.full_series, self.series_path)
219 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
219 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
220
220
221 def readheaders(self, patch):
221 def readheaders(self, patch):
222 def eatdiff(lines):
222 def eatdiff(lines):
223 while lines:
223 while lines:
224 l = lines[-1]
224 l = lines[-1]
225 if (l.startswith("diff -") or
225 if (l.startswith("diff -") or
226 l.startswith("Index:") or
226 l.startswith("Index:") or
227 l.startswith("===========")):
227 l.startswith("===========")):
228 del lines[-1]
228 del lines[-1]
229 else:
229 else:
230 break
230 break
231 def eatempty(lines):
231 def eatempty(lines):
232 while lines:
232 while lines:
233 l = lines[-1]
233 l = lines[-1]
234 if re.match('\s*$', l):
234 if re.match('\s*$', l):
235 del lines[-1]
235 del lines[-1]
236 else:
236 else:
237 break
237 break
238
238
239 pf = self.join(patch)
239 pf = self.join(patch)
240 message = []
240 message = []
241 comments = []
241 comments = []
242 user = None
242 user = None
243 date = None
243 date = None
244 format = None
244 format = None
245 subject = None
245 subject = None
246 diffstart = 0
246 diffstart = 0
247
247
248 for line in file(pf):
248 for line in file(pf):
249 line = line.rstrip()
249 line = line.rstrip()
250 if diffstart:
250 if diffstart:
251 if line.startswith('+++ '):
251 if line.startswith('+++ '):
252 diffstart = 2
252 diffstart = 2
253 break
253 break
254 if line.startswith("--- "):
254 if line.startswith("--- "):
255 diffstart = 1
255 diffstart = 1
256 continue
256 continue
257 elif format == "hgpatch":
257 elif format == "hgpatch":
258 # parse values when importing the result of an hg export
258 # parse values when importing the result of an hg export
259 if line.startswith("# User "):
259 if line.startswith("# User "):
260 user = line[7:]
260 user = line[7:]
261 elif line.startswith("# Date "):
261 elif line.startswith("# Date "):
262 date = line[7:]
262 date = line[7:]
263 elif not line.startswith("# ") and line:
263 elif not line.startswith("# ") and line:
264 message.append(line)
264 message.append(line)
265 format = None
265 format = None
266 elif line == '# HG changeset patch':
266 elif line == '# HG changeset patch':
267 format = "hgpatch"
267 format = "hgpatch"
268 elif (format != "tagdone" and (line.startswith("Subject: ") or
268 elif (format != "tagdone" and (line.startswith("Subject: ") or
269 line.startswith("subject: "))):
269 line.startswith("subject: "))):
270 subject = line[9:]
270 subject = line[9:]
271 format = "tag"
271 format = "tag"
272 elif (format != "tagdone" and (line.startswith("From: ") or
272 elif (format != "tagdone" and (line.startswith("From: ") or
273 line.startswith("from: "))):
273 line.startswith("from: "))):
274 user = line[6:]
274 user = line[6:]
275 format = "tag"
275 format = "tag"
276 elif format == "tag" and line == "":
276 elif format == "tag" and line == "":
277 # when looking for tags (subject: from: etc) they
277 # when looking for tags (subject: from: etc) they
278 # end once you find a blank line in the source
278 # end once you find a blank line in the source
279 format = "tagdone"
279 format = "tagdone"
280 elif message or line:
280 elif message or line:
281 message.append(line)
281 message.append(line)
282 comments.append(line)
282 comments.append(line)
283
283
284 eatdiff(message)
284 eatdiff(message)
285 eatdiff(comments)
285 eatdiff(comments)
286 eatempty(message)
286 eatempty(message)
287 eatempty(comments)
287 eatempty(comments)
288
288
289 # make sure message isn't empty
289 # make sure message isn't empty
290 if format and format.startswith("tag") and subject:
290 if format and format.startswith("tag") and subject:
291 message.insert(0, "")
291 message.insert(0, "")
292 message.insert(0, subject)
292 message.insert(0, subject)
293 return (message, comments, user, date, diffstart > 1)
293 return (message, comments, user, date, diffstart > 1)
294
294
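For orientation, a toy walk-through of the "hgpatch" branch above on a header like the one hg export writes; the patch text, user and date are invented, and the sketch skips the format detection and the eatdiff/eatempty cleanup that readheaders() performs:

patch_text = '''# HG changeset patch
# User Jane Doe <jane@example.com>
# Date 1150000000 0
fix off-by-one in frobnicator

diff -r aaaaaaaaaaaa -r bbbbbbbbbbbb frob.py
--- a/frob.py
+++ b/frob.py
'''

user = date = None
message = []
for line in patch_text.splitlines():
    if line.startswith('diff -') or line.startswith('--- '):
        break                       # diff body reached, header parsing stops
    if line.startswith('# User '):
        user = line[7:]
    elif line.startswith('# Date '):
        date = line[7:]
    elif line and not line.startswith('# '):
        message.append(line)

print(user)      # Jane Doe <jane@example.com>
print(date)      # 1150000000 0
print(message)   # ['fix off-by-one in frobnicator']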
295 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
295 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
296 # first try just applying the patch
296 # first try just applying the patch
297 (err, n) = self.apply(repo, [ patch ], update_status=False,
297 (err, n) = self.apply(repo, [ patch ], update_status=False,
298 strict=True, merge=rev, wlock=wlock)
298 strict=True, merge=rev, wlock=wlock)
299
299
300 if err == 0:
300 if err == 0:
301 return (err, n)
301 return (err, n)
302
302
303 if n is None:
303 if n is None:
304 raise util.Abort(_("apply failed for patch %s") % patch)
304 raise util.Abort(_("apply failed for patch %s") % patch)
305
305
306 self.ui.warn("patch didn't work out, merging %s\n" % patch)
306 self.ui.warn("patch didn't work out, merging %s\n" % patch)
307
307
308 # apply failed, strip away that rev and merge.
308 # apply failed, strip away that rev and merge.
309 hg.clean(repo, head, wlock=wlock)
309 hg.clean(repo, head, wlock=wlock)
310 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
310 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
311
311
312 c = repo.changelog.read(rev)
312 c = repo.changelog.read(rev)
313 ret = hg.merge(repo, rev, wlock=wlock)
313 ret = hg.merge(repo, rev, wlock=wlock)
314 if ret:
314 if ret:
315 raise util.Abort(_("update returned %d") % ret)
315 raise util.Abort(_("update returned %d") % ret)
316 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
316 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
317 if n == None:
317 if n == None:
318 raise util.Abort(_("repo commit failed"))
318 raise util.Abort(_("repo commit failed"))
319 try:
319 try:
320 message, comments, user, date, patchfound = mergeq.readheaders(patch)
320 message, comments, user, date, patchfound = mergeq.readheaders(patch)
321 except:
321 except:
322 raise util.Abort(_("unable to read %s") % patch)
322 raise util.Abort(_("unable to read %s") % patch)
323
323
324 patchf = self.opener(patch, "w")
324 patchf = self.opener(patch, "w")
325 if comments:
325 if comments:
326 comments = "\n".join(comments) + '\n\n'
326 comments = "\n".join(comments) + '\n\n'
327 patchf.write(comments)
327 patchf.write(comments)
328 commands.dodiff(patchf, self.ui, repo, head, n)
328 commands.dodiff(patchf, self.ui, repo, head, n)
329 patchf.close()
329 patchf.close()
330 return (0, n)
330 return (0, n)
331
331
332 def qparents(self, repo, rev=None):
332 def qparents(self, repo, rev=None):
333 if rev is None:
333 if rev is None:
334 (p1, p2) = repo.dirstate.parents()
334 (p1, p2) = repo.dirstate.parents()
335 if p2 == revlog.nullid:
335 if p2 == revlog.nullid:
336 return p1
336 return p1
337 if len(self.applied) == 0:
337 if len(self.applied) == 0:
338 return None
338 return None
339 return revlog.bin(self.applied[-1].rev)
339 return revlog.bin(self.applied[-1].rev)
340 pp = repo.changelog.parents(rev)
340 pp = repo.changelog.parents(rev)
341 if pp[1] != revlog.nullid:
341 if pp[1] != revlog.nullid:
342 arevs = [ x.rev for x in self.applied ]
342 arevs = [ x.rev for x in self.applied ]
343 p0 = revlog.hex(pp[0])
343 p0 = revlog.hex(pp[0])
344 p1 = revlog.hex(pp[1])
344 p1 = revlog.hex(pp[1])
345 if p0 in arevs:
345 if p0 in arevs:
346 return pp[0]
346 return pp[0]
347 if p1 in arevs:
347 if p1 in arevs:
348 return pp[1]
348 return pp[1]
349 return pp[0]
349 return pp[0]
350
350
351 def mergepatch(self, repo, mergeq, series, wlock):
351 def mergepatch(self, repo, mergeq, series, wlock):
352 if len(self.applied) == 0:
352 if len(self.applied) == 0:
353 # each of the patches merged in will have two parents. This
353 # each of the patches merged in will have two parents. This
354 # can confuse the qrefresh, qdiff, and strip code because it
354 # can confuse the qrefresh, qdiff, and strip code because it
355 # needs to know which parent is actually in the patch queue.
355 # needs to know which parent is actually in the patch queue.
356 # so, we insert a merge marker with only one parent. This way
356 # so, we insert a merge marker with only one parent. This way
357 # the first patch in the queue is never a merge patch
357 # the first patch in the queue is never a merge patch
358 #
358 #
359 pname = ".hg.patches.merge.marker"
359 pname = ".hg.patches.merge.marker"
360 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
360 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
361 wlock=wlock)
361 wlock=wlock)
362 self.applied.append(statusentry(revlog.hex(n), pname))
362 self.applied.append(statusentry(revlog.hex(n), pname))
363 self.applied_dirty = 1
363 self.applied_dirty = 1
364
364
365 head = self.qparents(repo)
365 head = self.qparents(repo)
366
366
367 for patch in series:
367 for patch in series:
368 patch = mergeq.lookup(patch, strict=True)
368 patch = mergeq.lookup(patch, strict=True)
369 if not patch:
369 if not patch:
370 self.ui.warn("patch %s does not exist\n" % patch)
370 self.ui.warn("patch %s does not exist\n" % patch)
371 return (1, None)
371 return (1, None)
372 pushable, reason = self.pushable(patch)
372 pushable, reason = self.pushable(patch)
373 if not pushable:
373 if not pushable:
374 self.explain_pushable(patch, all_patches=True)
374 self.explain_pushable(patch, all_patches=True)
375 continue
375 continue
376 info = mergeq.isapplied(patch)
376 info = mergeq.isapplied(patch)
377 if not info:
377 if not info:
378 self.ui.warn("patch %s is not applied\n" % patch)
378 self.ui.warn("patch %s is not applied\n" % patch)
379 return (1, None)
379 return (1, None)
380 rev = revlog.bin(info[1])
380 rev = revlog.bin(info[1])
381 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
381 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
382 if head:
382 if head:
383 self.applied.append(statusentry(revlog.hex(head), patch))
383 self.applied.append(statusentry(revlog.hex(head), patch))
384 self.applied_dirty = 1
384 self.applied_dirty = 1
385 if err:
385 if err:
386 return (err, head)
386 return (err, head)
387 return (0, head)
387 return (0, head)
388
388
389 def patch(self, repo, patchfile):
389 def patch(self, repo, patchfile):
390 '''Apply patchfile to the working directory.
390 '''Apply patchfile to the working directory.
391 patchfile: file name of patch'''
391 patchfile: file name of patch'''
392 try:
392 try:
393 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
393 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
394 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
394 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
395 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
395 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
396 except:
396 except:
397 self.ui.warn("patch failed, unable to continue (try -v)\n")
397 self.ui.warn("patch failed, unable to continue (try -v)\n")
398 return (None, [], False)
398 return (None, [], False)
399 files = []
399 files = []
400 fuzz = False
400 fuzz = False
401 for l in f:
401 for l in f:
402 l = l.rstrip('\r\n');
402 l = l.rstrip('\r\n');
403 if self.ui.verbose:
403 if self.ui.verbose:
404 self.ui.warn(l + "\n")
404 self.ui.warn(l + "\n")
405 if l[:14] == 'patching file ':
405 if l[:14] == 'patching file ':
406 pf = os.path.normpath(util.parse_patch_output(l))
406 pf = os.path.normpath(util.parse_patch_output(l))
407 if pf not in files:
407 if pf not in files:
408 files.append(pf)
408 files.append(pf)
409 printed_file = False
409 printed_file = False
410 file_str = l
410 file_str = l
411 elif l.find('with fuzz') >= 0:
411 elif l.find('with fuzz') >= 0:
412 if not printed_file:
412 if not printed_file:
413 self.ui.warn(file_str + '\n')
413 self.ui.warn(file_str + '\n')
414 printed_file = True
414 printed_file = True
415 self.ui.warn(l + '\n')
415 self.ui.warn(l + '\n')
416 fuzz = True
416 fuzz = True
417 elif l.find('saving rejects to file') >= 0:
417 elif l.find('saving rejects to file') >= 0:
418 self.ui.warn(l + '\n')
418 self.ui.warn(l + '\n')
419 elif l.find('FAILED') >= 0:
419 elif l.find('FAILED') >= 0:
420 if not printed_file:
420 if not printed_file:
421 self.ui.warn(file_str + '\n')
421 self.ui.warn(file_str + '\n')
422 printed_file = True
422 printed_file = True
423 self.ui.warn(l + '\n')
423 self.ui.warn(l + '\n')
424
424
425 return (not f.close(), files, fuzz)
425 return (not f.close(), files, fuzz)
426
426
427 def apply(self, repo, series, list=False, update_status=True,
427 def apply(self, repo, series, list=False, update_status=True,
428 strict=False, patchdir=None, merge=None, wlock=None):
428 strict=False, patchdir=None, merge=None, wlock=None):
429 # TODO unify with commands.py
429 # TODO unify with commands.py
430 if not patchdir:
430 if not patchdir:
431 patchdir = self.path
431 patchdir = self.path
432 err = 0
432 err = 0
433 if not wlock:
433 if not wlock:
434 wlock = repo.wlock()
434 wlock = repo.wlock()
435 lock = repo.lock()
435 lock = repo.lock()
436 tr = repo.transaction()
436 tr = repo.transaction()
437 n = None
437 n = None
438 for patch in series:
438 for patch in series:
439 pushable, reason = self.pushable(patch)
439 pushable, reason = self.pushable(patch)
440 if not pushable:
440 if not pushable:
441 self.explain_pushable(patch, all_patches=True)
441 self.explain_pushable(patch, all_patches=True)
442 continue
442 continue
443 self.ui.warn("applying %s\n" % patch)
443 self.ui.warn("applying %s\n" % patch)
444 pf = os.path.join(patchdir, patch)
444 pf = os.path.join(patchdir, patch)
445
445
446 try:
446 try:
447 message, comments, user, date, patchfound = self.readheaders(patch)
447 message, comments, user, date, patchfound = self.readheaders(patch)
448 except:
448 except:
449 self.ui.warn("Unable to read %s\n" % pf)
449 self.ui.warn("Unable to read %s\n" % pf)
450 err = 1
450 err = 1
451 break
451 break
452
452
453 if not message:
453 if not message:
454 message = "imported patch %s\n" % patch
454 message = "imported patch %s\n" % patch
455 else:
455 else:
456 if list:
456 if list:
457 message.append("\nimported patch %s" % patch)
457 message.append("\nimported patch %s" % patch)
458 message = '\n'.join(message)
458 message = '\n'.join(message)
459
459
460 (patcherr, files, fuzz) = self.patch(repo, pf)
460 (patcherr, files, fuzz) = self.patch(repo, pf)
461 patcherr = not patcherr
461 patcherr = not patcherr
462
462
463 if merge and len(files) > 0:
463 if merge and len(files) > 0:
464 # Mark as merged and update dirstate parent info
464 # Mark as merged and update dirstate parent info
465 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
465 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
466 p1, p2 = repo.dirstate.parents()
466 p1, p2 = repo.dirstate.parents()
467 repo.dirstate.setparents(p1, merge)
467 repo.dirstate.setparents(p1, merge)
468 if len(files) > 0:
468 if len(files) > 0:
469 cwd = repo.getcwd()
469 cwd = repo.getcwd()
470 cfiles = files
470 cfiles = files
471 if cwd:
471 if cwd:
472 cfiles = [util.pathto(cwd, f) for f in files]
472 cfiles = [util.pathto(cwd, f) for f in files]
473 commands.addremove_lock(self.ui, repo, cfiles,
473 commands.addremove_lock(self.ui, repo, cfiles,
474 opts={}, wlock=wlock)
474 opts={}, wlock=wlock)
475 n = repo.commit(files, message, user, date, force=1, lock=lock,
475 n = repo.commit(files, message, user, date, force=1, lock=lock,
476 wlock=wlock)
476 wlock=wlock)
477
477
478 if n == None:
478 if n == None:
479 raise util.Abort(_("repo commit failed"))
479 raise util.Abort(_("repo commit failed"))
480
480
481 if update_status:
481 if update_status:
482 self.applied.append(statusentry(revlog.hex(n), patch))
482 self.applied.append(statusentry(revlog.hex(n), patch))
483
483
484 if patcherr:
484 if patcherr:
485 if not patchfound:
485 if not patchfound:
486 self.ui.warn("patch %s is empty\n" % patch)
486 self.ui.warn("patch %s is empty\n" % patch)
487 err = 0
487 err = 0
488 else:
488 else:
489 self.ui.warn("patch failed, rejects left in working dir\n")
489 self.ui.warn("patch failed, rejects left in working dir\n")
490 err = 1
490 err = 1
491 break
491 break
492
492
493 if fuzz and strict:
493 if fuzz and strict:
494 self.ui.warn("fuzz found when applying patch, stopping\n")
494 self.ui.warn("fuzz found when applying patch, stopping\n")
495 err = 1
495 err = 1
496 break
496 break
497 tr.close()
497 tr.close()
498 return (err, n)
498 return (err, n)
499
499
500 def delete(self, repo, patch, force=False):
500 def delete(self, repo, patch, force=False):
501 patch = self.lookup(patch, strict=True)
501 patch = self.lookup(patch, strict=True)
502 info = self.isapplied(patch)
502 info = self.isapplied(patch)
503 if info:
503 if info:
504 raise util.Abort(_("cannot delete applied patch %s") % patch)
504 raise util.Abort(_("cannot delete applied patch %s") % patch)
505 if patch not in self.series:
505 if patch not in self.series:
506 raise util.Abort(_("patch %s not in series file") % patch)
506 raise util.Abort(_("patch %s not in series file") % patch)
507 if force:
507 if force:
508 r = self.qrepo()
508 r = self.qrepo()
509 if r:
509 if r:
510 r.remove([patch], True)
510 r.remove([patch], True)
511 else:
511 else:
512 os.unlink(self.join(patch))
512 os.unlink(self.join(patch))
513 i = self.find_series(patch)
513 i = self.find_series(patch)
514 del self.full_series[i]
514 del self.full_series[i]
515 self.parse_series()
515 self.parse_series()
516 self.series_dirty = 1
516 self.series_dirty = 1
517
517
518 def check_toppatch(self, repo):
518 def check_toppatch(self, repo):
519 if len(self.applied) > 0:
519 if len(self.applied) > 0:
520 top = revlog.bin(self.applied[-1].rev)
520 top = revlog.bin(self.applied[-1].rev)
521 pp = repo.dirstate.parents()
521 pp = repo.dirstate.parents()
522 if top not in pp:
522 if top not in pp:
523 raise util.Abort(_("queue top not at same revision as working directory"))
523 raise util.Abort(_("queue top not at same revision as working directory"))
524 return top
524 return top
525 return None
525 return None
526 def check_localchanges(self, repo):
526 def check_localchanges(self, repo):
527 (c, a, r, d, u) = repo.changes(None, None)
527 (c, a, r, d, u) = repo.changes(None, None)
528 if c or a or d or r:
528 if c or a or d or r:
529 raise util.Abort(_("local changes found, refresh first"))
529 raise util.Abort(_("local changes found, refresh first"))
530 def new(self, repo, patch, msg=None, force=None):
530 def new(self, repo, patch, msg=None, force=None):
531 if os.path.exists(self.join(patch)):
531 if os.path.exists(self.join(patch)):
532 raise util.Abort(_('patch "%s" already exists') % patch)
532 raise util.Abort(_('patch "%s" already exists') % patch)
533 commitfiles = []
533 commitfiles = []
534 (c, a, r, d, u) = repo.changes(None, None)
534 (c, a, r, d, u) = repo.changes(None, None)
535 if c or a or d or r:
535 if c or a or d or r:
536 if not force:
536 if not force:
537 raise util.Abort(_("local changes found, refresh first"))
537 raise util.Abort(_("local changes found, refresh first"))
538 commitfiles = c + a + r
538 commitfiles = c + a + r
539 self.check_toppatch(repo)
539 self.check_toppatch(repo)
540 wlock = repo.wlock()
540 wlock = repo.wlock()
541 insert = self.full_series_end()
541 insert = self.full_series_end()
542 if msg:
542 if msg:
543 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
543 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
544 wlock=wlock)
544 wlock=wlock)
545 else:
545 else:
546 n = repo.commit(commitfiles,
546 n = repo.commit(commitfiles,
547 "New patch: %s" % patch, force=True, wlock=wlock)
547 "New patch: %s" % patch, force=True, wlock=wlock)
548 if n == None:
548 if n == None:
549 raise util.Abort(_("repo commit failed"))
549 raise util.Abort(_("repo commit failed"))
550 self.full_series[insert:insert] = [patch]
550 self.full_series[insert:insert] = [patch]
551 self.applied.append(statusentry(revlog.hex(n), patch))
551 self.applied.append(statusentry(revlog.hex(n), patch))
552 self.parse_series()
552 self.parse_series()
553 self.series_dirty = 1
553 self.series_dirty = 1
554 self.applied_dirty = 1
554 self.applied_dirty = 1
555 p = self.opener(patch, "w")
555 p = self.opener(patch, "w")
556 if msg:
556 if msg:
557 msg = msg + "\n"
557 msg = msg + "\n"
558 p.write(msg)
558 p.write(msg)
559 p.close()
559 p.close()
560 wlock = None
560 wlock = None
561 r = self.qrepo()
561 r = self.qrepo()
562 if r: r.add([patch])
562 if r: r.add([patch])
563 if commitfiles:
563 if commitfiles:
564 self.refresh(repo, msg=None, short=True)
564 self.refresh(repo, msg=None, short=True)
565
565
566 def strip(self, repo, rev, update=True, backup="all", wlock=None):
566 def strip(self, repo, rev, update=True, backup="all", wlock=None):
567 def limitheads(chlog, stop):
567 def limitheads(chlog, stop):
568 """return the list of all nodes that have no children"""
568 """return the list of all nodes that have no children"""
569 p = {}
569 p = {}
570 h = []
570 h = []
571 stoprev = 0
571 stoprev = 0
572 if stop in chlog.nodemap:
572 if stop in chlog.nodemap:
573 stoprev = chlog.rev(stop)
573 stoprev = chlog.rev(stop)
574
574
575 for r in range(chlog.count() - 1, -1, -1):
575 for r in range(chlog.count() - 1, -1, -1):
576 n = chlog.node(r)
576 n = chlog.node(r)
577 if n not in p:
577 if n not in p:
578 h.append(n)
578 h.append(n)
579 if n == stop:
579 if n == stop:
580 break
580 break
581 if r < stoprev:
581 if r < stoprev:
582 break
582 break
583 for pn in chlog.parents(n):
583 for pn in chlog.parents(n):
584 p[pn] = 1
584 p[pn] = 1
585 return h
585 return h
586
586
587 def bundle(cg):
587 def bundle(cg):
588 backupdir = repo.join("strip-backup")
588 backupdir = repo.join("strip-backup")
589 if not os.path.isdir(backupdir):
589 if not os.path.isdir(backupdir):
590 os.mkdir(backupdir)
590 os.mkdir(backupdir)
591 name = os.path.join(backupdir, "%s" % revlog.short(rev))
591 name = os.path.join(backupdir, "%s" % revlog.short(rev))
592 name = savename(name)
592 name = savename(name)
593 self.ui.warn("saving bundle to %s\n" % name)
593 self.ui.warn("saving bundle to %s\n" % name)
594 # TODO, exclusive open
594 # TODO, exclusive open
595 f = open(name, "wb")
595 f = open(name, "wb")
596 try:
596 try:
597 f.write("HG10")
597 f.write("HG10")
598 z = bz2.BZ2Compressor(9)
598 z = bz2.BZ2Compressor(9)
599 while 1:
599 while 1:
600 chunk = cg.read(4096)
600 chunk = cg.read(4096)
601 if not chunk:
601 if not chunk:
602 break
602 break
603 f.write(z.compress(chunk))
603 f.write(z.compress(chunk))
604 f.write(z.flush())
604 f.write(z.flush())
605 except:
605 except:
606 os.unlink(name)
606 os.unlink(name)
607 raise
607 raise
608 f.close()
608 f.close()
609 return name
609 return name
610
610
611 def stripall(rev, revnum):
611 def stripall(rev, revnum):
612 cl = repo.changelog
612 cl = repo.changelog
613 c = cl.read(rev)
613 c = cl.read(rev)
614 mm = repo.manifest.read(c[0])
614 mm = repo.manifest.read(c[0])
615 seen = {}
615 seen = {}
616
616
617 for x in xrange(revnum, cl.count()):
617 for x in xrange(revnum, cl.count()):
618 c = cl.read(cl.node(x))
618 c = cl.read(cl.node(x))
619 for f in c[3]:
619 for f in c[3]:
620 if f in seen:
620 if f in seen:
621 continue
621 continue
622 seen[f] = 1
622 seen[f] = 1
623 if f in mm:
623 if f in mm:
624 filerev = mm[f]
624 filerev = mm[f]
625 else:
625 else:
626 filerev = 0
626 filerev = 0
627 seen[f] = filerev
627 seen[f] = filerev
628 # we go in two steps here so the strip loop happens in a
628 # we go in two steps here so the strip loop happens in a
629 # sensible order. When stripping many files, this helps keep
629 # sensible order. When stripping many files, this helps keep
630 # our disk access patterns under control.
630 # our disk access patterns under control.
631 seen_list = seen.keys()
631 seen_list = seen.keys()
632 seen_list.sort()
632 seen_list.sort()
633 for f in seen_list:
633 for f in seen_list:
634 ff = repo.file(f)
634 ff = repo.file(f)
635 filerev = seen[f]
635 filerev = seen[f]
636 if filerev != 0:
636 if filerev != 0:
637 if filerev in ff.nodemap:
637 if filerev in ff.nodemap:
638 filerev = ff.rev(filerev)
638 filerev = ff.rev(filerev)
639 else:
639 else:
640 filerev = 0
640 filerev = 0
641 ff.strip(filerev, revnum)
641 ff.strip(filerev, revnum)
642
642
643 if not wlock:
643 if not wlock:
644 wlock = repo.wlock()
644 wlock = repo.wlock()
645 lock = repo.lock()
645 lock = repo.lock()
646 chlog = repo.changelog
646 chlog = repo.changelog
647 # TODO delete the undo files, and handle undo of merge sets
647 # TODO delete the undo files, and handle undo of merge sets
648 pp = chlog.parents(rev)
648 pp = chlog.parents(rev)
649 revnum = chlog.rev(rev)
649 revnum = chlog.rev(rev)
650
650
651 if update:
651 if update:
652 (c, a, r, d, u) = repo.changes(None, None)
652 (c, a, r, d, u) = repo.changes(None, None)
653 if c or a or d or r:
653 if c or a or d or r:
654 raise util.Abort(_("local changes found"))
654 raise util.Abort(_("local changes found"))
655 urev = self.qparents(repo, rev)
655 urev = self.qparents(repo, rev)
656 hg.clean(repo, urev, wlock=wlock)
656 hg.clean(repo, urev, wlock=wlock)
657 repo.dirstate.write()
657 repo.dirstate.write()
658
658
659 # save is a list of all the branches we are truncating away
659 # save is a list of all the branches we are truncating away
660 # that we actually want to keep. changegroup will be used
660 # that we actually want to keep. changegroup will be used
661 # to preserve them and add them back after the truncate
661 # to preserve them and add them back after the truncate
662 saveheads = []
662 saveheads = []
663 savebases = {}
663 savebases = {}
664
664
665 heads = limitheads(chlog, rev)
665 heads = limitheads(chlog, rev)
666 seen = {}
666 seen = {}
667
667
668 # search through all the heads, finding those where the revision
668 # search through all the heads, finding those where the revision
669 # we want to strip away is an ancestor. Also look for merges
669 # we want to strip away is an ancestor. Also look for merges
670 # that might be turned into new heads by the strip.
670 # that might be turned into new heads by the strip.
671 while heads:
671 while heads:
672 h = heads.pop()
672 h = heads.pop()
673 n = h
673 n = h
674 while True:
674 while True:
675 seen[n] = 1
675 seen[n] = 1
676 pp = chlog.parents(n)
676 pp = chlog.parents(n)
677 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
677 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
678 if pp[1] not in seen:
678 if pp[1] not in seen:
679 heads.append(pp[1])
679 heads.append(pp[1])
680 if pp[0] == revlog.nullid:
680 if pp[0] == revlog.nullid:
681 break
681 break
682 if chlog.rev(pp[0]) < revnum:
682 if chlog.rev(pp[0]) < revnum:
683 break
683 break
684 n = pp[0]
684 n = pp[0]
685 if n == rev:
685 if n == rev:
686 break
686 break
687 r = chlog.reachable(h, rev)
687 r = chlog.reachable(h, rev)
688 if rev not in r:
688 if rev not in r:
689 saveheads.append(h)
689 saveheads.append(h)
690 for x in r:
690 for x in r:
691 if chlog.rev(x) > revnum:
691 if chlog.rev(x) > revnum:
692 savebases[x] = 1
692 savebases[x] = 1
693
693
694 # create a changegroup for all the branches we need to keep
694 # create a changegroup for all the branches we need to keep
695 if backup == "all":
695 if backup == "all":
696 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
696 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
697 bundle(backupch)
697 bundle(backupch)
698 if saveheads:
698 if saveheads:
699 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
699 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
700 chgrpfile = bundle(backupch)
700 chgrpfile = bundle(backupch)
701
701
702 stripall(rev, revnum)
702 stripall(rev, revnum)
703
703
704 change = chlog.read(rev)
704 change = chlog.read(rev)
705 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
705 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
706 chlog.strip(revnum, revnum)
706 chlog.strip(revnum, revnum)
707 if saveheads:
707 if saveheads:
708 self.ui.status("adding branch\n")
708 self.ui.status("adding branch\n")
709 commands.unbundle(self.ui, repo, chgrpfile, update=False)
709 commands.unbundle(self.ui, repo, chgrpfile, update=False)
710 if backup != "strip":
710 if backup != "strip":
711 os.unlink(chgrpfile)
711 os.unlink(chgrpfile)
712
712
713 def isapplied(self, patch):
713 def isapplied(self, patch):
714 """returns (index, rev, patch)"""
714 """returns (index, rev, patch)"""
715 for i in xrange(len(self.applied)):
715 for i in xrange(len(self.applied)):
716 a = self.applied[i]
716 a = self.applied[i]
717 if a.name == patch:
717 if a.name == patch:
718 return (i, a.rev, a.name)
718 return (i, a.rev, a.name)
719 return None
719 return None
720
720
721 # if the exact patch name does not exist, we try a few
721 # if the exact patch name does not exist, we try a few
722 # variations. If strict is passed, we try only #1
722 # variations. If strict is passed, we try only #1
723 #
723 #
724 # 1) a number to indicate an offset in the series file
724 # 1) a number to indicate an offset in the series file
725 # 2) a unique substring of the patch name was given
725 # 2) a unique substring of the patch name was given
726 # 3) patchname[-+]num to indicate an offset in the series file
726 # 3) patchname[-+]num to indicate an offset in the series file
727 def lookup(self, patch, strict=False):
727 def lookup(self, patch, strict=False):
728 def partial_name(s):
728 def partial_name(s):
729 if s in self.series:
729 if s in self.series:
730 return s
730 return s
731 matches = [x for x in self.series if s in x]
731 matches = [x for x in self.series if s in x]
732 if len(matches) > 1:
732 if len(matches) > 1:
733 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
733 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
734 for m in matches:
734 for m in matches:
735 self.ui.warn(' %s\n' % m)
735 self.ui.warn(' %s\n' % m)
736 return None
736 return None
737 if matches:
737 if matches:
738 return matches[0]
738 return matches[0]
739 if len(self.series) > 0 and len(self.applied) > 0:
739 if len(self.series) > 0 and len(self.applied) > 0:
740 if s == 'qtip':
740 if s == 'qtip':
741 return self.series[self.series_end()-1]
741 return self.series[self.series_end()-1]
742 if s == 'qbase':
742 if s == 'qbase':
743 return self.series[0]
743 return self.series[0]
744 return None
744 return None
745 if patch == None:
745 if patch == None:
746 return None
746 return None
747
747
748 # we don't want to return a partial match until we make
748 # we don't want to return a partial match until we make
749 # sure the file name passed in does not exist (checked below)
749 # sure the file name passed in does not exist (checked below)
750 res = partial_name(patch)
750 res = partial_name(patch)
751 if res and res == patch:
751 if res and res == patch:
752 return res
752 return res
753
753
754 if not os.path.isfile(self.join(patch)):
754 if not os.path.isfile(self.join(patch)):
755 try:
755 try:
756 sno = int(patch)
756 sno = int(patch)
757 except(ValueError, OverflowError):
757 except(ValueError, OverflowError):
758 pass
758 pass
759 else:
759 else:
760 if sno < len(self.series):
760 if sno < len(self.series):
761 return self.series[sno]
761 return self.series[sno]
762 if not strict:
762 if not strict:
763 # return any partial match made above
763 # return any partial match made above
764 if res:
764 if res:
765 return res
765 return res
766 minus = patch.rsplit('-', 1)
766 minus = patch.rsplit('-', 1)
767 if len(minus) > 1:
767 if len(minus) > 1:
768 res = partial_name(minus[0])
768 res = partial_name(minus[0])
769 if res:
769 if res:
770 i = self.series.index(res)
770 i = self.series.index(res)
771 try:
771 try:
772 off = int(minus[1] or 1)
772 off = int(minus[1] or 1)
773 except(ValueError, OverflowError):
773 except(ValueError, OverflowError):
774 pass
774 pass
775 else:
775 else:
776 if i - off >= 0:
776 if i - off >= 0:
777 return self.series[i - off]
777 return self.series[i - off]
778 plus = patch.rsplit('+', 1)
778 plus = patch.rsplit('+', 1)
779 if len(plus) > 1:
779 if len(plus) > 1:
780 res = partial_name(plus[0])
780 res = partial_name(plus[0])
781 if res:
781 if res:
782 i = self.series.index(res)
782 i = self.series.index(res)
783 try:
783 try:
784 off = int(plus[1] or 1)
784 off = int(plus[1] or 1)
785 except(ValueError, OverflowError):
785 except(ValueError, OverflowError):
786 pass
786 pass
787 else:
787 else:
788 if i + off < len(self.series):
788 if i + off < len(self.series):
789 return self.series[i + off]
789 return self.series[i + off]
790 raise util.Abort(_("patch %s not in series") % patch)
790 raise util.Abort(_("patch %s not in series") % patch)
791
791
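A small invented series illustrating the three lookup variations described in the comment before lookup() (a bare index, a unique substring, and name+offset arithmetic); the real method additionally resolves qtip/qbase and falls back to on-disk patch files:

series = ['a.patch', 'b.patch', 'bugfix.patch']

print(series[1])                             # '1' -> second series entry, 'b.patch'
print([p for p in series if 'fix' in p][0])  # unique substring 'fix' -> 'bugfix.patch'
name, off = 'a.patch', 2                     # 'a.patch+2' -> two entries later
print(series[series.index(name) + off])      # -> 'bugfix.patch'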
792 def push(self, repo, patch=None, force=False, list=False,
792 def push(self, repo, patch=None, force=False, list=False,
793 mergeq=None, wlock=None):
793 mergeq=None, wlock=None):
794 if not wlock:
794 if not wlock:
795 wlock = repo.wlock()
795 wlock = repo.wlock()
796 patch = self.lookup(patch)
796 patch = self.lookup(patch)
797 if patch and self.isapplied(patch):
797 if patch and self.isapplied(patch):
798 self.ui.warn(_("patch %s is already applied\n") % patch)
798 self.ui.warn(_("patch %s is already applied\n") % patch)
799 sys.exit(1)
799 sys.exit(1)
800 if self.series_end() == len(self.series):
800 if self.series_end() == len(self.series):
801 self.ui.warn(_("patch series fully applied\n"))
801 self.ui.warn(_("patch series fully applied\n"))
802 sys.exit(1)
802 sys.exit(1)
803 if not force:
803 if not force:
804 self.check_localchanges(repo)
804 self.check_localchanges(repo)
805
805
806 self.applied_dirty = 1;
806 self.applied_dirty = 1;
807 start = self.series_end()
807 start = self.series_end()
808 if start > 0:
808 if start > 0:
809 self.check_toppatch(repo)
809 self.check_toppatch(repo)
810 if not patch:
810 if not patch:
811 patch = self.series[start]
811 patch = self.series[start]
812 end = start + 1
812 end = start + 1
813 else:
813 else:
814 end = self.series.index(patch, start) + 1
814 end = self.series.index(patch, start) + 1
815 s = self.series[start:end]
815 s = self.series[start:end]
816 if mergeq:
816 if mergeq:
817 ret = self.mergepatch(repo, mergeq, s, wlock)
817 ret = self.mergepatch(repo, mergeq, s, wlock)
818 else:
818 else:
819 ret = self.apply(repo, s, list, wlock=wlock)
819 ret = self.apply(repo, s, list, wlock=wlock)
820 top = self.applied[-1].name
820 top = self.applied[-1].name
821 if ret[0]:
821 if ret[0]:
822 self.ui.write("Errors during apply, please fix and refresh %s\n" %
822 self.ui.write("Errors during apply, please fix and refresh %s\n" %
823 top)
823 top)
824 else:
824 else:
825 self.ui.write("Now at: %s\n" % top)
825 self.ui.write("Now at: %s\n" % top)
826 return ret[0]
826 return ret[0]
827
827
828 def pop(self, repo, patch=None, force=False, update=True, all=False,
828 def pop(self, repo, patch=None, force=False, update=True, all=False,
829 wlock=None):
829 wlock=None):
830 def getfile(f, rev):
830 def getfile(f, rev):
831 t = repo.file(f).read(rev)
831 t = repo.file(f).read(rev)
832 try:
832 try:
833 repo.wfile(f, "w").write(t)
833 repo.wfile(f, "w").write(t)
834 except IOError:
834 except IOError:
835 try:
835 try:
836 os.makedirs(os.path.dirname(repo.wjoin(f)))
836 os.makedirs(os.path.dirname(repo.wjoin(f)))
837 except OSError, err:
837 except OSError, err:
838 if err.errno != errno.EEXIST: raise
838 if err.errno != errno.EEXIST: raise
839 repo.wfile(f, "w").write(t)
839 repo.wfile(f, "w").write(t)
840
840
841 if not wlock:
841 if not wlock:
842 wlock = repo.wlock()
842 wlock = repo.wlock()
843 if patch:
843 if patch:
844 # index, rev, patch
844 # index, rev, patch
845 info = self.isapplied(patch)
845 info = self.isapplied(patch)
846 if not info:
846 if not info:
847 patch = self.lookup(patch)
847 patch = self.lookup(patch)
848 info = self.isapplied(patch)
848 info = self.isapplied(patch)
849 if not info:
849 if not info:
850 raise util.Abort(_("patch %s is not applied") % patch)
850 raise util.Abort(_("patch %s is not applied") % patch)
851 if len(self.applied) == 0:
851 if len(self.applied) == 0:
852 self.ui.warn(_("no patches applied\n"))
852 self.ui.warn(_("no patches applied\n"))
853 sys.exit(1)
853 sys.exit(1)
854
854
855 if not update:
855 if not update:
856 parents = repo.dirstate.parents()
856 parents = repo.dirstate.parents()
857 rr = [ revlog.bin(x.rev) for x in self.applied ]
857 rr = [ revlog.bin(x.rev) for x in self.applied ]
858 for p in parents:
858 for p in parents:
859 if p in rr:
859 if p in rr:
860 self.ui.warn("qpop: forcing dirstate update\n")
860 self.ui.warn("qpop: forcing dirstate update\n")
861 update = True
861 update = True
862
862
863 if not force and update:
863 if not force and update:
864 self.check_localchanges(repo)
864 self.check_localchanges(repo)
865
865
866 self.applied_dirty = 1;
866 self.applied_dirty = 1;
867 end = len(self.applied)
867 end = len(self.applied)
868 if not patch:
868 if not patch:
869 if all:
869 if all:
870 popi = 0
870 popi = 0
871 else:
871 else:
872 popi = len(self.applied) - 1
872 popi = len(self.applied) - 1
873 else:
873 else:
874 popi = info[0] + 1
874 popi = info[0] + 1
875 if popi >= end:
875 if popi >= end:
876 self.ui.warn("qpop: %s is already at the top\n" % patch)
876 self.ui.warn("qpop: %s is already at the top\n" % patch)
877 return
877 return
878 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
878 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
879
879
880 start = info[0]
880 start = info[0]
881 rev = revlog.bin(info[1])
881 rev = revlog.bin(info[1])
882
882
883 # we know there are no local changes, so we can make a simplified
883 # we know there are no local changes, so we can make a simplified
884 # form of hg.update.
884 # form of hg.update.
885 if update:
885 if update:
886 top = self.check_toppatch(repo)
886 top = self.check_toppatch(repo)
887 qp = self.qparents(repo, rev)
887 qp = self.qparents(repo, rev)
888 changes = repo.changelog.read(qp)
888 changes = repo.changelog.read(qp)
889 mf1 = repo.manifest.readflags(changes[0])
890 mmap = repo.manifest.read(changes[0])
889 mmap = repo.manifest.read(changes[0])
891 (c, a, r, d, u) = repo.changes(qp, top)
890 (c, a, r, d, u) = repo.changes(qp, top)
892 if d:
891 if d:
893 raise util.Abort("deletions found between repo revs")
892 raise util.Abort("deletions found between repo revs")
894 for f in c:
893 for f in c:
895 getfile(f, mmap[f])
894 getfile(f, mmap[f])
896 for f in r:
895 for f in r:
897 getfile(f, mmap[f])
896 getfile(f, mmap[f])
898 util.set_exec(repo.wjoin(f), mf1[f])
897 util.set_exec(repo.wjoin(f), mmap.execf[f])
899 repo.dirstate.update(c + r, 'n')
898 repo.dirstate.update(c + r, 'n')
900 for f in a:
899 for f in a:
901 try: os.unlink(repo.wjoin(f))
900 try: os.unlink(repo.wjoin(f))
902 except: raise
901 except: raise
903 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
902 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
904 except: pass
903 except: pass
905 if a:
904 if a:
906 repo.dirstate.forget(a)
905 repo.dirstate.forget(a)
907 repo.dirstate.setparents(qp, revlog.nullid)
906 repo.dirstate.setparents(qp, revlog.nullid)
908 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
907 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
909 del self.applied[start:end]
908 del self.applied[start:end]
910 if len(self.applied):
909 if len(self.applied):
911 self.ui.write("Now at: %s\n" % self.applied[-1].name)
910 self.ui.write("Now at: %s\n" % self.applied[-1].name)
912 else:
911 else:
913 self.ui.write("Patch queue now empty\n")
912 self.ui.write("Patch queue now empty\n")
914
913
915 def diff(self, repo, files):
914 def diff(self, repo, files):
916 top = self.check_toppatch(repo)
915 top = self.check_toppatch(repo)
917 if not top:
916 if not top:
918 self.ui.write("No patches applied\n")
917 self.ui.write("No patches applied\n")
919 return
918 return
920 qp = self.qparents(repo, top)
919 qp = self.qparents(repo, top)
921 commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
920 commands.dodiff(sys.stdout, self.ui, repo, qp, None, files)
922
921
923 def refresh(self, repo, msg=None, short=False):
922 def refresh(self, repo, msg=None, short=False):
924 if len(self.applied) == 0:
923 if len(self.applied) == 0:
925 self.ui.write("No patches applied\n")
924 self.ui.write("No patches applied\n")
926 return
925 return
927 wlock = repo.wlock()
926 wlock = repo.wlock()
928 self.check_toppatch(repo)
        self.check_toppatch(repo)
        (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
        top = revlog.bin(top)
        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        message, comments, user, date, patchfound = self.readheaders(patch)

        patchf = self.opener(patch, "w")
        msg = msg.rstrip()
        if msg:
            if comments:
                # Remove existing message.
                ci = 0
                for mi in range(len(message)):
                    while message[mi] != comments[ci]:
                        ci += 1
                    del comments[ci]
            comments.append(msg)
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)

        tip = repo.changelog.tip()
        if top == tip:
            # if the top of our patch queue is also the tip, there is an
            # optimization here. We update the dirstate in place and strip
            # off the tip commit. Then just commit the current directory
            # tree. We can also send repo.commit the list of files
            # changed to speed up the diff
            #
            # in short mode, we only diff the files included in the
            # patch already
            #
            # this should really read:
            #(cc, dd, aa, aa2, uu) = repo.changes(tip, patchparent)
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.changes call
            #
            (cc, aa, dd, aa2, uu) = repo.changes(patchparent, tip)
            if short:
                filelist = cc + aa + dd
            else:
                filelist = None
            (c, a, r, d, u) = repo.changes(None, None, filelist)

            # we might end up with files that were added between tip and
            # the dirstate parent, but then changed in the local dirstate.
            # in this case, we want them to only show up in the added section
            for x in c:
                if x not in aa:
                    cc.append(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    del dd[dd.index(x)]
                    cc.append(x)
                else:
                    aa.append(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    del aa[aa.index(x)]
                    forget.append(x)
                    continue
                elif x in cc:
                    del cc[cc.index(x)]
                dd.append(x)

            c = list(util.unique(cc))
            r = list(util.unique(dd))
            a = list(util.unique(aa))
            filelist = list(util.unique(c + r + a))
            commands.dodiff(patchf, self.ui, repo, patchparent, None,
                            filelist, changes=(c, a, r, [], u))
            patchf.close()

            changes = repo.changelog.read(tip)
            repo.dirstate.setparents(*cparents)
            repo.dirstate.update(a, 'a')
            repo.dirstate.update(r, 'r')
            repo.dirstate.update(c, 'n')
            repo.dirstate.forget(forget)

            if not msg:
                if not message:
                    message = "patch queue: %s\n" % patch
                else:
                    message = "\n".join(message)
            else:
                message = msg

            self.strip(repo, top, update=False, backup='strip', wlock=wlock)
            n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
            self.applied[-1] = statusentry(revlog.hex(n), patch)
            self.applied_dirty = 1
        else:
            commands.dodiff(patchf, self.ui, repo, patchparent, None)
            patchf.close()
            self.pop(repo, force=True, wlock=wlock)
            self.push(repo, force=True, wlock=wlock)

    def init(self, repo, create=False):
        if os.path.isdir(self.path):
            raise util.Abort(_("patch queue directory already exists"))
        os.mkdir(self.path)
        if create:
            return self.qrepo(create=True)

    def unapplied(self, repo, patch=None):
        if patch and patch not in self.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        if not patch:
            start = self.series_end()
        else:
            start = self.series.index(patch) + 1
        unapplied = []
        for i in xrange(start, len(self.series)):
            pushable, reason = self.pushable(i)
            if pushable:
                unapplied.append((i, self.series[i]))
            self.explain_pushable(i)
        return unapplied

    def qseries(self, repo, missing=None, summary=False):
        start = self.series_end(all_patches=True)
        if not missing:
            for i in range(len(self.series)):
                patch = self.series[i]
                if self.ui.verbose:
                    if i < start:
                        status = 'A'
                    elif self.pushable(i)[0]:
                        status = 'U'
                    else:
                        status = 'G'
                    self.ui.write('%d %s ' % (i, status))
                if summary:
                    msg = self.readheaders(patch)[0]
                    msg = msg and ': ' + msg[0] or ': '
                else:
                    msg = ''
                self.ui.write('%s%s\n' % (patch, msg))
        else:
            msng_list = []
            for root, dirs, files in os.walk(self.path):
                d = root[len(self.path) + 1:]
                for f in files:
                    fl = os.path.join(d, f)
                    if (fl not in self.series and
                        fl not in (self.status_path, self.series_path)
                        and not fl.startswith('.')):
                        msng_list.append(fl)
            msng_list.sort()
            for x in msng_list:
                if self.ui.verbose:
                    self.ui.write("D ")
                self.ui.write("%s\n" % x)

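    # In verbose mode the listing above prefixes each series entry with its
    # index and a state letter: 'A' for applied, 'U' for unapplied but
    # pushable, 'G' for guarded. With --missing, files found in the patch
    # directory but absent from the series file are listed instead,
    # prefixed with 'D' when verbose.
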
    def issaveline(self, l):
        if l.name == '.hg.patches.save.line':
            return True

    def qrepo(self, create=False):
        if create or os.path.isdir(self.join(".hg")):
            return hg.repository(self.ui, path=self.path, create=create)

    def restore(self, repo, rev, delete=None, qupdate=None):
        c = repo.changelog.read(rev)
        desc = c[4].strip()
        lines = desc.splitlines()
        i = 0
        datastart = None
        series = []
        applied = []
        qpp = None
        for i in xrange(0, len(lines)):
            if lines[i] == 'Patch Data:':
                datastart = i + 1
            elif lines[i].startswith('Dirstate:'):
                l = lines[i].rstrip()
                l = l[10:].split(' ')
                qpp = [ hg.bin(x) for x in l ]
            elif datastart != None:
                l = lines[i].rstrip()
                se = statusentry(l)
                file_ = se.name
                if se.rev:
                    applied.append(se)
                series.append(file_)
        if datastart == None:
            self.ui.warn("No saved patch data found\n")
            return 1
        self.ui.warn("restoring status: %s\n" % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn("save entry has children, leaving it alone\n")
            else:
                self.ui.warn("removing save entry %s\n" % hg.short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn("saved queue repository parents: %s %s\n" %
                         (hg.short(qpp[0]), hg.short(qpp[1])))
            if qupdate:
                print "queue directory updating"
                r = self.qrepo()
                if not r:
                    self.ui.warn("Unable to load queue repository\n")
                    return 1
                hg.clean(r, qpp[0])

    def save(self, repo, msg=None):
        if len(self.applied) == 0:
            self.ui.warn("save: no patches applied, exiting\n")
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn("status is already saved\n")
            return 1

        ar = [ ':' + x for x in self.full_series ]
        if not msg:
            msg = "hg patches saved state"
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
                   "\n".join(ar) + '\n' or "")
        n = repo.commit(None, text, user=None, force=1)
        if not n:
            self.ui.warn("repo commit failed\n")
            return 1
        self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
        self.applied_dirty = 1

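    # A sketch of the save-entry text that save() writes and restore() parses,
    # reconstructed from the code above for illustration only:
    #
    #   hg patches saved state
    #   Dirstate: <p1-hex> <p2-hex>
    #
    #   Patch Data:
    #   <rev-hex>:<applied-patch-name>
    #   :<unapplied-patch-name>
    #
    # Lines after "Patch Data:" that carry a revision before the colon are
    # applied patches; lines starting with ':' only record series entries.
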
    def full_series_end(self):
        if len(self.applied) > 0:
            p = self.applied[-1].name
            end = self.find_series(p)
            if end == None:
                return len(self.full_series)
            return end + 1
        return 0

    def series_end(self, all_patches=False):
        end = 0
        def next(start):
            if all_patches:
                return start
            i = start
            while i < len(self.series):
                p, reason = self.pushable(i)
                if p:
                    break
                self.explain_pushable(i)
                i += 1
            return i
        if len(self.applied) > 0:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                return 0
            return next(end + 1)
        return next(end)

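    # A note on the two helpers above: series_end() returns the index of the
    # first patch past the applied stack that is pushable under the current
    # guards, while full_series_end() computes the matching position in the
    # raw series file (self.full_series), which qimport uses when inserting
    # new entries.
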
    def qapplied(self, repo, patch=None):
        if patch and patch not in self.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        if not patch:
            end = len(self.applied)
        else:
            end = self.series.index(patch) + 1
        for x in xrange(end):
            p = self.appliedname(x)
            self.ui.write("%s\n" % p)

    def appliedname(self, index):
        pname = self.applied[index].name
        if not self.ui.verbose:
            p = pname
        else:
            p = str(self.series.index(pname)) + " " + pname
        return p

    def top(self, repo):
        if len(self.applied):
            p = self.appliedname(-1)
            self.ui.write(p + '\n')
        else:
            self.ui.write("No patches applied\n")

    def next(self, repo):
        end = self.series_end()
        if end == len(self.series):
            self.ui.write("All patches applied\n")
        else:
            p = self.series[end]
            if self.ui.verbose:
                self.ui.write("%d " % self.series.index(p))
            self.ui.write(p + '\n')

    def prev(self, repo):
        if len(self.applied) > 1:
            p = self.appliedname(-2)
            self.ui.write(p + '\n')
        elif len(self.applied) == 1:
            self.ui.write("Only one patch applied\n")
        else:
            self.ui.write("No patches applied\n")

    def qimport(self, repo, files, patch=None, existing=None, force=None):
        if len(files) > 1 and patch:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'files'))
        i = 0
        added = []
        for filename in files:
            if existing:
                if not patch:
                    patch = filename
                if not os.path.isfile(self.join(patch)):
                    raise util.Abort(_("patch %s does not exist") % patch)
            else:
                try:
                    text = file(filename).read()
                except IOError:
                    raise util.Abort(_("unable to read %s") % patch)
                if not patch:
                    patch = os.path.split(filename)[1]
                if not force and os.path.exists(self.join(patch)):
                    raise util.Abort(_('patch "%s" already exists') % patch)
                patchf = self.opener(patch, "w")
                patchf.write(text)
            if patch in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patch)
            index = self.full_series_end() + i
            self.full_series[index:index] = [patch]
            self.parse_series()
            self.ui.warn("adding %s to series file\n" % patch)
            i += 1
            added.append(patch)
            patch = None
        self.series_dirty = 1
        qrepo = self.qrepo()
        if qrepo:
            qrepo.add(added)

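# The functions below are the command-line entry points for the q* commands;
# each is a thin wrapper that reaches the shared queue object through repo.mq
# (attached by reposetup() near the bottom of this file) and then writes out
# any dirty series/status state with q.save_dirty().
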
def delete(ui, repo, patch, **opts):
    """remove a patch from the series file

    The patch must not be applied.
    With -f, deletes the patch file as well as the series entry."""
    q = repo.mq
    q.delete(repo, patch, force=opts.get('force'))
    q.save_dirty()
    return 0

def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""
    repo.mq.qapplied(repo, patch)
    return 0

def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""
    for i, p in repo.mq.unapplied(repo, patch):
        if ui.verbose:
            ui.write("%d " % i)
        ui.write("%s\n" % p)

def qimport(ui, repo, *filename, **opts):
    """import a patch"""
    q = repo.mq
    q.qimport(repo, filename, patch=opts['name'],
              existing=opts['existing'], force=opts['force'])
    q.save_dirty()
    return 0

def init(ui, repo, **opts):
    """init a new queue repository

    The queue repository is unversioned by default. If -c is
    specified, qinit will create a separate nested repository
    for patches. Use qcommit to commit changes to this queue
    repository."""
    q = repo.mq
    r = q.init(repo, create=opts['create_repo'])
    q.save_dirty()
    if r:
        fp = r.wopener('.hgignore', 'w')
        print >> fp, 'syntax: glob'
        print >> fp, 'status'
        fp.close()
        r.wopener('series', 'w').close()
        r.add(['.hgignore', 'series'])
    return 0

def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command cannot check whether patches are
    applied in source, so it cannot guarantee that patches are not
    applied in destination. If you clone a remote repository, make sure
    beforehand that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.
    '''
    commands.setremoteconfig(ui, opts)
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(ui, ui.expandpath(source))
    qbase, destrev = None, None
    if sr.local():
        reposetup(ui, sr)
        if sr.mq.applied:
            qbase = revlog.bin(sr.mq.applied[0].rev)
            if not hg.islocal(dest):
                destrev = sr.parents(qbase)[0]
    ui.note(_('cloning main repo\n'))
    sr, dr = hg.clone(ui, sr, dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repo\n'))
    spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
                        dr.url() + '/.hg/patches',
                        pull=opts['pull'],
                        update=not opts['noupdate'],
                        stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            ui.note(_('stripping applied patches from destination repo\n'))
            reposetup(ui, dr)
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repo\n'))
            hg.update(dr, dr.changelog.tip())

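# A minimal usage sketch (the URL is hypothetical, not taken from this file):
#   hg qclone http://example.com/hg/project
# clones the project repository and its .hg/patches queue side by side.
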
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository"""
    q = repo.mq
    r = q.qrepo()
    if not r: raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)

def series(ui, repo, **opts):
    """print the entire series file"""
    repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0

def top(ui, repo, **opts):
    """print the name of the current patch"""
    repo.mq.top(repo)
    return 0

def next(ui, repo, **opts):
    """print the name of the next patch"""
    repo.mq.next(repo)
    return 0

def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    repo.mq.prev(repo)
    return 0

def new(ui, repo, patch, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch
    (if any). It will refuse to run if there are any outstanding
    changes unless -f is specified, in which case the patch will
    be initialised with them.

    -m or -l set the patch header as well as the commit message.
    If neither is specified, the patch header is empty and the
    commit message is 'New patch: PATCH'"""
    q = repo.mq
    message = commands.logmessage(opts)
    q.new(repo, patch, msg=message, force=opts['force'])
    q.save_dirty()
    return 0

def refresh(ui, repo, **opts):
    """update the current patch"""
    q = repo.mq
    message = commands.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        (message, comment, user, date, hasdiff) = q.readheaders(patch)
        message = ui.edit('\n'.join(message), user or ui.username())
    q.refresh(repo, msg=message, short=opts['short'])
    q.save_dirty()
    return 0

def diff(ui, repo, *files, **opts):
    """diff of the current patch"""
    # deep in the dirstate code, the walkhelper method wants a list, not a tuple
    repo.mq.diff(repo, list(files))
    return 0

def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will
    be deleted. With -f/--force, the folded patch files will
    be removed afterwards.

    The header for each folded patch will be concatenated with
    the current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo):
        raise util.Abort(_('No patches applied\n'))

    message = commands.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        patch = q.lookup(f)
        if patch in patches or patch == parent:
            ui.warn(_('Skipping already folded patch %s') % patch)
        if q.isapplied(patch):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
        patches.append(patch)

    for patch in patches:
        if not message:
            messages.append(q.readheaders(patch)[0])
        pf = q.join(patch)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('Error folding patch %s') % patch)

    if not message:
        message, comments, user = q.readheaders(parent)[0:3]
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    q.refresh(repo, msg=message)

    for patch in patches:
        q.delete(repo, patch, force=opts['force'])

    q.save_dirty()

def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    guards control whether a patch can be pushed. a patch with no
    guards is always pushed. a patch with a positive guard ("+foo") is
    pushed only if the qselect command enables guard "foo". a patch with
    a negative guard ("-foo") is never pushed if the qselect command
    enables guard "foo".

    with no arguments, default is to print current active guards.
    with arguments, set active guards for patch.

    to set a negative guard "-foo" on the topmost patch ("--" is needed
    so hg will not interpret "-foo" as an argument):
        hg qguard -- -foo

    to set guards on another patch:
        hg qguard other.patch +2.6.17 -stable
    '''
    def status(idx):
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        q.set_guards(q.find_series(patch), args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))

def header(ui, repo, patch=None):
    """Print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write('No patches applied\n')
            return
        patch = q.lookup('qtip')
    message = repo.mq.readheaders(patch)[0]

    ui.write('\n'.join(message) + '\n')

def lastsavename(path):
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex == None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)

def savename(path):
    (last, index) = lastsavename(path)
    if last is None:
        index = 0
    newpath = path + ".%d" % (index + 1)
    return newpath

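# A sketch of the backup naming scheme the two helpers above implement (paths
# are illustrative): "hg qsave -c" copies .hg/patches to .hg/patches.1, then
# .hg/patches.2, and so on; lastsavename() finds the copy with the highest
# numeric suffix and savename() returns the next name in the sequence.
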
def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack"""
    q = repo.mq
    mergeq = None

    if opts['all']:
        patch = q.series[-1]
    if opts['merge']:
        if opts['name']:
            newpath = opts['name']
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn("no saved queues found, please use -n\n")
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn("merging with queue at: %s\n" % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq)
    q.save_dirty()
    return ret

def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack"""
    localupdate = True
    if opts['name']:
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn('using patch queue: %s\n' % q.path)
        localupdate = False
    else:
        q = repo.mq
    q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
    q.save_dirty()
    return 0

def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    if not name:
        name = patch
        patch = None

    if name in q.series:
        raise util.Abort(_('A patch named %s already exists in the series file') % name)

    absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('No patches applied\n'))
            return
        patch = q.lookup('qtip')

    if ui.verbose:
        ui.write('Renaming %s to %s\n' % (patch, name))
    i = q.find_series(patch)
    q.full_series[i] = name
    q.parse_series()
    q.series_dirty = 1

    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        wlock = r.wlock()
        if r.dirstate.state(name) == 'r':
            r.undelete([name], wlock)
        r.copy(patch, name, wlock)
        r.remove([patch], False, wlock)

    q.save_dirty()

def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a rev"""
    rev = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, rev, delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0

def save(ui, repo, **opts):
    """save current queue state"""
    q = repo.mq
    message = commands.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn("copy %s to %s\n" % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except:
            pass
    return 0

def strip(ui, repo, rev, **opts):
    """strip a revision and all later revs on the same branch"""
    rev = repo.lookup(rev)
    backup = 'all'
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'
    repo.mq.strip(repo, rev, backup=backup)
    return 0

def select(ui, repo, *args, **opts):
    '''set or print guarded patches to push

    use the qguard command to set or print guards on a patch. then use
    qselect to tell mq which guards to use. example:

        qguard foo.patch -stable    (negative guard)
        qguard bar.patch +stable    (positive guard)
        qselect stable

    this sets the "stable" guard. mq will skip foo.patch (because it has
    a negative match) but push bar.patch (because it has a positive
    match). a patch is pushed only if all positive guards match and no
    negative guards match.

    with no arguments, default is to print current active guards.
    with arguments, set active guards as given.

    use -n/--none to deactivate guards (no other arguments needed).
    when no guards are active, patches with positive guards are skipped,
    patches with negative guards are pushed.

    use -s/--series to print list of all guards in series file (no
    other arguments needed). use -v for more information.'''

    q = repo.mq
    guards = q.active()
    if args or opts['none']:
        q.set_active(args)
        q.save_dirty()
        if not args:
            ui.status(_('guards deactivated\n'))
        if q.series:
            ui.status(_('%d of %d unapplied patches active\n') %
                      (len(q.unapplied(repo)), len(q.series)))
    elif opts['series']:
        guards = {}
        noguards = 0
        for gs in q.series_guards:
            if not gs:
                noguards += 1
            for g in gs:
                guards.setdefault(g, 0)
                guards[g] += 1
        if ui.verbose:
            guards['NONE'] = noguards
        guards = guards.items()
        guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
        if guards:
            ui.note(_('guards in series file:\n'))
            for guard, count in guards:
                ui.note('%2d ' % count)
                ui.write(guard, '\n')
        else:
            ui.note(_('no guards in series file\n'))
    else:
        if guards:
            ui.note(_('active guards:\n'))
            for g in guards:
                ui.write(g, '\n')
        else:
            ui.write(_('no active guards\n'))

def reposetup(ui, repo):
    class mqrepo(repo.__class__):
        def tags(self):
            if self.tagscache:
                return self.tagscache

            tagscache = super(mqrepo, self).tags()

            q = self.mq
            if not q.applied:
                return tagscache

            mqtags = [(patch.rev, patch.name) for patch in q.applied]
            mqtags.append((mqtags[-1][0], 'qtip'))
            mqtags.append((mqtags[0][0], 'qbase'))
            for patch in mqtags:
                if patch[1] in tagscache:
                    self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
                else:
                    tagscache[patch[1]] = revlog.bin(patch[0])

            return tagscache

    repo.__class__ = mqrepo
    repo.mq = queue(ui, repo.join(""))

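# Because the tags() override above publishes every applied patch name plus
# 'qbase' and 'qtip' as tags, they can be used anywhere a revision is
# expected, for example (illustrative only):
#   hg log -r qtip
#   hg diff -r qbase -r qtip
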
1820 cmdtable = {
1819 cmdtable = {
1821 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1820 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1822 "qclone": (clone,
1821 "qclone": (clone,
1823 [('', 'pull', None, _('use pull protocol to copy metadata')),
1822 [('', 'pull', None, _('use pull protocol to copy metadata')),
1824 ('U', 'noupdate', None, _('do not update the new working directories')),
1823 ('U', 'noupdate', None, _('do not update the new working directories')),
1825 ('', 'uncompressed', None,
1824 ('', 'uncompressed', None,
1826 _('use uncompressed transfer (fast over LAN)')),
1825 _('use uncompressed transfer (fast over LAN)')),
1827 ('e', 'ssh', '', _('specify ssh command to use')),
1826 ('e', 'ssh', '', _('specify ssh command to use')),
1828 ('p', 'patches', '', _('location of source patch repo')),
1827 ('p', 'patches', '', _('location of source patch repo')),
1829 ('', 'remotecmd', '',
1828 ('', 'remotecmd', '',
1830 _('specify hg command to run on the remote side'))],
1829 _('specify hg command to run on the remote side'))],
1831 'hg qclone [OPTION]... SOURCE [DEST]'),
1830 'hg qclone [OPTION]... SOURCE [DEST]'),
1832 "qcommit|qci":
1831 "qcommit|qci":
1833 (commit,
1832 (commit,
1834 commands.table["^commit|ci"][1],
1833 commands.table["^commit|ci"][1],
1835 'hg qcommit [OPTION]... [FILE]...'),
1834 'hg qcommit [OPTION]... [FILE]...'),
1836 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1835 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1837 "qdelete":
1836 "qdelete":
1838 (delete,
1837 (delete,
1839 [('f', 'force', None, _('delete patch file'))],
1838 [('f', 'force', None, _('delete patch file'))],
1840 'hg qdelete [-f] PATCH'),
1839 'hg qdelete [-f] PATCH'),
1841 'qfold':
1840 'qfold':
1842 (fold,
1841 (fold,
1843 [('e', 'edit', None, _('edit patch header')),
1842 [('e', 'edit', None, _('edit patch header')),
1844 ('f', 'force', None, _('delete folded patch files')),
1843 ('f', 'force', None, _('delete folded patch files')),
1845 ('m', 'message', '', _('set patch header to <text>')),
1844 ('m', 'message', '', _('set patch header to <text>')),
1846 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1845 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1847 'hg qfold [-e] [-m <text>] [-l <file] PATCH...'),
1846 'hg qfold [-e] [-m <text>] [-l <file] PATCH...'),
1848 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1847 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1849 ('n', 'none', None, _('drop all guards'))],
1848 ('n', 'none', None, _('drop all guards'))],
1850 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1849 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1851 'qheader': (header, [],
1850 'qheader': (header, [],
1852 _('hg qheader [PATCH]')),
1851 _('hg qheader [PATCH]')),
1853 "^qimport":
1852 "^qimport":
1854 (qimport,
1853 (qimport,
1855 [('e', 'existing', None, 'import file in patch dir'),
1854 [('e', 'existing', None, 'import file in patch dir'),
1856 ('n', 'name', '', 'patch file name'),
1855 ('n', 'name', '', 'patch file name'),
1857 ('f', 'force', None, 'overwrite existing files')],
1856 ('f', 'force', None, 'overwrite existing files')],
1858 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1857 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1859 "^qinit":
1858 "^qinit":
1860 (init,
1859 (init,
1861 [('c', 'create-repo', None, 'create queue repository')],
1860 [('c', 'create-repo', None, 'create queue repository')],
1862 'hg qinit [-c]'),
1861 'hg qinit [-c]'),
1863 "qnew":
1862 "qnew":
1864 (new,
1863 (new,
1865 [('m', 'message', '', _('use <text> as commit message')),
1864 [('m', 'message', '', _('use <text> as commit message')),
1866 ('l', 'logfile', '', _('read the commit message from <file>')),
1865 ('l', 'logfile', '', _('read the commit message from <file>')),
1867 ('f', 'force', None, _('import uncommitted changes into patch'))],
1866 ('f', 'force', None, _('import uncommitted changes into patch'))],
1868 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1867 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1869 "qnext": (next, [], 'hg qnext'),
1868 "qnext": (next, [], 'hg qnext'),
1870 "qprev": (prev, [], 'hg qprev'),
1869 "qprev": (prev, [], 'hg qprev'),
1871 "^qpop":
1870 "^qpop":
1872 (pop,
1871 (pop,
1873 [('a', 'all', None, 'pop all patches'),
1872 [('a', 'all', None, 'pop all patches'),
1874 ('n', 'name', '', 'queue name to pop'),
1873 ('n', 'name', '', 'queue name to pop'),
1875 ('f', 'force', None, 'forget any local changes')],
1874 ('f', 'force', None, 'forget any local changes')],
1876 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1875 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1877 "^qpush":
1876 "^qpush":
1878 (push,
1877 (push,
1879 [('f', 'force', None, 'apply if the patch has rejects'),
1878 [('f', 'force', None, 'apply if the patch has rejects'),
1880 ('l', 'list', None, 'list patch name in commit text'),
1879 ('l', 'list', None, 'list patch name in commit text'),
1881 ('a', 'all', None, 'apply all patches'),
1880 ('a', 'all', None, 'apply all patches'),
1882 ('m', 'merge', None, 'merge from another queue'),
1881 ('m', 'merge', None, 'merge from another queue'),
1883 ('n', 'name', '', 'merge queue name')],
1882 ('n', 'name', '', 'merge queue name')],
1884 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1883 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1885 "^qrefresh":
1884 "^qrefresh":
1886 (refresh,
1885 (refresh,
1887 [('e', 'edit', None, _('edit commit message')),
1886 [('e', 'edit', None, _('edit commit message')),
1888 ('m', 'message', '', _('change commit message with <text>')),
1887 ('m', 'message', '', _('change commit message with <text>')),
1889 ('l', 'logfile', '', _('change commit message with <file> content')),
1888 ('l', 'logfile', '', _('change commit message with <file> content')),
1890 ('s', 'short', None, 'short refresh')],
1889 ('s', 'short', None, 'short refresh')],
1891 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1890 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1892 'qrename|qmv':
1891 'qrename|qmv':
1893 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1892 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1894 "qrestore":
1893 "qrestore":
1895 (restore,
1894 (restore,
1896 [('d', 'delete', None, 'delete save entry'),
1895 [('d', 'delete', None, 'delete save entry'),
1897 ('u', 'update', None, 'update queue working dir')],
1896 ('u', 'update', None, 'update queue working dir')],
1898 'hg qrestore [-d] [-u] REV'),
1897 'hg qrestore [-d] [-u] REV'),
1899 "qsave":
1898 "qsave":
1900 (save,
1899 (save,
1901 [('m', 'message', '', _('use <text> as commit message')),
1900 [('m', 'message', '', _('use <text> as commit message')),
1902 ('l', 'logfile', '', _('read the commit message from <file>')),
1901 ('l', 'logfile', '', _('read the commit message from <file>')),
1903 ('c', 'copy', None, 'copy patch directory'),
1902 ('c', 'copy', None, 'copy patch directory'),
1904 ('n', 'name', '', 'copy directory name'),
1903 ('n', 'name', '', 'copy directory name'),
1905 ('e', 'empty', None, 'clear queue status file'),
1904 ('e', 'empty', None, 'clear queue status file'),
1906 ('f', 'force', None, 'force copy')],
1905 ('f', 'force', None, 'force copy')],
1907 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1906 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1908 "qselect": (select,
1907 "qselect": (select,
1909 [('n', 'none', None, _('disable all guards')),
1908 [('n', 'none', None, _('disable all guards')),
1910 ('s', 'series', None, _('list all guards in series file'))],
1909 ('s', 'series', None, _('list all guards in series file'))],
1911 'hg qselect [GUARDS]'),
1910 'hg qselect [GUARDS]'),
1912 "qseries":
1911 "qseries":
1913 (series,
1912 (series,
1914 [('m', 'missing', None, 'print patches not in series'),
1913 [('m', 'missing', None, 'print patches not in series'),
1915 ('s', 'summary', None, _('print first line of patch header'))],
1914 ('s', 'summary', None, _('print first line of patch header'))],
1916 'hg qseries [-m] [-s]'),
1915 'hg qseries [-m] [-s]'),
1917 "^strip":
1916 "^strip":
1918 (strip,
1917 (strip,
1919 [('f', 'force', None, 'force multi-head removal'),
1918 [('f', 'force', None, 'force multi-head removal'),
1920 ('b', 'backup', None, 'bundle unrelated changesets'),
1919 ('b', 'backup', None, 'bundle unrelated changesets'),
1921 ('n', 'nobackup', None, 'no backups')],
1920 ('n', 'nobackup', None, 'no backups')],
1922 'hg strip [-f] [-b] [-n] REV'),
1921 'hg strip [-f] [-b] [-n] REV'),
1923 "qtop": (top, [], 'hg qtop'),
1922 "qtop": (top, [], 'hg qtop'),
1924 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1923 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1925 }
1924 }
1926
1925
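The table above follows the usual Mercurial command-table shape: each key is a command name (a leading "^" puts it in the short help list, "|" separates aliases), and each value is a (function, options, synopsis) tuple whose options are (short, long, default, help) 4-tuples. A hedged sketch of one hypothetical entry, purely to show the shape; the "hello" command and its option are invented and are not part of mq:

    # illustrative cmdtable entry; 'hello' is a made-up command
    def hello(ui, repo, *pats, **opts):
        """print a greeting for each name given"""
        for name in pats or ['world']:
            if opts.get('loud'):
                name = name.upper()
            ui.write("hello, %s\n" % name)

    cmdtable = {
        # "^" -> shown in the short list; "hello|hi" -> 'hi' is an alias
        "^hello|hi":
        (hello,
         [('l', 'loud', None, 'shout the greeting')],
         'hg hello [-l] [NAME...]'),
    }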
@@ -1,174 +1,173 b''
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of
5 # This software may be used and distributed according to the terms of
6 # the GNU General Public License, incorporated herein by reference.
6 # the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import *
8 from demandload import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from node import *
10 from node import *
11 demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
11 demandload(globals(), 'cStringIO os stat tarfile time util zipfile')
12
12
13 def tidyprefix(dest, prefix, suffixes):
13 def tidyprefix(dest, prefix, suffixes):
14 '''choose prefix to use for names in archive. make sure prefix is
14 '''choose prefix to use for names in archive. make sure prefix is
15 safe for consumers.'''
15 safe for consumers.'''
16
16
17 if prefix:
17 if prefix:
18 prefix = prefix.replace('\\', '/')
18 prefix = prefix.replace('\\', '/')
19 else:
19 else:
20 if not isinstance(dest, str):
20 if not isinstance(dest, str):
21 raise ValueError('dest must be string if no prefix')
21 raise ValueError('dest must be string if no prefix')
22 prefix = os.path.basename(dest)
22 prefix = os.path.basename(dest)
23 lower = prefix.lower()
23 lower = prefix.lower()
24 for sfx in suffixes:
24 for sfx in suffixes:
25 if lower.endswith(sfx):
25 if lower.endswith(sfx):
26 prefix = prefix[:-len(sfx)]
26 prefix = prefix[:-len(sfx)]
27 break
27 break
28 lpfx = os.path.normpath(util.localpath(prefix))
28 lpfx = os.path.normpath(util.localpath(prefix))
29 prefix = util.pconvert(lpfx)
29 prefix = util.pconvert(lpfx)
30 if not prefix.endswith('/'):
30 if not prefix.endswith('/'):
31 prefix += '/'
31 prefix += '/'
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
32 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 raise util.Abort(_('archive prefix contains illegal components'))
33 raise util.Abort(_('archive prefix contains illegal components'))
34 return prefix
34 return prefix
35
35
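The effect of this sanitization is easiest to see on concrete inputs. A standalone sketch of the same idea (archive-suffix stripping, slash normalization, rejection of path escapes), using only the standard library rather than Mercurial's util helpers; it mirrors tidyprefix on simple inputs but is not the function used by the archivers below:

    # illustrative re-implementation of the prefix cleanup
    import posixpath

    def sketch_prefix(prefix, suffixes=('.tar.gz', '.tgz', '.zip')):
        prefix = prefix.replace('\\', '/')
        lower = prefix.lower()
        for sfx in suffixes:
            if lower.endswith(sfx):
                prefix = prefix[:-len(sfx)]
                break
        prefix = posixpath.normpath(prefix)
        if not prefix.endswith('/'):
            prefix += '/'
        if prefix.startswith('../') or prefix.startswith('/') or '/../' in prefix:
            raise ValueError('archive prefix contains illegal components')
        return prefix

    # sketch_prefix('proj-1.0.tar.gz') -> 'proj-1.0/'
    # sketch_prefix('../evil')         -> raises ValueError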
36 class tarit:
36 class tarit:
37 '''write archive to tar file or stream. can write uncompressed,
37 '''write archive to tar file or stream. can write uncompressed,
38 or compress with gzip or bzip2.'''
38 or compress with gzip or bzip2.'''
39
39
40 def __init__(self, dest, prefix, mtime, kind=''):
40 def __init__(self, dest, prefix, mtime, kind=''):
41 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
41 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
42 '.tgz', '.tbz2'])
42 '.tgz', '.tbz2'])
43 self.mtime = mtime
43 self.mtime = mtime
44 if isinstance(dest, str):
44 if isinstance(dest, str):
45 self.z = tarfile.open(dest, mode='w:'+kind)
45 self.z = tarfile.open(dest, mode='w:'+kind)
46 else:
46 else:
47 self.z = tarfile.open(mode='w|'+kind, fileobj=dest)
47 self.z = tarfile.open(mode='w|'+kind, fileobj=dest)
48
48
49 def addfile(self, name, mode, data):
49 def addfile(self, name, mode, data):
50 i = tarfile.TarInfo(self.prefix + name)
50 i = tarfile.TarInfo(self.prefix + name)
51 i.mtime = self.mtime
51 i.mtime = self.mtime
52 i.size = len(data)
52 i.size = len(data)
53 i.mode = mode
53 i.mode = mode
54 self.z.addfile(i, cStringIO.StringIO(data))
54 self.z.addfile(i, cStringIO.StringIO(data))
55
55
56 def done(self):
56 def done(self):
57 self.z.close()
57 self.z.close()
58
58
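tarit builds tar members from in-memory strings rather than files on disk, and switches to tarfile's 'w|<kind>' stream mode when the destination is a file-like object (for example an HTTP response) instead of a named file. A minimal standalone sketch of that technique with only the standard library; the member name and payload are invented:

    # write one in-memory member to a gzipped tar stream
    import cStringIO, tarfile, time

    payload = 'hello archive\n'
    out = cStringIO.StringIO()            # stand-in for a write-only stream
    tf = tarfile.open(mode='w|gz', fileobj=out)
    info = tarfile.TarInfo('proj-1.0/hello.txt')
    info.size = len(payload)
    info.mtime = int(time.time())
    info.mode = 0644
    tf.addfile(info, cStringIO.StringIO(payload))
    tf.close()
    # out.getvalue() now holds a complete .tar.gz stream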
59 class tellable:
59 class tellable:
60 '''provide tell method for zipfile.ZipFile when writing to http
60 '''provide tell method for zipfile.ZipFile when writing to http
61 response file object.'''
61 response file object.'''
62
62
63 def __init__(self, fp):
63 def __init__(self, fp):
64 self.fp = fp
64 self.fp = fp
65 self.offset = 0
65 self.offset = 0
66
66
67 def __getattr__(self, key):
67 def __getattr__(self, key):
68 return getattr(self.fp, key)
68 return getattr(self.fp, key)
69
69
70 def write(self, s):
70 def write(self, s):
71 self.fp.write(s)
71 self.fp.write(s)
72 self.offset += len(s)
72 self.offset += len(s)
73
73
74 def tell(self):
74 def tell(self):
75 return self.offset
75 return self.offset
76
76
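zipfile calls tell() on its output file while writing, which fails for pipe-like objects such as an HTTP response; tellable works around that by counting the bytes it forwards. A self-contained sketch of the same wrapper applied to a dummy write-only object (the 'sink' class is invented for the example):

    # 'sink' mimics a stream that can write but cannot seek or tell
    class sink(object):
        def write(self, s):
            pass

    class counting(object):
        '''forward writes and track the offset so tell() can be answered.'''
        def __init__(self, fp):
            self.fp, self.offset = fp, 0
        def __getattr__(self, key):
            return getattr(self.fp, key)
        def write(self, s):
            self.fp.write(s)
            self.offset += len(s)
        def tell(self):
            return self.offset

    fp = counting(sink())
    fp.write('ab')
    fp.write('cde')
    assert fp.tell() == 5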
77 class zipit:
77 class zipit:
78 '''write archive to zip file or stream. can write uncompressed,
78 '''write archive to zip file or stream. can write uncompressed,
79 or compressed with deflate.'''
79 or compressed with deflate.'''
80
80
81 def __init__(self, dest, prefix, mtime, compress=True):
81 def __init__(self, dest, prefix, mtime, compress=True):
82 self.prefix = tidyprefix(dest, prefix, ('.zip',))
82 self.prefix = tidyprefix(dest, prefix, ('.zip',))
83 if not isinstance(dest, str):
83 if not isinstance(dest, str):
84 try:
84 try:
85 dest.tell()
85 dest.tell()
86 except (AttributeError, IOError):
86 except (AttributeError, IOError):
87 dest = tellable(dest)
87 dest = tellable(dest)
88 self.z = zipfile.ZipFile(dest, 'w',
88 self.z = zipfile.ZipFile(dest, 'w',
89 compress and zipfile.ZIP_DEFLATED or
89 compress and zipfile.ZIP_DEFLATED or
90 zipfile.ZIP_STORED)
90 zipfile.ZIP_STORED)
91 self.date_time = time.gmtime(mtime)[:6]
91 self.date_time = time.gmtime(mtime)[:6]
92
92
93 def addfile(self, name, mode, data):
93 def addfile(self, name, mode, data):
94 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
94 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
95 i.compress_type = self.z.compression
95 i.compress_type = self.z.compression
96 i.flag_bits = 0x08
96 i.flag_bits = 0x08
97 # unzip will not honor unix file modes unless file creator is
97 # unzip will not honor unix file modes unless file creator is
98 # set to unix (id 3).
98 # set to unix (id 3).
99 i.create_system = 3
99 i.create_system = 3
100 i.external_attr = (mode | stat.S_IFREG) << 16L
100 i.external_attr = (mode | stat.S_IFREG) << 16L
101 self.z.writestr(i, data)
101 self.z.writestr(i, data)
102
102
103 def done(self):
103 def done(self):
104 self.z.close()
104 self.z.close()
105
105
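The two non-obvious ZipInfo settings here are create_system = 3 (mark the entry as created on Unix so unzip trusts the mode bits) and packing the file mode into the top 16 bits of external_attr. A standalone sketch with the standard zipfile module; the file name and data are invented for illustration:

    import cStringIO, stat, time, zipfile

    buf = cStringIO.StringIO()
    z = zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED)
    info = zipfile.ZipInfo('proj-1.0/run.sh', time.gmtime(time.time())[:6])
    info.compress_type = zipfile.ZIP_DEFLATED
    info.create_system = 3                             # "Unix" creator
    info.external_attr = (0755 | stat.S_IFREG) << 16L  # mode in high bits
    z.writestr(info, '#!/bin/sh\necho hi\n')
    z.close()
    # extracting this archive with unzip preserves the 0755 mode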
106 class fileit:
106 class fileit:
107 '''write archive as files in directory.'''
107 '''write archive as files in directory.'''
108
108
109 def __init__(self, name, prefix, mtime):
109 def __init__(self, name, prefix, mtime):
110 if prefix:
110 if prefix:
111 raise util.Abort(_('cannot give prefix when archiving to files'))
111 raise util.Abort(_('cannot give prefix when archiving to files'))
112 self.basedir = name
112 self.basedir = name
113 self.dirs = {}
113 self.dirs = {}
114 self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
114 self.oflags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
115 getattr(os, 'O_BINARY', 0) |
115 getattr(os, 'O_BINARY', 0) |
116 getattr(os, 'O_NOFOLLOW', 0))
116 getattr(os, 'O_NOFOLLOW', 0))
117
117
118 def addfile(self, name, mode, data):
118 def addfile(self, name, mode, data):
119 destfile = os.path.join(self.basedir, name)
119 destfile = os.path.join(self.basedir, name)
120 destdir = os.path.dirname(destfile)
120 destdir = os.path.dirname(destfile)
121 if destdir not in self.dirs:
121 if destdir not in self.dirs:
122 if not os.path.isdir(destdir):
122 if not os.path.isdir(destdir):
123 os.makedirs(destdir)
123 os.makedirs(destdir)
124 self.dirs[destdir] = 1
124 self.dirs[destdir] = 1
125 os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)
125 os.fdopen(os.open(destfile, self.oflags, mode), 'wb').write(data)
126
126
127 def done(self):
127 def done(self):
128 pass
128 pass
129
129
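fileit opens every destination with O_CREAT | O_EXCL (refuse to overwrite an existing file) plus O_BINARY and O_NOFOLLOW where the platform provides them (binary mode on Windows, refuse to write through a symlink on POSIX). A short standalone sketch of that open pattern; the path and data are example values:

    import os

    flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY |
             getattr(os, 'O_BINARY', 0) |      # Windows: binary mode
             getattr(os, 'O_NOFOLLOW', 0))     # POSIX: do not follow symlinks
    fd = os.open('example.txt', flags, 0644)
    try:
        os.write(fd, 'some data\n')
    finally:
        os.close(fd)
    # a second run raises OSError (EEXIST) instead of clobbering the file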
130 archivers = {
130 archivers = {
131 'files': fileit,
131 'files': fileit,
132 'tar': tarit,
132 'tar': tarit,
133 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
133 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
134 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
134 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
135 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
135 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
136 'zip': zipit,
136 'zip': zipit,
137 }
137 }
138
138
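Each value in archivers is a callable taking (dest, prefix, mtime); the lambdas simply bind the compression flavour, so archive() below can dispatch on the user-visible kind string without branching. An illustrative use of the table, with invented destination, prefix and member values:

    import time
    kind = 'tbz2'
    archiver = archivers[kind]('snapshot.tar.bz2', 'proj-1.0/', int(time.time()))
    archiver.addfile('README', 0644, 'example contents\n')
    archiver.done()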
139 def archive(repo, dest, node, kind, decode=True, matchfn=None,
139 def archive(repo, dest, node, kind, decode=True, matchfn=None,
140 prefix=None, mtime=None):
140 prefix=None, mtime=None):
141 '''create archive of repo as it was at node.
141 '''create archive of repo as it was at node.
142
142
143 dest can be name of directory, name of archive file, or file
143 dest can be name of directory, name of archive file, or file
144 object to write archive to.
144 object to write archive to.
145
145
146 kind is type of archive to create.
146 kind is type of archive to create.
147
147
148 decode tells whether to put files through decode filters from
148 decode tells whether to put files through decode filters from
149 hgrc.
149 hgrc.
150
150
151 matchfn is function to filter names of files to write to archive.
151 matchfn is function to filter names of files to write to archive.
152
152
153 prefix is name of path to put before every archive member.'''
153 prefix is name of path to put before every archive member.'''
154
154
155 def write(name, mode, data):
155 def write(name, mode, data):
156 if matchfn and not matchfn(name): return
156 if matchfn and not matchfn(name): return
157 if decode:
157 if decode:
158 fp = cStringIO.StringIO()
158 fp = cStringIO.StringIO()
159 repo.wwrite(name, data, fp)
159 repo.wwrite(name, data, fp)
160 data = fp.getvalue()
160 data = fp.getvalue()
161 archiver.addfile(name, mode, data)
161 archiver.addfile(name, mode, data)
162
162
163 change = repo.changelog.read(node)
163 change = repo.changelog.read(node)
164 mn = change[0]
164 mn = change[0]
165 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
165 archiver = archivers[kind](dest, prefix, mtime or change[2][0])
166 mf = repo.manifest.read(mn).items()
166 mf = repo.manifest.read(mn).items()
167 mff = repo.manifest.readflags(mn)
168 mf.sort()
167 mf.sort()
169 write('.hg_archival.txt', 0644,
168 write('.hg_archival.txt', 0644,
170 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
169 'repo: %s\nnode: %s\n' % (hex(repo.changelog.node(0)), hex(node)))
171 for filename, filenode in mf:
170 for filename, filenode in mf:
172 write(filename, mff.execf(filename) and 0755 or 0644,
171 write(filename, mf.execf(filename) and 0755 or 0644,
173 repo.file(filename).read(filenode))
172 repo.file(filename).read(filenode))
174 archiver.done()
173 archiver.done()
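Putting the pieces together: callers hand archive() a repository, a destination (path or file object), a node and a kind string, and everything else is optional. A hedged usage sketch; the repository path and revision are example values, and the module imports assume the layout shown in this changeset:

    # illustrative driver, not part of the changeset
    from mercurial import ui, hg, archival

    u = ui.ui()
    repo = hg.repository(u, '/path/to/repo')       # example path
    node = repo.lookup('tip')
    archival.archive(repo, 'snapshot.tgz', node, 'tgz', prefix='snapshot/')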
@@ -1,3687 +1,3686 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
13 demandload(globals(), "fnmatch mdiff random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
15 demandload(globals(), "archival cStringIO changegroup email.Parser")
16 demandload(globals(), "hgweb.server sshserver")
16 demandload(globals(), "hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted, unknown = repo.changes()
24 modified, added, removed, deleted, unknown = repo.changes()
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def filterfiles(filters, files):
28 def filterfiles(filters, files):
29 l = [x for x in files if x in filters]
29 l = [x for x in files if x in filters]
30
30
31 for t in filters:
31 for t in filters:
32 if t and t[-1] != "/":
32 if t and t[-1] != "/":
33 t += "/"
33 t += "/"
34 l += [x for x in files if x.startswith(t)]
34 l += [x for x in files if x.startswith(t)]
35 return l
35 return l
36
36
37 def relpath(repo, args):
37 def relpath(repo, args):
38 cwd = repo.getcwd()
38 cwd = repo.getcwd()
39 if cwd:
39 if cwd:
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
40 return [util.normpath(os.path.join(cwd, x)) for x in args]
41 return args
41 return args
42
42
43 def logmessage(opts):
43 def logmessage(opts):
44 """ get the log message according to the -m and -l options """
44 """ get the log message according to the -m and -l options """
45 message = opts['message']
45 message = opts['message']
46 logfile = opts['logfile']
46 logfile = opts['logfile']
47
47
48 if message and logfile:
48 if message and logfile:
49 raise util.Abort(_('options --message and --logfile are mutually '
49 raise util.Abort(_('options --message and --logfile are mutually '
50 'exclusive'))
50 'exclusive'))
51 if not message and logfile:
51 if not message and logfile:
52 try:
52 try:
53 if logfile == '-':
53 if logfile == '-':
54 message = sys.stdin.read()
54 message = sys.stdin.read()
55 else:
55 else:
56 message = open(logfile).read()
56 message = open(logfile).read()
57 except IOError, inst:
57 except IOError, inst:
58 raise util.Abort(_("can't read commit message '%s': %s") %
58 raise util.Abort(_("can't read commit message '%s': %s") %
59 (logfile, inst.strerror))
59 (logfile, inst.strerror))
60 return message
60 return message
61
61
62 def matchpats(repo, pats=[], opts={}, head=''):
62 def matchpats(repo, pats=[], opts={}, head=''):
63 cwd = repo.getcwd()
63 cwd = repo.getcwd()
64 if not pats and cwd:
64 if not pats and cwd:
65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
65 opts['include'] = [os.path.join(cwd, i) for i in opts['include']]
66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
66 opts['exclude'] = [os.path.join(cwd, x) for x in opts['exclude']]
67 cwd = ''
67 cwd = ''
68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
68 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
69 opts.get('exclude'), head)
69 opts.get('exclude'), head)
70
70
71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
71 def makewalk(repo, pats, opts, node=None, head='', badmatch=None):
72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
72 files, matchfn, anypats = matchpats(repo, pats, opts, head)
73 exact = dict(zip(files, files))
73 exact = dict(zip(files, files))
74 def walk():
74 def walk():
75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
75 for src, fn in repo.walk(node=node, files=files, match=matchfn,
76 badmatch=badmatch):
76 badmatch=badmatch):
77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
77 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
78 return files, matchfn, walk()
78 return files, matchfn, walk()
79
79
80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
80 def walk(repo, pats, opts, node=None, head='', badmatch=None):
81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
81 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
82 for r in results:
82 for r in results:
83 yield r
83 yield r
84
84
85 def walkchangerevs(ui, repo, pats, opts):
85 def walkchangerevs(ui, repo, pats, opts):
86 '''Iterate over files and the revs they changed in.
86 '''Iterate over files and the revs they changed in.
87
87
88 Callers most commonly need to iterate backwards over the history
88 Callers most commonly need to iterate backwards over the history
89 they are interested in. Doing so has awful (quadratic-looking)
89 they are interested in. Doing so has awful (quadratic-looking)
90 performance, so we use iterators in a "windowed" way.
90 performance, so we use iterators in a "windowed" way.
91
91
92 We walk a window of revisions in the desired order. Within the
92 We walk a window of revisions in the desired order. Within the
93 window, we first walk forwards to gather data, then in the desired
93 window, we first walk forwards to gather data, then in the desired
94 order (usually backwards) to display it.
94 order (usually backwards) to display it.
95
95
96 This function returns an (iterator, getchange, matchfn) tuple. The
96 This function returns an (iterator, getchange, matchfn) tuple. The
97 getchange function returns the changelog entry for a numeric
97 getchange function returns the changelog entry for a numeric
98 revision. The iterator yields 3-tuples. They will be of one of
98 revision. The iterator yields 3-tuples. They will be of one of
99 the following forms:
99 the following forms:
100
100
101 "window", incrementing, lastrev: stepping through a window,
101 "window", incrementing, lastrev: stepping through a window,
102 positive if walking forwards through revs, last rev in the
102 positive if walking forwards through revs, last rev in the
103 sequence iterated over - use to reset state for the current window
103 sequence iterated over - use to reset state for the current window
104
104
105 "add", rev, fns: out-of-order traversal of the given file names
105 "add", rev, fns: out-of-order traversal of the given file names
106 fns, which changed during revision rev - use to gather data for
106 fns, which changed during revision rev - use to gather data for
107 possible display
107 possible display
108
108
109 "iter", rev, None: in-order traversal of the revs earlier iterated
109 "iter", rev, None: in-order traversal of the revs earlier iterated
110 over with "add" - use to display data'''
110 over with "add" - use to display data'''
111
111
112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
112 def increasing_windows(start, end, windowsize=8, sizelimit=512):
113 if start < end:
113 if start < end:
114 while start < end:
114 while start < end:
115 yield start, min(windowsize, end-start)
115 yield start, min(windowsize, end-start)
116 start += windowsize
116 start += windowsize
117 if windowsize < sizelimit:
117 if windowsize < sizelimit:
118 windowsize *= 2
118 windowsize *= 2
119 else:
119 else:
120 while start > end:
120 while start > end:
121 yield start, min(windowsize, start-end-1)
121 yield start, min(windowsize, start-end-1)
122 start -= windowsize
122 start -= windowsize
123 if windowsize < sizelimit:
123 if windowsize < sizelimit:
124 windowsize *= 2
124 windowsize *= 2
125
125
126
126
127 files, matchfn, anypats = matchpats(repo, pats, opts)
127 files, matchfn, anypats = matchpats(repo, pats, opts)
128 follow = opts.get('follow') or opts.get('follow_first')
128 follow = opts.get('follow') or opts.get('follow_first')
129
129
130 if repo.changelog.count() == 0:
130 if repo.changelog.count() == 0:
131 return [], False, matchfn
131 return [], False, matchfn
132
132
133 if follow:
133 if follow:
134 p = repo.dirstate.parents()[0]
134 p = repo.dirstate.parents()[0]
135 if p == nullid:
135 if p == nullid:
136 ui.warn(_('No working directory revision; defaulting to tip\n'))
136 ui.warn(_('No working directory revision; defaulting to tip\n'))
137 start = 'tip'
137 start = 'tip'
138 else:
138 else:
139 start = repo.changelog.rev(p)
139 start = repo.changelog.rev(p)
140 defrange = '%s:0' % start
140 defrange = '%s:0' % start
141 else:
141 else:
142 defrange = 'tip:0'
142 defrange = 'tip:0'
143 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
143 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
144 wanted = {}
144 wanted = {}
145 slowpath = anypats
145 slowpath = anypats
146 fncache = {}
146 fncache = {}
147
147
148 chcache = {}
148 chcache = {}
149 def getchange(rev):
149 def getchange(rev):
150 ch = chcache.get(rev)
150 ch = chcache.get(rev)
151 if ch is None:
151 if ch is None:
152 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
152 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
153 return ch
153 return ch
154
154
155 if not slowpath and not files:
155 if not slowpath and not files:
156 # No files, no patterns. Display all revs.
156 # No files, no patterns. Display all revs.
157 wanted = dict(zip(revs, revs))
157 wanted = dict(zip(revs, revs))
158 copies = []
158 copies = []
159 if not slowpath:
159 if not slowpath:
160 # Only files, no patterns. Check the history of each file.
160 # Only files, no patterns. Check the history of each file.
161 def filerevgen(filelog, node):
161 def filerevgen(filelog, node):
162 cl_count = repo.changelog.count()
162 cl_count = repo.changelog.count()
163 if node is None:
163 if node is None:
164 last = filelog.count() - 1
164 last = filelog.count() - 1
165 else:
165 else:
166 last = filelog.rev(node)
166 last = filelog.rev(node)
167 for i, window in increasing_windows(last, -1):
167 for i, window in increasing_windows(last, -1):
168 revs = []
168 revs = []
169 for j in xrange(i - window, i + 1):
169 for j in xrange(i - window, i + 1):
170 n = filelog.node(j)
170 n = filelog.node(j)
171 revs.append((filelog.linkrev(n),
171 revs.append((filelog.linkrev(n),
172 follow and filelog.renamed(n)))
172 follow and filelog.renamed(n)))
173 revs.reverse()
173 revs.reverse()
174 for rev in revs:
174 for rev in revs:
175 # only yield rev for which we have the changelog, it can
175 # only yield rev for which we have the changelog, it can
176 # happen while doing "hg log" during a pull or commit
176 # happen while doing "hg log" during a pull or commit
177 if rev[0] < cl_count:
177 if rev[0] < cl_count:
178 yield rev
178 yield rev
179 def iterfiles():
179 def iterfiles():
180 for filename in files:
180 for filename in files:
181 yield filename, None
181 yield filename, None
182 for filename_node in copies:
182 for filename_node in copies:
183 yield filename_node
183 yield filename_node
184 minrev, maxrev = min(revs), max(revs)
184 minrev, maxrev = min(revs), max(revs)
185 for file_, node in iterfiles():
185 for file_, node in iterfiles():
186 filelog = repo.file(file_)
186 filelog = repo.file(file_)
187 # A zero count may be a directory or deleted file, so
187 # A zero count may be a directory or deleted file, so
188 # try to find matching entries on the slow path.
188 # try to find matching entries on the slow path.
189 if filelog.count() == 0:
189 if filelog.count() == 0:
190 slowpath = True
190 slowpath = True
191 break
191 break
192 for rev, copied in filerevgen(filelog, node):
192 for rev, copied in filerevgen(filelog, node):
193 if rev <= maxrev:
193 if rev <= maxrev:
194 if rev < minrev:
194 if rev < minrev:
195 break
195 break
196 fncache.setdefault(rev, [])
196 fncache.setdefault(rev, [])
197 fncache[rev].append(file_)
197 fncache[rev].append(file_)
198 wanted[rev] = 1
198 wanted[rev] = 1
199 if follow and copied:
199 if follow and copied:
200 copies.append(copied)
200 copies.append(copied)
201 if slowpath:
201 if slowpath:
202 if follow:
202 if follow:
203 raise util.Abort(_('can only follow copies/renames for explicit '
203 raise util.Abort(_('can only follow copies/renames for explicit '
204 'file names'))
204 'file names'))
205
205
206 # The slow path checks files modified in every changeset.
206 # The slow path checks files modified in every changeset.
207 def changerevgen():
207 def changerevgen():
208 for i, window in increasing_windows(repo.changelog.count()-1, -1):
208 for i, window in increasing_windows(repo.changelog.count()-1, -1):
209 for j in xrange(i - window, i + 1):
209 for j in xrange(i - window, i + 1):
210 yield j, getchange(j)[3]
210 yield j, getchange(j)[3]
211
211
212 for rev, changefiles in changerevgen():
212 for rev, changefiles in changerevgen():
213 matches = filter(matchfn, changefiles)
213 matches = filter(matchfn, changefiles)
214 if matches:
214 if matches:
215 fncache[rev] = matches
215 fncache[rev] = matches
216 wanted[rev] = 1
216 wanted[rev] = 1
217
217
218 def iterate():
218 def iterate():
219 class followfilter:
219 class followfilter:
220 def __init__(self, onlyfirst=False):
220 def __init__(self, onlyfirst=False):
221 self.startrev = -1
221 self.startrev = -1
222 self.roots = []
222 self.roots = []
223 self.onlyfirst = onlyfirst
223 self.onlyfirst = onlyfirst
224
224
225 def match(self, rev):
225 def match(self, rev):
226 def realparents(rev):
226 def realparents(rev):
227 if self.onlyfirst:
227 if self.onlyfirst:
228 return repo.changelog.parentrevs(rev)[0:1]
228 return repo.changelog.parentrevs(rev)[0:1]
229 else:
229 else:
230 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
230 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
231
231
232 if self.startrev == -1:
232 if self.startrev == -1:
233 self.startrev = rev
233 self.startrev = rev
234 return True
234 return True
235
235
236 if rev > self.startrev:
236 if rev > self.startrev:
237 # forward: all descendants
237 # forward: all descendants
238 if not self.roots:
238 if not self.roots:
239 self.roots.append(self.startrev)
239 self.roots.append(self.startrev)
240 for parent in realparents(rev):
240 for parent in realparents(rev):
241 if parent in self.roots:
241 if parent in self.roots:
242 self.roots.append(rev)
242 self.roots.append(rev)
243 return True
243 return True
244 else:
244 else:
245 # backwards: all parents
245 # backwards: all parents
246 if not self.roots:
246 if not self.roots:
247 self.roots.extend(realparents(self.startrev))
247 self.roots.extend(realparents(self.startrev))
248 if rev in self.roots:
248 if rev in self.roots:
249 self.roots.remove(rev)
249 self.roots.remove(rev)
250 self.roots.extend(realparents(rev))
250 self.roots.extend(realparents(rev))
251 return True
251 return True
252
252
253 return False
253 return False
254
254
255 if follow and not files:
255 if follow and not files:
256 ff = followfilter(onlyfirst=opts.get('follow_first'))
256 ff = followfilter(onlyfirst=opts.get('follow_first'))
257 def want(rev):
257 def want(rev):
258 if rev not in wanted:
258 if rev not in wanted:
259 return False
259 return False
260 return ff.match(rev)
260 return ff.match(rev)
261 else:
261 else:
262 def want(rev):
262 def want(rev):
263 return rev in wanted
263 return rev in wanted
264
264
265 for i, window in increasing_windows(0, len(revs)):
265 for i, window in increasing_windows(0, len(revs)):
266 yield 'window', revs[0] < revs[-1], revs[-1]
266 yield 'window', revs[0] < revs[-1], revs[-1]
267 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
267 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
268 srevs = list(nrevs)
268 srevs = list(nrevs)
269 srevs.sort()
269 srevs.sort()
270 for rev in srevs:
270 for rev in srevs:
271 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
271 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
272 yield 'add', rev, fns
272 yield 'add', rev, fns
273 for rev in nrevs:
273 for rev in nrevs:
274 yield 'iter', rev, None
274 yield 'iter', rev, None
275 return iterate(), getchange, matchfn
275 return iterate(), getchange, matchfn
276
276
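Callers drive the protocol described in the docstring with a two-phase loop: gather per-revision data when an 'add' tuple arrives, then emit output in the requested order when the matching 'iter' tuple arrives, resetting state at each 'window'. A hedged sketch of such a consumer; 'display' is a placeholder, not the real log code, and ui/repo/pats/opts are assumed to be in scope:

    changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
    matched = {}                        # rev -> file names gathered in 'add'
    for st, rev, fns in changeiter:
        if st == 'window':
            matched.clear()             # reset state for the new window
        elif st == 'add':
            matched[rev] = fns          # out-of-order data gathering
        elif st == 'iter':
            if rev in matched:
                display(rev, getchange(rev), matched[rev])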
277 revrangesep = ':'
277 revrangesep = ':'
278
278
279 def revfix(repo, val, defval):
279 def revfix(repo, val, defval):
280 '''turn user-level id of changeset into rev number.
280 '''turn user-level id of changeset into rev number.
281 user-level id can be tag, changeset, rev number, or negative rev
281 user-level id can be tag, changeset, rev number, or negative rev
282 number relative to number of revs (-1 is tip, etc).'''
282 number relative to number of revs (-1 is tip, etc).'''
283 if not val:
283 if not val:
284 return defval
284 return defval
285 try:
285 try:
286 num = int(val)
286 num = int(val)
287 if str(num) != val:
287 if str(num) != val:
288 raise ValueError
288 raise ValueError
289 if num < 0:
289 if num < 0:
290 num += repo.changelog.count()
290 num += repo.changelog.count()
291 if num < 0:
291 if num < 0:
292 num = 0
292 num = 0
293 elif num >= repo.changelog.count():
293 elif num >= repo.changelog.count():
294 raise ValueError
294 raise ValueError
295 except ValueError:
295 except ValueError:
296 try:
296 try:
297 num = repo.changelog.rev(repo.lookup(val))
297 num = repo.changelog.rev(repo.lookup(val))
298 except KeyError:
298 except KeyError:
299 raise util.Abort(_('invalid revision identifier %s'), val)
299 raise util.Abort(_('invalid revision identifier %s'), val)
300 return num
300 return num
301
301
302 def revpair(ui, repo, revs):
302 def revpair(ui, repo, revs):
303 '''return pair of nodes, given list of revisions. second item can
303 '''return pair of nodes, given list of revisions. second item can
304 be None, meaning use working dir.'''
304 be None, meaning use working dir.'''
305 if not revs:
305 if not revs:
306 return repo.dirstate.parents()[0], None
306 return repo.dirstate.parents()[0], None
307 end = None
307 end = None
308 if len(revs) == 1:
308 if len(revs) == 1:
309 start = revs[0]
309 start = revs[0]
310 if revrangesep in start:
310 if revrangesep in start:
311 start, end = start.split(revrangesep, 1)
311 start, end = start.split(revrangesep, 1)
312 start = revfix(repo, start, 0)
312 start = revfix(repo, start, 0)
313 end = revfix(repo, end, repo.changelog.count() - 1)
313 end = revfix(repo, end, repo.changelog.count() - 1)
314 else:
314 else:
315 start = revfix(repo, start, None)
315 start = revfix(repo, start, None)
316 elif len(revs) == 2:
316 elif len(revs) == 2:
317 if revrangesep in revs[0] or revrangesep in revs[1]:
317 if revrangesep in revs[0] or revrangesep in revs[1]:
318 raise util.Abort(_('too many revisions specified'))
318 raise util.Abort(_('too many revisions specified'))
319 start = revfix(repo, revs[0], None)
319 start = revfix(repo, revs[0], None)
320 end = revfix(repo, revs[1], None)
320 end = revfix(repo, revs[1], None)
321 else:
321 else:
322 raise util.Abort(_('too many revisions specified'))
322 raise util.Abort(_('too many revisions specified'))
323 if end is not None: end = repo.lookup(str(end))
323 if end is not None: end = repo.lookup(str(end))
324 return repo.lookup(str(start)), end
324 return repo.lookup(str(start)), end
325
325
326 def revrange(ui, repo, revs):
326 def revrange(ui, repo, revs):
327 """Yield revisions as strings from a list of revision specifications."""
327 """Yield revisions as strings from a list of revision specifications."""
328 seen = {}
328 seen = {}
329 for spec in revs:
329 for spec in revs:
330 if revrangesep in spec:
330 if revrangesep in spec:
331 start, end = spec.split(revrangesep, 1)
331 start, end = spec.split(revrangesep, 1)
332 start = revfix(repo, start, 0)
332 start = revfix(repo, start, 0)
333 end = revfix(repo, end, repo.changelog.count() - 1)
333 end = revfix(repo, end, repo.changelog.count() - 1)
334 step = start > end and -1 or 1
334 step = start > end and -1 or 1
335 for rev in xrange(start, end+step, step):
335 for rev in xrange(start, end+step, step):
336 if rev in seen:
336 if rev in seen:
337 continue
337 continue
338 seen[rev] = 1
338 seen[rev] = 1
339 yield str(rev)
339 yield str(rev)
340 else:
340 else:
341 rev = revfix(repo, spec, None)
341 rev = revfix(repo, spec, None)
342 if rev in seen:
342 if rev in seen:
343 continue
343 continue
344 seen[rev] = 1
344 seen[rev] = 1
345 yield str(rev)
345 yield str(rev)
346
346
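So arguments in the style of 'hg log -r 3:5 -r 4 -r 7:6' expand, in order and without duplicates, to individual revision strings. A tiny repository-free sketch of the same expansion for plain integer specs (tags and negative offsets need revfix and the repository, so they are left out here):

    def simple_revrange(specs):
        seen = {}
        for spec in specs:
            if ':' in spec:
                start, end = map(int, spec.split(':', 1))
                step = start > end and -1 or 1
                revs = xrange(start, end + step, step)
            else:
                revs = [int(spec)]
            for rev in revs:
                if rev not in seen:
                    seen[rev] = 1
                    yield str(rev)

    # list(simple_revrange(['3:5', '4', '7:6'])) -> ['3', '4', '5', '7', '6']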
347 def make_filename(repo, pat, node,
347 def make_filename(repo, pat, node,
348 total=None, seqno=None, revwidth=None, pathname=None):
348 total=None, seqno=None, revwidth=None, pathname=None):
349 node_expander = {
349 node_expander = {
350 'H': lambda: hex(node),
350 'H': lambda: hex(node),
351 'R': lambda: str(repo.changelog.rev(node)),
351 'R': lambda: str(repo.changelog.rev(node)),
352 'h': lambda: short(node),
352 'h': lambda: short(node),
353 }
353 }
354 expander = {
354 expander = {
355 '%': lambda: '%',
355 '%': lambda: '%',
356 'b': lambda: os.path.basename(repo.root),
356 'b': lambda: os.path.basename(repo.root),
357 }
357 }
358
358
359 try:
359 try:
360 if node:
360 if node:
361 expander.update(node_expander)
361 expander.update(node_expander)
362 if node and revwidth is not None:
362 if node and revwidth is not None:
363 expander['r'] = (lambda:
363 expander['r'] = (lambda:
364 str(repo.changelog.rev(node)).zfill(revwidth))
364 str(repo.changelog.rev(node)).zfill(revwidth))
365 if total is not None:
365 if total is not None:
366 expander['N'] = lambda: str(total)
366 expander['N'] = lambda: str(total)
367 if seqno is not None:
367 if seqno is not None:
368 expander['n'] = lambda: str(seqno)
368 expander['n'] = lambda: str(seqno)
369 if total is not None and seqno is not None:
369 if total is not None and seqno is not None:
370 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
370 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
371 if pathname is not None:
371 if pathname is not None:
372 expander['s'] = lambda: os.path.basename(pathname)
372 expander['s'] = lambda: os.path.basename(pathname)
373 expander['d'] = lambda: os.path.dirname(pathname) or '.'
373 expander['d'] = lambda: os.path.dirname(pathname) or '.'
374 expander['p'] = lambda: pathname
374 expander['p'] = lambda: pathname
375
375
376 newname = []
376 newname = []
377 patlen = len(pat)
377 patlen = len(pat)
378 i = 0
378 i = 0
379 while i < patlen:
379 while i < patlen:
380 c = pat[i]
380 c = pat[i]
381 if c == '%':
381 if c == '%':
382 i += 1
382 i += 1
383 c = pat[i]
383 c = pat[i]
384 c = expander[c]()
384 c = expander[c]()
385 newname.append(c)
385 newname.append(c)
386 i += 1
386 i += 1
387 return ''.join(newname)
387 return ''.join(newname)
388 except KeyError, inst:
388 except KeyError, inst:
389 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
389 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
390 inst.args[0])
390 inst.args[0])
391
391
392 def make_file(repo, pat, node=None,
392 def make_file(repo, pat, node=None,
393 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
393 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
394 if not pat or pat == '-':
394 if not pat or pat == '-':
395 return 'w' in mode and sys.stdout or sys.stdin
395 return 'w' in mode and sys.stdout or sys.stdin
396 if hasattr(pat, 'write') and 'w' in mode:
396 if hasattr(pat, 'write') and 'w' in mode:
397 return pat
397 return pat
398 if hasattr(pat, 'read') and 'r' in mode:
398 if hasattr(pat, 'read') and 'r' in mode:
399 return pat
399 return pat
400 return open(make_filename(repo, pat, node, total, seqno, revwidth,
400 return open(make_filename(repo, pat, node, total, seqno, revwidth,
401 pathname),
401 pathname),
402 mode)
402 mode)
403
403
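make_filename drives output patterns such as those given to 'hg export -o': every %-spec in the pattern is looked up in the expander table and replaced by a callable's result, so an unknown spec raises a clean error. A standalone sketch of the expansion loop; the specs and values below are invented, not the full table built above:

    def expand(pat, expander):
        out, i = [], 0
        while i < len(pat):
            c = pat[i]
            if c == '%':
                i += 1
                c = expander[pat[i]]()   # KeyError -> unknown format spec
            out.append(c)
            i += 1
        return ''.join(out)

    expander = {'%': lambda: '%',
                'R': lambda: '42',            # pretend revision number
                'h': lambda: 'abc123def456'}  # pretend short hash
    # expand('%R-%h.patch', expander) -> '42-abc123def456.patch'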
404 def write_bundle(cg, filename=None, compress=True):
404 def write_bundle(cg, filename=None, compress=True):
405 """Write a bundle file and return its filename.
405 """Write a bundle file and return its filename.
406
406
407 Existing files will not be overwritten.
407 Existing files will not be overwritten.
408 If no filename is specified, a temporary file is created.
408 If no filename is specified, a temporary file is created.
409 bz2 compression can be turned off.
409 bz2 compression can be turned off.
410 The bundle file will be deleted in case of errors.
410 The bundle file will be deleted in case of errors.
411 """
411 """
412 class nocompress(object):
412 class nocompress(object):
413 def compress(self, x):
413 def compress(self, x):
414 return x
414 return x
415 def flush(self):
415 def flush(self):
416 return ""
416 return ""
417
417
418 fh = None
418 fh = None
419 cleanup = None
419 cleanup = None
420 try:
420 try:
421 if filename:
421 if filename:
422 if os.path.exists(filename):
422 if os.path.exists(filename):
423 raise util.Abort(_("file '%s' already exists"), filename)
423 raise util.Abort(_("file '%s' already exists"), filename)
424 fh = open(filename, "wb")
424 fh = open(filename, "wb")
425 else:
425 else:
426 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
426 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
427 fh = os.fdopen(fd, "wb")
427 fh = os.fdopen(fd, "wb")
428 cleanup = filename
428 cleanup = filename
429
429
430 if compress:
430 if compress:
431 fh.write("HG10")
431 fh.write("HG10")
432 z = bz2.BZ2Compressor(9)
432 z = bz2.BZ2Compressor(9)
433 else:
433 else:
434 fh.write("HG10UN")
434 fh.write("HG10UN")
435 z = nocompress()
435 z = nocompress()
436 # parse the changegroup data, otherwise we will block
436 # parse the changegroup data, otherwise we will block
437 # in case of sshrepo because we don't know the end of the stream
437 # in case of sshrepo because we don't know the end of the stream
438
438
439 # an empty chunkiter is the end of the changegroup
439 # an empty chunkiter is the end of the changegroup
440 empty = False
440 empty = False
441 while not empty:
441 while not empty:
442 empty = True
442 empty = True
443 for chunk in changegroup.chunkiter(cg):
443 for chunk in changegroup.chunkiter(cg):
444 empty = False
444 empty = False
445 fh.write(z.compress(changegroup.genchunk(chunk)))
445 fh.write(z.compress(changegroup.genchunk(chunk)))
446 fh.write(z.compress(changegroup.closechunk()))
446 fh.write(z.compress(changegroup.closechunk()))
447 fh.write(z.flush())
447 fh.write(z.flush())
448 cleanup = None
448 cleanup = None
449 return filename
449 return filename
450 finally:
450 finally:
451 if fh is not None:
451 if fh is not None:
452 fh.close()
452 fh.close()
453 if cleanup is not None:
453 if cleanup is not None:
454 os.unlink(cleanup)
454 os.unlink(cleanup)
455
455
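A bundle file is just the magic header ('HG10' for bz2-compressed, 'HG10UN' for uncompressed) followed by the changegroup data run through the chosen compressor; the real code also length-prefixes each chunk via changegroup.genchunk, which this sketch skips. A standalone illustration of the framing with invented chunk contents:

    import bz2

    def toy_bundle(chunks, compress=True):
        if compress:
            header, z = "HG10", bz2.BZ2Compressor(9)
        else:
            class nocompress(object):
                def compress(self, x): return x
                def flush(self): return ""
            header, z = "HG10UN", nocompress()
        out = [header]
        for chunk in chunks:
            out.append(z.compress(chunk))
        out.append(z.flush())
        return ''.join(out)

    data = toy_bundle(['chunk one', 'chunk two'])
    assert data.startswith('HG10')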
456 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
456 def dodiff(fp, ui, repo, node1, node2, files=None, match=util.always,
457 changes=None, text=False, opts={}):
457 changes=None, text=False, opts={}):
458 if not node1:
458 if not node1:
459 node1 = repo.dirstate.parents()[0]
459 node1 = repo.dirstate.parents()[0]
460 # reading the data for node1 early allows it to play nicely
460 # reading the data for node1 early allows it to play nicely
461 # with repo.changes and the revlog cache.
461 # with repo.changes and the revlog cache.
462 change = repo.changelog.read(node1)
462 change = repo.changelog.read(node1)
463 mmap = repo.manifest.read(change[0])
463 mmap = repo.manifest.read(change[0])
464 date1 = util.datestr(change[2])
464 date1 = util.datestr(change[2])
465
465
466 if not changes:
466 if not changes:
467 changes = repo.changes(node1, node2, files, match=match)
467 changes = repo.changes(node1, node2, files, match=match)
468 modified, added, removed, deleted, unknown = changes
468 modified, added, removed, deleted, unknown = changes
469 if files:
469 if files:
470 modified, added, removed = map(lambda x: filterfiles(files, x),
470 modified, added, removed = map(lambda x: filterfiles(files, x),
471 (modified, added, removed))
471 (modified, added, removed))
472
472
473 if not modified and not added and not removed:
473 if not modified and not added and not removed:
474 return
474 return
475
475
476 if node2:
476 if node2:
477 change = repo.changelog.read(node2)
477 change = repo.changelog.read(node2)
478 mmap2 = repo.manifest.read(change[0])
478 mmap2 = repo.manifest.read(change[0])
479 _date2 = util.datestr(change[2])
479 _date2 = util.datestr(change[2])
480 def date2(f):
480 def date2(f):
481 return _date2
481 return _date2
482 def read(f):
482 def read(f):
483 return repo.file(f).read(mmap2[f])
483 return repo.file(f).read(mmap2[f])
484 else:
484 else:
485 tz = util.makedate()[1]
485 tz = util.makedate()[1]
486 _date2 = util.datestr()
486 _date2 = util.datestr()
487 def date2(f):
487 def date2(f):
488 try:
488 try:
489 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
489 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
490 except OSError, err:
490 except OSError, err:
491 if err.errno != errno.ENOENT: raise
491 if err.errno != errno.ENOENT: raise
492 return _date2
492 return _date2
493 def read(f):
493 def read(f):
494 return repo.wread(f)
494 return repo.wread(f)
495
495
496 if ui.quiet:
496 if ui.quiet:
497 r = None
497 r = None
498 else:
498 else:
499 hexfunc = ui.verbose and hex or short
499 hexfunc = ui.verbose and hex or short
500 r = [hexfunc(node) for node in [node1, node2] if node]
500 r = [hexfunc(node) for node in [node1, node2] if node]
501
501
502 diffopts = ui.diffopts()
502 diffopts = ui.diffopts()
503 showfunc = opts.get('show_function') or diffopts['showfunc']
503 showfunc = opts.get('show_function') or diffopts['showfunc']
504 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
504 ignorews = opts.get('ignore_all_space') or diffopts['ignorews']
505 ignorewsamount = opts.get('ignore_space_change') or \
505 ignorewsamount = opts.get('ignore_space_change') or \
506 diffopts['ignorewsamount']
506 diffopts['ignorewsamount']
507 ignoreblanklines = opts.get('ignore_blank_lines') or \
507 ignoreblanklines = opts.get('ignore_blank_lines') or \
508 diffopts['ignoreblanklines']
508 diffopts['ignoreblanklines']
509
509
510 all = modified + added + removed
510 all = modified + added + removed
511 all.sort()
511 all.sort()
512 for f in all:
512 for f in all:
513 to = None
513 to = None
514 tn = None
514 tn = None
515 if f in mmap:
515 if f in mmap:
516 to = repo.file(f).read(mmap[f])
516 to = repo.file(f).read(mmap[f])
517 if f not in removed:
517 if f not in removed:
518 tn = read(f)
518 tn = read(f)
519 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
519 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, text=text,
520 showfunc=showfunc, ignorews=ignorews,
520 showfunc=showfunc, ignorews=ignorews,
521 ignorewsamount=ignorewsamount,
521 ignorewsamount=ignorewsamount,
522 ignoreblanklines=ignoreblanklines))
522 ignoreblanklines=ignoreblanklines))
523
523
524 def trimuser(ui, name, rev, revcache):
524 def trimuser(ui, name, rev, revcache):
525 """trim the name of the user who committed a change"""
525 """trim the name of the user who committed a change"""
526 user = revcache.get(rev)
526 user = revcache.get(rev)
527 if user is None:
527 if user is None:
528 user = revcache[rev] = ui.shortuser(name)
528 user = revcache[rev] = ui.shortuser(name)
529 return user
529 return user
530
530
531 class changeset_printer(object):
531 class changeset_printer(object):
532 '''show changeset information when templating not requested.'''
532 '''show changeset information when templating not requested.'''
533
533
534 def __init__(self, ui, repo):
534 def __init__(self, ui, repo):
535 self.ui = ui
535 self.ui = ui
536 self.repo = repo
536 self.repo = repo
537
537
538 def show(self, rev=0, changenode=None, brinfo=None):
538 def show(self, rev=0, changenode=None, brinfo=None):
539 '''show a single changeset or file revision'''
539 '''show a single changeset or file revision'''
540 log = self.repo.changelog
540 log = self.repo.changelog
541 if changenode is None:
541 if changenode is None:
542 changenode = log.node(rev)
542 changenode = log.node(rev)
543 elif not rev:
543 elif not rev:
544 rev = log.rev(changenode)
544 rev = log.rev(changenode)
545
545
546 if self.ui.quiet:
546 if self.ui.quiet:
547 self.ui.write("%d:%s\n" % (rev, short(changenode)))
547 self.ui.write("%d:%s\n" % (rev, short(changenode)))
548 return
548 return
549
549
550 changes = log.read(changenode)
550 changes = log.read(changenode)
551 date = util.datestr(changes[2])
551 date = util.datestr(changes[2])
552
552
553 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
553 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
554 for p in log.parents(changenode)
554 for p in log.parents(changenode)
555 if self.ui.debugflag or p != nullid]
555 if self.ui.debugflag or p != nullid]
556 if (not self.ui.debugflag and len(parents) == 1 and
556 if (not self.ui.debugflag and len(parents) == 1 and
557 parents[0][0] == rev-1):
557 parents[0][0] == rev-1):
558 parents = []
558 parents = []
559
559
560 if self.ui.verbose:
560 if self.ui.verbose:
561 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
561 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
562 else:
562 else:
563 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
563 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
564
564
565 for tag in self.repo.nodetags(changenode):
565 for tag in self.repo.nodetags(changenode):
566 self.ui.status(_("tag: %s\n") % tag)
566 self.ui.status(_("tag: %s\n") % tag)
567 for parent in parents:
567 for parent in parents:
568 self.ui.write(_("parent: %d:%s\n") % parent)
568 self.ui.write(_("parent: %d:%s\n") % parent)
569
569
570 if brinfo and changenode in brinfo:
570 if brinfo and changenode in brinfo:
571 br = brinfo[changenode]
571 br = brinfo[changenode]
572 self.ui.write(_("branch: %s\n") % " ".join(br))
572 self.ui.write(_("branch: %s\n") % " ".join(br))
573
573
574 self.ui.debug(_("manifest: %d:%s\n") %
574 self.ui.debug(_("manifest: %d:%s\n") %
575 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
575 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
576 self.ui.status(_("user: %s\n") % changes[1])
576 self.ui.status(_("user: %s\n") % changes[1])
577 self.ui.status(_("date: %s\n") % date)
577 self.ui.status(_("date: %s\n") % date)
578
578
579 if self.ui.debugflag:
579 if self.ui.debugflag:
580 files = self.repo.changes(log.parents(changenode)[0], changenode)
580 files = self.repo.changes(log.parents(changenode)[0], changenode)
581 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
581 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
582 files):
582 files):
583 if value:
583 if value:
584 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
584 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
585 else:
585 else:
586 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
586 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
587
587
588 description = changes[4].strip()
588 description = changes[4].strip()
589 if description:
589 if description:
590 if self.ui.verbose:
590 if self.ui.verbose:
591 self.ui.status(_("description:\n"))
591 self.ui.status(_("description:\n"))
592 self.ui.status(description)
592 self.ui.status(description)
593 self.ui.status("\n\n")
593 self.ui.status("\n\n")
594 else:
594 else:
595 self.ui.status(_("summary: %s\n") %
595 self.ui.status(_("summary: %s\n") %
596 description.splitlines()[0])
596 description.splitlines()[0])
597 self.ui.status("\n")
597 self.ui.status("\n")
598
598
599 def show_changeset(ui, repo, opts):
599 def show_changeset(ui, repo, opts):
600 '''show one changeset. uses template or regular display. caller
600 '''show one changeset. uses template or regular display. caller
601 can pass in 'style' and 'template' options in opts.'''
601 can pass in 'style' and 'template' options in opts.'''
602
602
603 tmpl = opts.get('template')
603 tmpl = opts.get('template')
604 if tmpl:
604 if tmpl:
605 tmpl = templater.parsestring(tmpl, quoted=False)
605 tmpl = templater.parsestring(tmpl, quoted=False)
606 else:
606 else:
607 tmpl = ui.config('ui', 'logtemplate')
607 tmpl = ui.config('ui', 'logtemplate')
608 if tmpl: tmpl = templater.parsestring(tmpl)
608 if tmpl: tmpl = templater.parsestring(tmpl)
609 mapfile = opts.get('style') or ui.config('ui', 'style')
609 mapfile = opts.get('style') or ui.config('ui', 'style')
610 if tmpl or mapfile:
610 if tmpl or mapfile:
611 if mapfile:
611 if mapfile:
612 if not os.path.isfile(mapfile):
612 if not os.path.isfile(mapfile):
613 mapname = templater.templatepath('map-cmdline.' + mapfile)
613 mapname = templater.templatepath('map-cmdline.' + mapfile)
614 if not mapname: mapname = templater.templatepath(mapfile)
614 if not mapname: mapname = templater.templatepath(mapfile)
615 if mapname: mapfile = mapname
615 if mapname: mapfile = mapname
616 try:
616 try:
617 t = templater.changeset_templater(ui, repo, mapfile)
617 t = templater.changeset_templater(ui, repo, mapfile)
618 except SyntaxError, inst:
618 except SyntaxError, inst:
619 raise util.Abort(inst.args[0])
619 raise util.Abort(inst.args[0])
620 if tmpl: t.use_template(tmpl)
620 if tmpl: t.use_template(tmpl)
621 return t
621 return t
622 return changeset_printer(ui, repo)
622 return changeset_printer(ui, repo)
623
623
624 def setremoteconfig(ui, opts):
624 def setremoteconfig(ui, opts):
625 "copy remote options to ui tree"
625 "copy remote options to ui tree"
626 if opts.get('ssh'):
626 if opts.get('ssh'):
627 ui.setconfig("ui", "ssh", opts['ssh'])
627 ui.setconfig("ui", "ssh", opts['ssh'])
628 if opts.get('remotecmd'):
628 if opts.get('remotecmd'):
629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
629 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
630
630
631 def show_version(ui):
631 def show_version(ui):
632 """output version and copyright information"""
632 """output version and copyright information"""
633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
633 ui.write(_("Mercurial Distributed SCM (version %s)\n")
634 % version.get_version())
634 % version.get_version())
635 ui.status(_(
635 ui.status(_(
636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
636 "\nCopyright (C) 2005 Matt Mackall <mpm@selenic.com>\n"
637 "This is free software; see the source for copying conditions. "
637 "This is free software; see the source for copying conditions. "
638 "There is NO\nwarranty; "
638 "There is NO\nwarranty; "
639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
639 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
640 ))
640 ))
641
641
642 def help_(ui, name=None, with_version=False):
642 def help_(ui, name=None, with_version=False):
643 """show help for a command, extension, or list of commands
643 """show help for a command, extension, or list of commands
644
644
645 With no arguments, print a list of commands and short help.
645 With no arguments, print a list of commands and short help.
646
646
647 Given a command name, print help for that command.
647 Given a command name, print help for that command.
648
648
649 Given an extension name, print help for that extension, and the
649 Given an extension name, print help for that extension, and the
650 commands it provides."""
650 commands it provides."""
651 option_lists = []
651 option_lists = []
652
652
653 def helpcmd(name):
653 def helpcmd(name):
654 if with_version:
654 if with_version:
655 show_version(ui)
655 show_version(ui)
656 ui.write('\n')
656 ui.write('\n')
657 aliases, i = findcmd(name)
657 aliases, i = findcmd(name)
658 # synopsis
658 # synopsis
659 ui.write("%s\n\n" % i[2])
659 ui.write("%s\n\n" % i[2])
660
660
661 # description
661 # description
662 doc = i[0].__doc__
662 doc = i[0].__doc__
663 if not doc:
663 if not doc:
664 doc = _("(No help text available)")
664 doc = _("(No help text available)")
665 if ui.quiet:
665 if ui.quiet:
666 doc = doc.splitlines(0)[0]
666 doc = doc.splitlines(0)[0]
667 ui.write("%s\n" % doc.rstrip())
667 ui.write("%s\n" % doc.rstrip())
668
668
669 if not ui.quiet:
669 if not ui.quiet:
670 # aliases
670 # aliases
671 if len(aliases) > 1:
671 if len(aliases) > 1:
672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
672 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
673
673
674 # options
674 # options
675 if i[1]:
675 if i[1]:
676 option_lists.append(("options", i[1]))
676 option_lists.append(("options", i[1]))
677
677
678 def helplist(select=None):
678 def helplist(select=None):
679 h = {}
679 h = {}
680 cmds = {}
680 cmds = {}
681 for c, e in table.items():
681 for c, e in table.items():
682 f = c.split("|", 1)[0]
682 f = c.split("|", 1)[0]
683 if select and not select(f):
683 if select and not select(f):
684 continue
684 continue
685 if name == "shortlist" and not f.startswith("^"):
685 if name == "shortlist" and not f.startswith("^"):
686 continue
686 continue
687 f = f.lstrip("^")
687 f = f.lstrip("^")
688 if not ui.debugflag and f.startswith("debug"):
688 if not ui.debugflag and f.startswith("debug"):
689 continue
689 continue
690 doc = e[0].__doc__
690 doc = e[0].__doc__
691 if not doc:
691 if not doc:
692 doc = _("(No help text available)")
692 doc = _("(No help text available)")
693 h[f] = doc.splitlines(0)[0].rstrip()
693 h[f] = doc.splitlines(0)[0].rstrip()
694 cmds[f] = c.lstrip("^")
694 cmds[f] = c.lstrip("^")
695
695
696 fns = h.keys()
696 fns = h.keys()
697 fns.sort()
697 fns.sort()
698 m = max(map(len, fns))
698 m = max(map(len, fns))
699 for f in fns:
699 for f in fns:
700 if ui.verbose:
700 if ui.verbose:
701 commands = cmds[f].replace("|",", ")
701 commands = cmds[f].replace("|",", ")
702 ui.write(" %s:\n %s\n"%(commands, h[f]))
702 ui.write(" %s:\n %s\n"%(commands, h[f]))
703 else:
703 else:
704 ui.write(' %-*s %s\n' % (m, f, h[f]))
704 ui.write(' %-*s %s\n' % (m, f, h[f]))
705
705
706 def helpext(name):
706 def helpext(name):
707 try:
707 try:
708 mod = findext(name)
708 mod = findext(name)
709 except KeyError:
709 except KeyError:
710 raise UnknownCommand(name)
710 raise UnknownCommand(name)
711
711
712 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
712 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
713 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
713 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
714 for d in doc[1:]:
714 for d in doc[1:]:
715 ui.write(d, '\n')
715 ui.write(d, '\n')
716
716
717 ui.status('\n')
717 ui.status('\n')
718 if ui.verbose:
718 if ui.verbose:
719 ui.status(_('list of commands:\n\n'))
719 ui.status(_('list of commands:\n\n'))
720 else:
720 else:
721 ui.status(_('list of commands (use "hg help -v %s" '
721 ui.status(_('list of commands (use "hg help -v %s" '
722 'to show aliases and global options):\n\n') % name)
722 'to show aliases and global options):\n\n') % name)
723
723
724 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
724 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
725 helplist(modcmds.has_key)
725 helplist(modcmds.has_key)
726
726
727 if name and name != 'shortlist':
727 if name and name != 'shortlist':
728 try:
728 try:
729 helpcmd(name)
729 helpcmd(name)
730 except UnknownCommand:
730 except UnknownCommand:
731 helpext(name)
731 helpext(name)
732
732
733 else:
733 else:
734 # program name
734 # program name
735 if ui.verbose or with_version:
735 if ui.verbose or with_version:
736 show_version(ui)
736 show_version(ui)
737 else:
737 else:
738 ui.status(_("Mercurial Distributed SCM\n"))
738 ui.status(_("Mercurial Distributed SCM\n"))
739 ui.status('\n')
739 ui.status('\n')
740
740
741 # list of commands
741 # list of commands
742 if name == "shortlist":
742 if name == "shortlist":
743 ui.status(_('basic commands (use "hg help" '
743 ui.status(_('basic commands (use "hg help" '
744 'for the full list or option "-v" for details):\n\n'))
744 'for the full list or option "-v" for details):\n\n'))
745 elif ui.verbose:
745 elif ui.verbose:
746 ui.status(_('list of commands:\n\n'))
746 ui.status(_('list of commands:\n\n'))
747 else:
747 else:
748 ui.status(_('list of commands (use "hg help -v" '
748 ui.status(_('list of commands (use "hg help -v" '
749 'to show aliases and global options):\n\n'))
749 'to show aliases and global options):\n\n'))
750
750
751 helplist()
751 helplist()
752
752
753 # global options
753 # global options
754 if ui.verbose:
754 if ui.verbose:
755 option_lists.append(("global options", globalopts))
755 option_lists.append(("global options", globalopts))
756
756
757 # list all option lists
757 # list all option lists
758 opt_output = []
758 opt_output = []
759 for title, options in option_lists:
759 for title, options in option_lists:
760 opt_output.append(("\n%s:\n" % title, None))
760 opt_output.append(("\n%s:\n" % title, None))
761 for shortopt, longopt, default, desc in options:
761 for shortopt, longopt, default, desc in options:
762 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
762 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
763 longopt and " --%s" % longopt),
763 longopt and " --%s" % longopt),
764 "%s%s" % (desc,
764 "%s%s" % (desc,
765 default
765 default
766 and _(" (default: %s)") % default
766 and _(" (default: %s)") % default
767 or "")))
767 or "")))
768
768
769 if opt_output:
769 if opt_output:
770 opts_len = max([len(line[0]) for line in opt_output if line[1]])
770 opts_len = max([len(line[0]) for line in opt_output if line[1]])
771 for first, second in opt_output:
771 for first, second in opt_output:
772 if second:
772 if second:
773 ui.write(" %-*s %s\n" % (opts_len, first, second))
773 ui.write(" %-*s %s\n" % (opts_len, first, second))
774 else:
774 else:
775 ui.write("%s\n" % first)
775 ui.write("%s\n" % first)
776
776
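# A minimal stand-alone sketch of the two-column alignment used for the option
# listing above: measure the widest left-hand entry, then pad with "%-*s".
# The helper name and the rows are made up for illustration only.
def _sketch_align_options():
    rows = [("\noptions:\n", None),            # header rows have no second column
            ("-v --verbose", "enable additional output"),
            ("-q --quiet", "suppress output")]
    width = max([len(first) for first, second in rows if second])
    out = []
    for first, second in rows:
        if second:
            out.append(" %-*s  %s" % (width, first, second))
        else:
            out.append(first)
    return "\n".join(out)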
777 # Commands start here, listed alphabetically
777 # Commands start here, listed alphabetically
778
778
779 def add(ui, repo, *pats, **opts):
779 def add(ui, repo, *pats, **opts):
780 """add the specified files on the next commit
780 """add the specified files on the next commit
781
781
782 Schedule files to be version controlled and added to the repository.
782 Schedule files to be version controlled and added to the repository.
783
783
784 The files will be added to the repository at the next commit.
784 The files will be added to the repository at the next commit.
785
785
786 If no names are given, add all files in the repository.
786 If no names are given, add all files in the repository.
787 """
787 """
788
788
789 names = []
789 names = []
790 for src, abs, rel, exact in walk(repo, pats, opts):
790 for src, abs, rel, exact in walk(repo, pats, opts):
791 if exact:
791 if exact:
792 if ui.verbose:
792 if ui.verbose:
793 ui.status(_('adding %s\n') % rel)
793 ui.status(_('adding %s\n') % rel)
794 names.append(abs)
794 names.append(abs)
795 elif repo.dirstate.state(abs) == '?':
795 elif repo.dirstate.state(abs) == '?':
796 ui.status(_('adding %s\n') % rel)
796 ui.status(_('adding %s\n') % rel)
797 names.append(abs)
797 names.append(abs)
798 if not opts.get('dry_run'):
798 if not opts.get('dry_run'):
799 repo.add(names)
799 repo.add(names)
800
800
801 def addremove(ui, repo, *pats, **opts):
801 def addremove(ui, repo, *pats, **opts):
802 """add all new files, delete all missing files (DEPRECATED)
802 """add all new files, delete all missing files (DEPRECATED)
803
803
804 (DEPRECATED)
804 (DEPRECATED)
805 Add all new files and remove all missing files from the repository.
805 Add all new files and remove all missing files from the repository.
806
806
807 New files are ignored if they match any of the patterns in .hgignore. As
807 New files are ignored if they match any of the patterns in .hgignore. As
808 with add, these changes take effect at the next commit.
808 with add, these changes take effect at the next commit.
809
809
810 This command is now deprecated and will be removed in a future
810 This command is now deprecated and will be removed in a future
811 release. Please use add and remove --after instead.
811 release. Please use add and remove --after instead.
812 """
812 """
813 ui.warn(_('(the addremove command is deprecated; use add and remove '
813 ui.warn(_('(the addremove command is deprecated; use add and remove '
814 '--after instead)\n'))
814 '--after instead)\n'))
815 return addremove_lock(ui, repo, pats, opts)
815 return addremove_lock(ui, repo, pats, opts)
816
816
817 def addremove_lock(ui, repo, pats, opts, wlock=None):
817 def addremove_lock(ui, repo, pats, opts, wlock=None):
818 add, remove = [], []
818 add, remove = [], []
819 for src, abs, rel, exact in walk(repo, pats, opts):
819 for src, abs, rel, exact in walk(repo, pats, opts):
820 if src == 'f' and repo.dirstate.state(abs) == '?':
820 if src == 'f' and repo.dirstate.state(abs) == '?':
821 add.append(abs)
821 add.append(abs)
822 if ui.verbose or not exact:
822 if ui.verbose or not exact:
823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
823 ui.status(_('adding %s\n') % ((pats and rel) or abs))
824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
824 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
825 remove.append(abs)
825 remove.append(abs)
826 if ui.verbose or not exact:
826 if ui.verbose or not exact:
827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
827 ui.status(_('removing %s\n') % ((pats and rel) or abs))
828 if not opts.get('dry_run'):
828 if not opts.get('dry_run'):
829 repo.add(add, wlock=wlock)
829 repo.add(add, wlock=wlock)
830 repo.remove(remove, wlock=wlock)
830 repo.remove(remove, wlock=wlock)
831
831
832 def annotate(ui, repo, *pats, **opts):
832 def annotate(ui, repo, *pats, **opts):
833 """show changeset information per file line
833 """show changeset information per file line
834
834
835 List changes in files, showing the revision id responsible for each line.
835 List changes in files, showing the revision id responsible for each line.
836
836
837 This command is useful for discovering who made a change or when a change
837 This command is useful for discovering who made a change or when a change
838 took place.
838 took place.
839
839
840 Without the -a option, annotate will avoid processing files it
840 Without the -a option, annotate will avoid processing files it
841 detects as binary. With -a, annotate will generate an annotation
841 detects as binary. With -a, annotate will generate an annotation
842 anyway, probably with undesirable results.
842 anyway, probably with undesirable results.
843 """
843 """
844 def getnode(rev):
844 def getnode(rev):
845 return short(repo.changelog.node(rev))
845 return short(repo.changelog.node(rev))
846
846
847 ucache = {}
847 ucache = {}
848 def getname(rev):
848 def getname(rev):
849 try:
849 try:
850 return ucache[rev]
850 return ucache[rev]
851 except:
851 except:
852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
852 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
853 ucache[rev] = u
853 ucache[rev] = u
854 return u
854 return u
855
855
856 dcache = {}
856 dcache = {}
857 def getdate(rev):
857 def getdate(rev):
858 datestr = dcache.get(rev)
858 datestr = dcache.get(rev)
859 if datestr is None:
859 if datestr is None:
860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
860 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
861 return datestr
861 return datestr
862
862
863 if not pats:
863 if not pats:
864 raise util.Abort(_('at least one file name or pattern required'))
864 raise util.Abort(_('at least one file name or pattern required'))
865
865
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
866 opmap = [['user', getname], ['number', str], ['changeset', getnode],
867 ['date', getdate]]
867 ['date', getdate]]
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
868 if not opts['user'] and not opts['changeset'] and not opts['date']:
869 opts['number'] = 1
869 opts['number'] = 1
870
870
871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
871 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
872
872
873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
873 for src, abs, rel, exact in walk(repo, pats, opts, node=ctx.node()):
874 fctx = ctx.filectx(abs)
874 fctx = ctx.filectx(abs)
875 if not opts['text'] and util.binary(fctx.data()):
875 if not opts['text'] and util.binary(fctx.data()):
876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
876 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
877 continue
877 continue
878
878
879 lines = fctx.annotate()
879 lines = fctx.annotate()
880 pieces = []
880 pieces = []
881
881
882 for o, f in opmap:
882 for o, f in opmap:
883 if opts[o]:
883 if opts[o]:
884 l = [f(n) for n, dummy in lines]
884 l = [f(n) for n, dummy in lines]
885 if l:
885 if l:
886 m = max(map(len, l))
886 m = max(map(len, l))
887 pieces.append(["%*s" % (m, x) for x in l])
887 pieces.append(["%*s" % (m, x) for x in l])
888
888
889 if pieces:
889 if pieces:
890 for p, l in zip(zip(*pieces), lines):
890 for p, l in zip(zip(*pieces), lines):
891 ui.write("%s: %s" % (" ".join(p), l[1]))
891 ui.write("%s: %s" % (" ".join(p), l[1]))
892
892
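# A stand-alone sketch of the column layout annotate() builds above: one list
# per requested field, each right-aligned to its own width, then zipped back
# into per-line rows.  The helper name and sample data are hypothetical.
def _sketch_annotate_columns():
    lines = [(0, "first line\n"), (12, "second line\n"), (3, "third line\n")]
    columns = []
    for field, fn in [("number", str), ("user", lambda r: "user%d" % r)]:
        values = [fn(rev) for rev, text in lines]
        width = max(map(len, values))
        columns.append(["%*s" % (width, v) for v in values])
    out = []
    for pieces, (rev, text) in zip(zip(*columns), lines):
        out.append("%s: %s" % (" ".join(pieces), text))
    return "".join(out)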
893 def archive(ui, repo, dest, **opts):
893 def archive(ui, repo, dest, **opts):
894 '''create unversioned archive of a repository revision
894 '''create unversioned archive of a repository revision
895
895
896 By default, the revision used is the parent of the working
896 By default, the revision used is the parent of the working
897 directory; use "-r" to specify a different revision.
897 directory; use "-r" to specify a different revision.
898
898
899 To specify the type of archive to create, use "-t". Valid
899 To specify the type of archive to create, use "-t". Valid
900 types are:
900 types are:
901
901
902 "files" (default): a directory full of files
902 "files" (default): a directory full of files
903 "tar": tar archive, uncompressed
903 "tar": tar archive, uncompressed
904 "tbz2": tar archive, compressed using bzip2
904 "tbz2": tar archive, compressed using bzip2
905 "tgz": tar archive, compressed using gzip
905 "tgz": tar archive, compressed using gzip
906 "uzip": zip archive, uncompressed
906 "uzip": zip archive, uncompressed
907 "zip": zip archive, compressed using deflate
907 "zip": zip archive, compressed using deflate
908
908
909 The exact name of the destination archive or directory is given
909 The exact name of the destination archive or directory is given
910 using a format string; see "hg help export" for details.
910 using a format string; see "hg help export" for details.
911
911
912 Each member added to an archive file has a directory prefix
912 Each member added to an archive file has a directory prefix
913 prepended. Use "-p" to specify a format string for the prefix.
913 prepended. Use "-p" to specify a format string for the prefix.
914 The default is the basename of the archive, with suffixes removed.
914 The default is the basename of the archive, with suffixes removed.
915 '''
915 '''
916
916
917 if opts['rev']:
917 if opts['rev']:
918 node = repo.lookup(opts['rev'])
918 node = repo.lookup(opts['rev'])
919 else:
919 else:
920 node, p2 = repo.dirstate.parents()
920 node, p2 = repo.dirstate.parents()
921 if p2 != nullid:
921 if p2 != nullid:
922 raise util.Abort(_('uncommitted merge - please provide a '
922 raise util.Abort(_('uncommitted merge - please provide a '
923 'specific revision'))
923 'specific revision'))
924
924
925 dest = make_filename(repo, dest, node)
925 dest = make_filename(repo, dest, node)
926 if os.path.realpath(dest) == repo.root:
926 if os.path.realpath(dest) == repo.root:
927 raise util.Abort(_('repository root cannot be destination'))
927 raise util.Abort(_('repository root cannot be destination'))
928 dummy, matchfn, dummy = matchpats(repo, [], opts)
928 dummy, matchfn, dummy = matchpats(repo, [], opts)
929 kind = opts.get('type') or 'files'
929 kind = opts.get('type') or 'files'
930 prefix = opts['prefix']
930 prefix = opts['prefix']
931 if dest == '-':
931 if dest == '-':
932 if kind == 'files':
932 if kind == 'files':
933 raise util.Abort(_('cannot archive plain files to stdout'))
933 raise util.Abort(_('cannot archive plain files to stdout'))
934 dest = sys.stdout
934 dest = sys.stdout
935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
935 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
936 prefix = make_filename(repo, prefix, node)
936 prefix = make_filename(repo, prefix, node)
937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
937 archival.archive(repo, dest, node, kind, not opts['no_decode'],
938 matchfn, prefix)
938 matchfn, prefix)
939
939
940 def backout(ui, repo, rev, **opts):
940 def backout(ui, repo, rev, **opts):
941 '''reverse effect of earlier changeset
941 '''reverse effect of earlier changeset
942
942
943 Commit the backed out changes as a new changeset. The new
943 Commit the backed out changes as a new changeset. The new
944 changeset is a child of the backed out changeset.
944 changeset is a child of the backed out changeset.
945
945
946 If you back out a changeset other than the tip, a new head is
946 If you back out a changeset other than the tip, a new head is
947 created. This head is the parent of the working directory. If
947 created. This head is the parent of the working directory. If
948 you back out an old changeset, your working directory will appear
948 you back out an old changeset, your working directory will appear
949 old after the backout. You should merge the backout changeset
949 old after the backout. You should merge the backout changeset
950 with another head.
950 with another head.
951
951
952 The --merge option remembers the parent of the working directory
952 The --merge option remembers the parent of the working directory
953 before starting the backout, then merges the new head with that
953 before starting the backout, then merges the new head with that
954 changeset afterwards. This saves you from doing the merge by
954 changeset afterwards. This saves you from doing the merge by
955 hand. The result of this merge is not committed, as for a normal
955 hand. The result of this merge is not committed, as for a normal
956 merge.'''
956 merge.'''
957
957
958 bail_if_changed(repo)
958 bail_if_changed(repo)
959 op1, op2 = repo.dirstate.parents()
959 op1, op2 = repo.dirstate.parents()
960 if op2 != nullid:
960 if op2 != nullid:
961 raise util.Abort(_('outstanding uncommitted merge'))
961 raise util.Abort(_('outstanding uncommitted merge'))
962 node = repo.lookup(rev)
962 node = repo.lookup(rev)
963 p1, p2 = repo.changelog.parents(node)
963 p1, p2 = repo.changelog.parents(node)
964 if p1 == nullid:
964 if p1 == nullid:
965 raise util.Abort(_('cannot back out a change with no parents'))
965 raise util.Abort(_('cannot back out a change with no parents'))
966 if p2 != nullid:
966 if p2 != nullid:
967 if not opts['parent']:
967 if not opts['parent']:
968 raise util.Abort(_('cannot back out a merge changeset without '
968 raise util.Abort(_('cannot back out a merge changeset without '
969 '--parent'))
969 '--parent'))
970 p = repo.lookup(opts['parent'])
970 p = repo.lookup(opts['parent'])
971 if p not in (p1, p2):
971 if p not in (p1, p2):
972 raise util.Abort(_('%s is not a parent of %s') %
972 raise util.Abort(_('%s is not a parent of %s') %
973 (short(p), short(node)))
973 (short(p), short(node)))
974 parent = p
974 parent = p
975 else:
975 else:
976 if opts['parent']:
976 if opts['parent']:
977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
977 raise util.Abort(_('cannot use --parent on non-merge changeset'))
978 parent = p1
978 parent = p1
979 hg.clean(repo, node, show_stats=False)
979 hg.clean(repo, node, show_stats=False)
980 revert_opts = opts.copy()
980 revert_opts = opts.copy()
981 revert_opts['rev'] = hex(parent)
981 revert_opts['rev'] = hex(parent)
982 revert(ui, repo, **revert_opts)
982 revert(ui, repo, **revert_opts)
983 commit_opts = opts.copy()
983 commit_opts = opts.copy()
984 commit_opts['addremove'] = False
984 commit_opts['addremove'] = False
985 if not commit_opts['message'] and not commit_opts['logfile']:
985 if not commit_opts['message'] and not commit_opts['logfile']:
986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
986 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
987 commit_opts['force_editor'] = True
987 commit_opts['force_editor'] = True
988 commit(ui, repo, **commit_opts)
988 commit(ui, repo, **commit_opts)
989 def nice(node):
989 def nice(node):
990 return '%d:%s' % (repo.changelog.rev(node), short(node))
990 return '%d:%s' % (repo.changelog.rev(node), short(node))
991 ui.status(_('changeset %s backs out changeset %s\n') %
991 ui.status(_('changeset %s backs out changeset %s\n') %
992 (nice(repo.changelog.tip()), nice(node)))
992 (nice(repo.changelog.tip()), nice(node)))
993 if op1 != node:
993 if op1 != node:
994 if opts['merge']:
994 if opts['merge']:
995 ui.status(_('merging with changeset %s\n') % nice(op1))
995 ui.status(_('merging with changeset %s\n') % nice(op1))
996 n = _lookup(repo, hex(op1))
996 n = _lookup(repo, hex(op1))
997 hg.merge(repo, n)
997 hg.merge(repo, n)
998 else:
998 else:
999 ui.status(_('the backout changeset is a new head - '
999 ui.status(_('the backout changeset is a new head - '
1000 'do not forget to merge\n'))
1000 'do not forget to merge\n'))
1001 ui.status(_('(use "backout --merge" '
1001 ui.status(_('(use "backout --merge" '
1002 'if you want to auto-merge)\n'))
1002 'if you want to auto-merge)\n'))
1003
1003
1004 def bundle(ui, repo, fname, dest=None, **opts):
1004 def bundle(ui, repo, fname, dest=None, **opts):
1005 """create a changegroup file
1005 """create a changegroup file
1006
1006
1007 Generate a compressed changegroup file collecting all changesets
1007 Generate a compressed changegroup file collecting all changesets
1008 not found in the other repository.
1008 not found in the other repository.
1009
1009
1010 This file can then be transferred using conventional means and
1010 This file can then be transferred using conventional means and
1011 applied to another repository with the unbundle command. This is
1011 applied to another repository with the unbundle command. This is
1012 useful when native push and pull are not available or when
1012 useful when native push and pull are not available or when
1013 exporting an entire repository is undesirable. The standard file
1013 exporting an entire repository is undesirable. The standard file
1014 extension is ".hg".
1014 extension is ".hg".
1015
1015
1016 Unlike import/export, this exactly preserves all changeset
1016 Unlike import/export, this exactly preserves all changeset
1017 contents including permissions, rename data, and revision history.
1017 contents including permissions, rename data, and revision history.
1018 """
1018 """
1019 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1019 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1020 other = hg.repository(ui, dest)
1020 other = hg.repository(ui, dest)
1021 o = repo.findoutgoing(other, force=opts['force'])
1021 o = repo.findoutgoing(other, force=opts['force'])
1022 cg = repo.changegroup(o, 'bundle')
1022 cg = repo.changegroup(o, 'bundle')
1023 write_bundle(cg, fname)
1023 write_bundle(cg, fname)
1024
1024
1025 def cat(ui, repo, file1, *pats, **opts):
1025 def cat(ui, repo, file1, *pats, **opts):
1026 """output the latest or given revisions of files
1026 """output the latest or given revisions of files
1027
1027
1028 Print the specified files as they were at the given revision.
1028 Print the specified files as they were at the given revision.
1029 If no revision is given then the tip is used.
1029 If no revision is given then the tip is used.
1030
1030
1031 Output may be to a file, in which case the name of the file is
1031 Output may be to a file, in which case the name of the file is
1032 given using a format string. The formatting rules are the same as
1032 given using a format string. The formatting rules are the same as
1033 for the export command, with the following additions:
1033 for the export command, with the following additions:
1034
1034
1035 %s basename of file being printed
1035 %s basename of file being printed
1036 %d dirname of file being printed, or '.' if in repo root
1036 %d dirname of file being printed, or '.' if in repo root
1037 %p root-relative path name of file being printed
1037 %p root-relative path name of file being printed
1038 """
1038 """
1039 ctx = repo.changectx(opts['rev'] or "-1")
1039 ctx = repo.changectx(opts['rev'] or "-1")
1040 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1040 for src, abs, rel, exact in walk(repo, (file1,) + pats, opts, ctx.node()):
1041 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1041 fp = make_file(repo, opts['output'], ctx.node(), pathname=abs)
1042 fp.write(ctx.filectx(abs).data())
1042 fp.write(ctx.filectx(abs).data())
1043
1043
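# A rough sketch of how the %s/%d/%p keys documented in cat() might expand in
# an output file name.  The real work is done by make_file(); this stand-alone
# helper (hypothetical name) only illustrates the documented substitutions.
def _sketch_cat_output_name(pattern, repopath):
    import os                     # repeated here so the sketch stands alone
    mapping = {'s': os.path.basename(repopath),
               'd': os.path.dirname(repopath) or '.',
               'p': repopath,
               '%': '%'}
    out, i = [], 0
    while i < len(pattern):
        if pattern[i] == '%' and i + 1 < len(pattern) and pattern[i + 1] in mapping:
            out.append(mapping[pattern[i + 1]])
            i += 2
        else:
            out.append(pattern[i])
            i += 1
    return ''.join(out)
# e.g. _sketch_cat_output_name('out/%d/%s.orig', 'lib/util.py')
#      -> 'out/lib/util.py.orig'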
1044 def clone(ui, source, dest=None, **opts):
1044 def clone(ui, source, dest=None, **opts):
1045 """make a copy of an existing repository
1045 """make a copy of an existing repository
1046
1046
1047 Create a copy of an existing repository in a new directory.
1047 Create a copy of an existing repository in a new directory.
1048
1048
1049 If no destination directory name is specified, it defaults to the
1049 If no destination directory name is specified, it defaults to the
1050 basename of the source.
1050 basename of the source.
1051
1051
1052 The location of the source is added to the new repository's
1052 The location of the source is added to the new repository's
1053 .hg/hgrc file, as the default to be used for future pulls.
1053 .hg/hgrc file, as the default to be used for future pulls.
1054
1054
1055 For efficiency, hardlinks are used for cloning whenever the source
1055 For efficiency, hardlinks are used for cloning whenever the source
1056 and destination are on the same filesystem (note this applies only
1056 and destination are on the same filesystem (note this applies only
1057 to the repository data, not to the checked out files). Some
1057 to the repository data, not to the checked out files). Some
1058 filesystems, such as AFS, implement hardlinking incorrectly, but
1058 filesystems, such as AFS, implement hardlinking incorrectly, but
1059 do not report errors. In these cases, use the --pull option to
1059 do not report errors. In these cases, use the --pull option to
1060 avoid hardlinking.
1060 avoid hardlinking.
1061
1061
1062 You can safely clone repositories and checked out files using full
1062 You can safely clone repositories and checked out files using full
1063 hardlinks with
1063 hardlinks with
1064
1064
1065 $ cp -al REPO REPOCLONE
1065 $ cp -al REPO REPOCLONE
1066
1066
1067 which is the fastest way to clone. However, the operation is not
1067 which is the fastest way to clone. However, the operation is not
1068 atomic (making sure REPO is not modified during the operation is
1068 atomic (making sure REPO is not modified during the operation is
1069 up to you) and you have to make sure your editor breaks hardlinks
1069 up to you) and you have to make sure your editor breaks hardlinks
1070 (Emacs and most Linux Kernel tools do so).
1070 (Emacs and most Linux Kernel tools do so).
1071
1071
1072 If you use the -r option to clone up to a specific revision, no
1072 If you use the -r option to clone up to a specific revision, no
1073 subsequent revisions will be present in the cloned repository.
1073 subsequent revisions will be present in the cloned repository.
1074 This option implies --pull, even on local repositories.
1074 This option implies --pull, even on local repositories.
1075
1075
1076 See pull for valid source format details.
1076 See pull for valid source format details.
1077
1077
1078 It is possible to specify an ssh:// URL as the destination, but no
1078 It is possible to specify an ssh:// URL as the destination, but no
1079 .hg/hgrc will be created on the remote side. Look at the help text
1079 .hg/hgrc will be created on the remote side. Look at the help text
1080 for the pull command for important details about ssh:// URLs.
1080 for the pull command for important details about ssh:// URLs.
1081 """
1081 """
1082 setremoteconfig(ui, opts)
1082 setremoteconfig(ui, opts)
1083 hg.clone(ui, ui.expandpath(source), dest,
1083 hg.clone(ui, ui.expandpath(source), dest,
1084 pull=opts['pull'],
1084 pull=opts['pull'],
1085 stream=opts['uncompressed'],
1085 stream=opts['uncompressed'],
1086 rev=opts['rev'],
1086 rev=opts['rev'],
1087 update=not opts['noupdate'])
1087 update=not opts['noupdate'])
1088
1088
1089 def commit(ui, repo, *pats, **opts):
1089 def commit(ui, repo, *pats, **opts):
1090 """commit the specified files or all outstanding changes
1090 """commit the specified files or all outstanding changes
1091
1091
1092 Commit changes to the given files into the repository.
1092 Commit changes to the given files into the repository.
1093
1093
1094 If a list of files is omitted, all changes reported by "hg status"
1094 If a list of files is omitted, all changes reported by "hg status"
1095 will be committed.
1095 will be committed.
1096
1096
1097 If no commit message is specified, the editor configured in your hgrc
1097 If no commit message is specified, the editor configured in your hgrc
1098 or in the EDITOR environment variable is started to enter a message.
1098 or in the EDITOR environment variable is started to enter a message.
1099 """
1099 """
1100 message = logmessage(opts)
1100 message = logmessage(opts)
1101
1101
1102 if opts['addremove']:
1102 if opts['addremove']:
1103 addremove_lock(ui, repo, pats, opts)
1103 addremove_lock(ui, repo, pats, opts)
1104 fns, match, anypats = matchpats(repo, pats, opts)
1104 fns, match, anypats = matchpats(repo, pats, opts)
1105 if pats:
1105 if pats:
1106 modified, added, removed, deleted, unknown = (
1106 modified, added, removed, deleted, unknown = (
1107 repo.changes(files=fns, match=match))
1107 repo.changes(files=fns, match=match))
1108 files = modified + added + removed
1108 files = modified + added + removed
1109 else:
1109 else:
1110 files = []
1110 files = []
1111 try:
1111 try:
1112 repo.commit(files, message, opts['user'], opts['date'], match,
1112 repo.commit(files, message, opts['user'], opts['date'], match,
1113 force_editor=opts.get('force_editor'))
1113 force_editor=opts.get('force_editor'))
1114 except ValueError, inst:
1114 except ValueError, inst:
1115 raise util.Abort(str(inst))
1115 raise util.Abort(str(inst))
1116
1116
1117 def docopy(ui, repo, pats, opts, wlock):
1117 def docopy(ui, repo, pats, opts, wlock):
1118 # called with the repo lock held
1118 # called with the repo lock held
1119 cwd = repo.getcwd()
1119 cwd = repo.getcwd()
1120 errors = 0
1120 errors = 0
1121 copied = []
1121 copied = []
1122 targets = {}
1122 targets = {}
1123
1123
1124 def okaytocopy(abs, rel, exact):
1124 def okaytocopy(abs, rel, exact):
1125 reasons = {'?': _('is not managed'),
1125 reasons = {'?': _('is not managed'),
1126 'a': _('has been marked for add'),
1126 'a': _('has been marked for add'),
1127 'r': _('has been marked for remove')}
1127 'r': _('has been marked for remove')}
1128 state = repo.dirstate.state(abs)
1128 state = repo.dirstate.state(abs)
1129 reason = reasons.get(state)
1129 reason = reasons.get(state)
1130 if reason:
1130 if reason:
1131 if state == 'a':
1131 if state == 'a':
1132 origsrc = repo.dirstate.copied(abs)
1132 origsrc = repo.dirstate.copied(abs)
1133 if origsrc is not None:
1133 if origsrc is not None:
1134 return origsrc
1134 return origsrc
1135 if exact:
1135 if exact:
1136 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1136 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
1137 else:
1137 else:
1138 return abs
1138 return abs
1139
1139
1140 def copy(origsrc, abssrc, relsrc, target, exact):
1140 def copy(origsrc, abssrc, relsrc, target, exact):
1141 abstarget = util.canonpath(repo.root, cwd, target)
1141 abstarget = util.canonpath(repo.root, cwd, target)
1142 reltarget = util.pathto(cwd, abstarget)
1142 reltarget = util.pathto(cwd, abstarget)
1143 prevsrc = targets.get(abstarget)
1143 prevsrc = targets.get(abstarget)
1144 if prevsrc is not None:
1144 if prevsrc is not None:
1145 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1145 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
1146 (reltarget, abssrc, prevsrc))
1146 (reltarget, abssrc, prevsrc))
1147 return
1147 return
1148 if (not opts['after'] and os.path.exists(reltarget) or
1148 if (not opts['after'] and os.path.exists(reltarget) or
1149 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1149 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
1150 if not opts['force']:
1150 if not opts['force']:
1151 ui.warn(_('%s: not overwriting - file exists\n') %
1151 ui.warn(_('%s: not overwriting - file exists\n') %
1152 reltarget)
1152 reltarget)
1153 return
1153 return
1154 if not opts['after'] and not opts.get('dry_run'):
1154 if not opts['after'] and not opts.get('dry_run'):
1155 os.unlink(reltarget)
1155 os.unlink(reltarget)
1156 if opts['after']:
1156 if opts['after']:
1157 if not os.path.exists(reltarget):
1157 if not os.path.exists(reltarget):
1158 return
1158 return
1159 else:
1159 else:
1160 targetdir = os.path.dirname(reltarget) or '.'
1160 targetdir = os.path.dirname(reltarget) or '.'
1161 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1161 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1162 os.makedirs(targetdir)
1162 os.makedirs(targetdir)
1163 try:
1163 try:
1164 restore = repo.dirstate.state(abstarget) == 'r'
1164 restore = repo.dirstate.state(abstarget) == 'r'
1165 if restore and not opts.get('dry_run'):
1165 if restore and not opts.get('dry_run'):
1166 repo.undelete([abstarget], wlock)
1166 repo.undelete([abstarget], wlock)
1167 try:
1167 try:
1168 if not opts.get('dry_run'):
1168 if not opts.get('dry_run'):
1169 shutil.copyfile(relsrc, reltarget)
1169 shutil.copyfile(relsrc, reltarget)
1170 shutil.copymode(relsrc, reltarget)
1170 shutil.copymode(relsrc, reltarget)
1171 restore = False
1171 restore = False
1172 finally:
1172 finally:
1173 if restore:
1173 if restore:
1174 repo.remove([abstarget], wlock)
1174 repo.remove([abstarget], wlock)
1175 except shutil.Error, inst:
1175 except shutil.Error, inst:
1176 raise util.Abort(str(inst))
1176 raise util.Abort(str(inst))
1177 except IOError, inst:
1177 except IOError, inst:
1178 if inst.errno == errno.ENOENT:
1178 if inst.errno == errno.ENOENT:
1179 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1179 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1180 else:
1180 else:
1181 ui.warn(_('%s: cannot copy - %s\n') %
1181 ui.warn(_('%s: cannot copy - %s\n') %
1182 (relsrc, inst.strerror))
1182 (relsrc, inst.strerror))
1183 errors += 1
1183 errors += 1
1184 return
1184 return
1185 if ui.verbose or not exact:
1185 if ui.verbose or not exact:
1186 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1186 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1187 targets[abstarget] = abssrc
1187 targets[abstarget] = abssrc
1188 if abstarget != origsrc and not opts.get('dry_run'):
1188 if abstarget != origsrc and not opts.get('dry_run'):
1189 repo.copy(origsrc, abstarget, wlock)
1189 repo.copy(origsrc, abstarget, wlock)
1190 copied.append((abssrc, relsrc, exact))
1190 copied.append((abssrc, relsrc, exact))
1191
1191
1192 def targetpathfn(pat, dest, srcs):
1192 def targetpathfn(pat, dest, srcs):
1193 if os.path.isdir(pat):
1193 if os.path.isdir(pat):
1194 abspfx = util.canonpath(repo.root, cwd, pat)
1194 abspfx = util.canonpath(repo.root, cwd, pat)
1195 if destdirexists:
1195 if destdirexists:
1196 striplen = len(os.path.split(abspfx)[0])
1196 striplen = len(os.path.split(abspfx)[0])
1197 else:
1197 else:
1198 striplen = len(abspfx)
1198 striplen = len(abspfx)
1199 if striplen:
1199 if striplen:
1200 striplen += len(os.sep)
1200 striplen += len(os.sep)
1201 res = lambda p: os.path.join(dest, p[striplen:])
1201 res = lambda p: os.path.join(dest, p[striplen:])
1202 elif destdirexists:
1202 elif destdirexists:
1203 res = lambda p: os.path.join(dest, os.path.basename(p))
1203 res = lambda p: os.path.join(dest, os.path.basename(p))
1204 else:
1204 else:
1205 res = lambda p: dest
1205 res = lambda p: dest
1206 return res
1206 return res
1207
1207
1208 def targetpathafterfn(pat, dest, srcs):
1208 def targetpathafterfn(pat, dest, srcs):
1209 if util.patkind(pat, None)[0]:
1209 if util.patkind(pat, None)[0]:
1210 # a mercurial pattern
1210 # a mercurial pattern
1211 res = lambda p: os.path.join(dest, os.path.basename(p))
1211 res = lambda p: os.path.join(dest, os.path.basename(p))
1212 else:
1212 else:
1213 abspfx = util.canonpath(repo.root, cwd, pat)
1213 abspfx = util.canonpath(repo.root, cwd, pat)
1214 if len(abspfx) < len(srcs[0][0]):
1214 if len(abspfx) < len(srcs[0][0]):
1215 # A directory. Either the target path contains the last
1215 # A directory. Either the target path contains the last
1216 # component of the source path or it does not.
1216 # component of the source path or it does not.
1217 def evalpath(striplen):
1217 def evalpath(striplen):
1218 score = 0
1218 score = 0
1219 for s in srcs:
1219 for s in srcs:
1220 t = os.path.join(dest, s[0][striplen:])
1220 t = os.path.join(dest, s[0][striplen:])
1221 if os.path.exists(t):
1221 if os.path.exists(t):
1222 score += 1
1222 score += 1
1223 return score
1223 return score
1224
1224
1225 striplen = len(abspfx)
1225 striplen = len(abspfx)
1226 if striplen:
1226 if striplen:
1227 striplen += len(os.sep)
1227 striplen += len(os.sep)
1228 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1228 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1229 score = evalpath(striplen)
1229 score = evalpath(striplen)
1230 striplen1 = len(os.path.split(abspfx)[0])
1230 striplen1 = len(os.path.split(abspfx)[0])
1231 if striplen1:
1231 if striplen1:
1232 striplen1 += len(os.sep)
1232 striplen1 += len(os.sep)
1233 if evalpath(striplen1) > score:
1233 if evalpath(striplen1) > score:
1234 striplen = striplen1
1234 striplen = striplen1
1235 res = lambda p: os.path.join(dest, p[striplen:])
1235 res = lambda p: os.path.join(dest, p[striplen:])
1236 else:
1236 else:
1237 # a file
1237 # a file
1238 if destdirexists:
1238 if destdirexists:
1239 res = lambda p: os.path.join(dest, os.path.basename(p))
1239 res = lambda p: os.path.join(dest, os.path.basename(p))
1240 else:
1240 else:
1241 res = lambda p: dest
1241 res = lambda p: dest
1242 return res
1242 return res
1243
1243
1244
1244
1245 pats = list(pats)
1245 pats = list(pats)
1246 if not pats:
1246 if not pats:
1247 raise util.Abort(_('no source or destination specified'))
1247 raise util.Abort(_('no source or destination specified'))
1248 if len(pats) == 1:
1248 if len(pats) == 1:
1249 raise util.Abort(_('no destination specified'))
1249 raise util.Abort(_('no destination specified'))
1250 dest = pats.pop()
1250 dest = pats.pop()
1251 destdirexists = os.path.isdir(dest)
1251 destdirexists = os.path.isdir(dest)
1252 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1252 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1253 raise util.Abort(_('with multiple sources, destination must be an '
1253 raise util.Abort(_('with multiple sources, destination must be an '
1254 'existing directory'))
1254 'existing directory'))
1255 if opts['after']:
1255 if opts['after']:
1256 tfn = targetpathafterfn
1256 tfn = targetpathafterfn
1257 else:
1257 else:
1258 tfn = targetpathfn
1258 tfn = targetpathfn
1259 copylist = []
1259 copylist = []
1260 for pat in pats:
1260 for pat in pats:
1261 srcs = []
1261 srcs = []
1262 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1262 for tag, abssrc, relsrc, exact in walk(repo, [pat], opts):
1263 origsrc = okaytocopy(abssrc, relsrc, exact)
1263 origsrc = okaytocopy(abssrc, relsrc, exact)
1264 if origsrc:
1264 if origsrc:
1265 srcs.append((origsrc, abssrc, relsrc, exact))
1265 srcs.append((origsrc, abssrc, relsrc, exact))
1266 if not srcs:
1266 if not srcs:
1267 continue
1267 continue
1268 copylist.append((tfn(pat, dest, srcs), srcs))
1268 copylist.append((tfn(pat, dest, srcs), srcs))
1269 if not copylist:
1269 if not copylist:
1270 raise util.Abort(_('no files to copy'))
1270 raise util.Abort(_('no files to copy'))
1271
1271
1272 for targetpath, srcs in copylist:
1272 for targetpath, srcs in copylist:
1273 for origsrc, abssrc, relsrc, exact in srcs:
1273 for origsrc, abssrc, relsrc, exact in srcs:
1274 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1274 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1275
1275
1276 if errors:
1276 if errors:
1277 ui.warn(_('(consider using --after)\n'))
1277 ui.warn(_('(consider using --after)\n'))
1278 return errors, copied
1278 return errors, copied
1279
1279
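# A stand-alone sketch of the strip-length scoring used by targetpathafterfn()
# above: for "copy --after", each candidate strip length is scored by how many
# stripped source paths already exist at the destination, and the higher score
# wins.  The helper name and data below are hypothetical; the real code checks
# the filesystem rather than a lookup table.
def _sketch_pick_striplen(sources, existing_at_dest, candidates):
    def score(striplen):
        return len([s for s in sources if s[striplen:] in existing_at_dest])
    best = candidates[0]
    for c in candidates[1:]:
        if score(c) > score(best):
            best = c
    return best
# e.g. _sketch_pick_striplen(['a/b/x', 'a/b/y'], {'b/x': 1, 'b/y': 1},
#                            [len('a/b/'), len('a/')]) -> len('a/')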
1280 def copy(ui, repo, *pats, **opts):
1280 def copy(ui, repo, *pats, **opts):
1281 """mark files as copied for the next commit
1281 """mark files as copied for the next commit
1282
1282
1283 Mark dest as having copies of source files. If dest is a
1283 Mark dest as having copies of source files. If dest is a
1284 directory, copies are put in that directory. If dest is a file,
1284 directory, copies are put in that directory. If dest is a file,
1285 there can only be one source.
1285 there can only be one source.
1286
1286
1287 By default, this command copies the contents of files as they
1287 By default, this command copies the contents of files as they
1288 stand in the working directory. If invoked with --after, the
1288 stand in the working directory. If invoked with --after, the
1289 operation is recorded, but no copying is performed.
1289 operation is recorded, but no copying is performed.
1290
1290
1291 This command takes effect in the next commit.
1291 This command takes effect in the next commit.
1292
1292
1293 NOTE: This command should be treated as experimental. While it
1293 NOTE: This command should be treated as experimental. While it
1294 should properly record copied files, this information is not yet
1294 should properly record copied files, this information is not yet
1295 fully used by merge, nor fully reported by log.
1295 fully used by merge, nor fully reported by log.
1296 """
1296 """
1297 wlock = repo.wlock(0)
1297 wlock = repo.wlock(0)
1298 errs, copied = docopy(ui, repo, pats, opts, wlock)
1298 errs, copied = docopy(ui, repo, pats, opts, wlock)
1299 return errs
1299 return errs
1300
1300
1301 def debugancestor(ui, index, rev1, rev2):
1301 def debugancestor(ui, index, rev1, rev2):
1302 """find the ancestor revision of two revisions in a given index"""
1302 """find the ancestor revision of two revisions in a given index"""
1303 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1303 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1304 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1304 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1305 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1305 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1306
1306
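# revlog.ancestor() above picks a best common ancestor of two revisions; the
# real algorithm works on revision numbers with depth information.  A
# deliberately naive stand-in on a plain {node: (parent1, parent2)} map, shown
# only to illustrate the idea of intersecting the two ancestor sets:
def _sketch_common_ancestors(parents, a, b):
    def ancestors(node):
        seen, stack = {}, [node]
        while stack:
            n = stack.pop()
            if n is not None and n not in seen:
                seen[n] = 1
                stack.extend(parents.get(n, ()))
        return seen
    ancs_a, ancs_b = ancestors(a), ancestors(b)
    return [n for n in ancs_a if n in ancs_b]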
1307 def debugcomplete(ui, cmd='', **opts):
1307 def debugcomplete(ui, cmd='', **opts):
1308 """returns the completion list associated with the given command"""
1308 """returns the completion list associated with the given command"""
1309
1309
1310 if opts['options']:
1310 if opts['options']:
1311 options = []
1311 options = []
1312 otables = [globalopts]
1312 otables = [globalopts]
1313 if cmd:
1313 if cmd:
1314 aliases, entry = findcmd(cmd)
1314 aliases, entry = findcmd(cmd)
1315 otables.append(entry[1])
1315 otables.append(entry[1])
1316 for t in otables:
1316 for t in otables:
1317 for o in t:
1317 for o in t:
1318 if o[0]:
1318 if o[0]:
1319 options.append('-%s' % o[0])
1319 options.append('-%s' % o[0])
1320 options.append('--%s' % o[1])
1320 options.append('--%s' % o[1])
1321 ui.write("%s\n" % "\n".join(options))
1321 ui.write("%s\n" % "\n".join(options))
1322 return
1322 return
1323
1323
1324 clist = findpossible(cmd).keys()
1324 clist = findpossible(cmd).keys()
1325 clist.sort()
1325 clist.sort()
1326 ui.write("%s\n" % "\n".join(clist))
1326 ui.write("%s\n" % "\n".join(clist))
1327
1327
1328 def debugrebuildstate(ui, repo, rev=None):
1328 def debugrebuildstate(ui, repo, rev=None):
1329 """rebuild the dirstate as it would look like for the given revision"""
1329 """rebuild the dirstate as it would look like for the given revision"""
1330 if not rev:
1330 if not rev:
1331 rev = repo.changelog.tip()
1331 rev = repo.changelog.tip()
1332 else:
1332 else:
1333 rev = repo.lookup(rev)
1333 rev = repo.lookup(rev)
1334 change = repo.changelog.read(rev)
1334 change = repo.changelog.read(rev)
1335 n = change[0]
1335 n = change[0]
1336 files = repo.manifest.readflags(n)
1336 files = repo.manifest.read(n)
1337 wlock = repo.wlock()
1337 wlock = repo.wlock()
1338 repo.dirstate.rebuild(rev, files.iteritems())
1338 repo.dirstate.rebuild(rev, files)
1339
1339
1340 def debugcheckstate(ui, repo):
1340 def debugcheckstate(ui, repo):
1341 """validate the correctness of the current dirstate"""
1341 """validate the correctness of the current dirstate"""
1342 parent1, parent2 = repo.dirstate.parents()
1342 parent1, parent2 = repo.dirstate.parents()
1343 repo.dirstate.read()
1343 repo.dirstate.read()
1344 dc = repo.dirstate.map
1344 dc = repo.dirstate.map
1345 keys = dc.keys()
1345 keys = dc.keys()
1346 keys.sort()
1346 keys.sort()
1347 m1n = repo.changelog.read(parent1)[0]
1347 m1n = repo.changelog.read(parent1)[0]
1348 m2n = repo.changelog.read(parent2)[0]
1348 m2n = repo.changelog.read(parent2)[0]
1349 m1 = repo.manifest.read(m1n)
1349 m1 = repo.manifest.read(m1n)
1350 m2 = repo.manifest.read(m2n)
1350 m2 = repo.manifest.read(m2n)
1351 errors = 0
1351 errors = 0
1352 for f in dc:
1352 for f in dc:
1353 state = repo.dirstate.state(f)
1353 state = repo.dirstate.state(f)
1354 if state in "nr" and f not in m1:
1354 if state in "nr" and f not in m1:
1355 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1355 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1356 errors += 1
1356 errors += 1
1357 if state in "a" and f in m1:
1357 if state in "a" and f in m1:
1358 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1358 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1359 errors += 1
1359 errors += 1
1360 if state in "m" and f not in m1 and f not in m2:
1360 if state in "m" and f not in m1 and f not in m2:
1361 ui.warn(_("%s in state %s, but not in either manifest\n") %
1361 ui.warn(_("%s in state %s, but not in either manifest\n") %
1362 (f, state))
1362 (f, state))
1363 errors += 1
1363 errors += 1
1364 for f in m1:
1364 for f in m1:
1365 state = repo.dirstate.state(f)
1365 state = repo.dirstate.state(f)
1366 if state not in "nrm":
1366 if state not in "nrm":
1367 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1367 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1368 errors += 1
1368 errors += 1
1369 if errors:
1369 if errors:
1370 error = _(".hg/dirstate inconsistent with current parent's manifest")
1370 error = _(".hg/dirstate inconsistent with current parent's manifest")
1371 raise util.Abort(error)
1371 raise util.Abort(error)
1372
1372
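# The consistency rules checked above, restated compactly over plain Python
# structures: a {file: state-letter} dirstate map plus the two parent
# manifests given as dictionaries keyed by file name.  Hypothetical helper for
# illustration only.
def _sketch_check_dirstate(states, m1, m2):
    problems = []
    for f, state in states.items():
        if state in 'nr' and f not in m1:
            problems.append('%s in state %s, but not in manifest1' % (f, state))
        if state == 'a' and f in m1:
            problems.append('%s in state %s, but also in manifest1' % (f, state))
        if state == 'm' and f not in m1 and f not in m2:
            problems.append('%s in state %s, but not in either manifest' % (f, state))
    for f in m1:
        if states.get(f) not in ('n', 'r', 'm'):
            problems.append('%s in manifest1, but listed as state %s'
                            % (f, states.get(f)))
    return problems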
1373 def debugconfig(ui, repo, *values):
1373 def debugconfig(ui, repo, *values):
1374 """show combined config settings from all hgrc files
1374 """show combined config settings from all hgrc files
1375
1375
1376 With no args, print names and values of all config items.
1376 With no args, print names and values of all config items.
1377
1377
1378 With one arg of the form section.name, print just the value of
1378 With one arg of the form section.name, print just the value of
1379 that config item.
1379 that config item.
1380
1380
1381 With multiple args, print names and values of all config items
1381 With multiple args, print names and values of all config items
1382 with matching section names."""
1382 with matching section names."""
1383
1383
1384 if values:
1384 if values:
1385 if len([v for v in values if '.' in v]) > 1:
1385 if len([v for v in values if '.' in v]) > 1:
1386 raise util.Abort(_('only one config item permitted'))
1386 raise util.Abort(_('only one config item permitted'))
1387 for section, name, value in ui.walkconfig():
1387 for section, name, value in ui.walkconfig():
1388 sectname = section + '.' + name
1388 sectname = section + '.' + name
1389 if values:
1389 if values:
1390 for v in values:
1390 for v in values:
1391 if v == section:
1391 if v == section:
1392 ui.write('%s=%s\n' % (sectname, value))
1392 ui.write('%s=%s\n' % (sectname, value))
1393 elif v == sectname:
1393 elif v == sectname:
1394 ui.write(value, '\n')
1394 ui.write(value, '\n')
1395 else:
1395 else:
1396 ui.write('%s=%s\n' % (sectname, value))
1396 ui.write('%s=%s\n' % (sectname, value))
1397
1397
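# A stand-alone sketch of the matching done above: each argument is compared
# against a whole section name (print "section.name=value") or against a fully
# qualified "section.name" (print just the value).  The helper name and the
# sample configuration are made up for illustration.
def _sketch_filter_config(values):
    config = [('ui', 'username', 'jane'),
              ('ui', 'verbose', 'false'),
              ('paths', 'default', 'http://example.com/repo')]
    out = []
    for section, name, value in config:
        sectname = section + '.' + name
        if not values:
            out.append('%s=%s' % (sectname, value))
            continue
        for v in values:
            if v == section:
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:
                out.append(value)
    return out
# _sketch_filter_config(['ui'])            -> ['ui.username=jane', 'ui.verbose=false']
# _sketch_filter_config(['paths.default']) -> ['http://example.com/repo']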
1398 def debugsetparents(ui, repo, rev1, rev2=None):
1398 def debugsetparents(ui, repo, rev1, rev2=None):
1399 """manually set the parents of the current working directory
1399 """manually set the parents of the current working directory
1400
1400
1401 This is useful for writing repository conversion tools, but should
1401 This is useful for writing repository conversion tools, but should
1402 be used with care.
1402 be used with care.
1403 """
1403 """
1404
1404
1405 if not rev2:
1405 if not rev2:
1406 rev2 = hex(nullid)
1406 rev2 = hex(nullid)
1407
1407
1408 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1408 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1409
1409
1410 def debugstate(ui, repo):
1410 def debugstate(ui, repo):
1411 """show the contents of the current dirstate"""
1411 """show the contents of the current dirstate"""
1412 repo.dirstate.read()
1412 repo.dirstate.read()
1413 dc = repo.dirstate.map
1413 dc = repo.dirstate.map
1414 keys = dc.keys()
1414 keys = dc.keys()
1415 keys.sort()
1415 keys.sort()
1416 for file_ in keys:
1416 for file_ in keys:
1417 ui.write("%c %3o %10d %s %s\n"
1417 ui.write("%c %3o %10d %s %s\n"
1418 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1418 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1419 time.strftime("%x %X",
1419 time.strftime("%x %X",
1420 time.localtime(dc[file_][3])), file_))
1420 time.localtime(dc[file_][3])), file_))
1421 for f in repo.dirstate.copies:
1421 for f in repo.dirstate.copies:
1422 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1422 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1423
1423
1424 def debugdata(ui, file_, rev):
1424 def debugdata(ui, file_, rev):
1425 """dump the contents of an data file revision"""
1425 """dump the contents of an data file revision"""
1426 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1426 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1427 file_[:-2] + ".i", file_, 0)
1427 file_[:-2] + ".i", file_, 0)
1428 try:
1428 try:
1429 ui.write(r.revision(r.lookup(rev)))
1429 ui.write(r.revision(r.lookup(rev)))
1430 except KeyError:
1430 except KeyError:
1431 raise util.Abort(_('invalid revision identifier %s'), rev)
1431 raise util.Abort(_('invalid revision identifier %s'), rev)
1432
1432
1433 def debugindex(ui, file_):
1433 def debugindex(ui, file_):
1434 """dump the contents of an index file"""
1434 """dump the contents of an index file"""
1435 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1435 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1436 ui.write(" rev offset length base linkrev" +
1436 ui.write(" rev offset length base linkrev" +
1437 " nodeid p1 p2\n")
1437 " nodeid p1 p2\n")
1438 for i in range(r.count()):
1438 for i in range(r.count()):
1439 node = r.node(i)
1439 node = r.node(i)
1440 pp = r.parents(node)
1440 pp = r.parents(node)
1441 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1441 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1442 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1442 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1443 short(node), short(pp[0]), short(pp[1])))
1443 short(node), short(pp[0]), short(pp[1])))
1444
1444
1445 def debugindexdot(ui, file_):
1445 def debugindexdot(ui, file_):
1446 """dump an index DAG as a .dot file"""
1446 """dump an index DAG as a .dot file"""
1447 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1447 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1448 ui.write("digraph G {\n")
1448 ui.write("digraph G {\n")
1449 for i in range(r.count()):
1449 for i in range(r.count()):
1450 node = r.node(i)
1450 node = r.node(i)
1451 pp = r.parents(node)
1451 pp = r.parents(node)
1452 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1452 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1453 if pp[1] != nullid:
1453 if pp[1] != nullid:
1454 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1454 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1455 ui.write("}\n")
1455 ui.write("}\n")
1456
1456
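# debugindexdot() above emits one "parent -> child" edge per revision, plus a
# second edge when a second parent exists.  The same shape over a plain list of
# (rev, parent1, parent2) tuples, with -1 standing in for the null parent;
# helper name and data are hypothetical.
def _sketch_dag_to_dot(revs):
    lines = ["digraph G {"]
    for rev, p1, p2 in revs:
        lines.append("\t%d -> %d" % (p1, rev))
        if p2 != -1:
            lines.append("\t%d -> %d" % (p2, rev))
    lines.append("}")
    return "\n".join(lines)
# e.g. _sketch_dag_to_dot([(0, -1, -1), (1, 0, -1), (2, 0, -1), (3, 1, 2)])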
1457 def debugrename(ui, repo, file, rev=None):
1457 def debugrename(ui, repo, file, rev=None):
1458 """dump rename information"""
1458 """dump rename information"""
1459 r = repo.file(relpath(repo, [file])[0])
1459 r = repo.file(relpath(repo, [file])[0])
1460 if rev:
1460 if rev:
1461 try:
1461 try:
1462 # assume all revision numbers are for changesets
1462 # assume all revision numbers are for changesets
1463 n = repo.lookup(rev)
1463 n = repo.lookup(rev)
1464 change = repo.changelog.read(n)
1464 change = repo.changelog.read(n)
1465 m = repo.manifest.read(change[0])
1465 m = repo.manifest.read(change[0])
1466 n = m[relpath(repo, [file])[0]]
1466 n = m[relpath(repo, [file])[0]]
1467 except (hg.RepoError, KeyError):
1467 except (hg.RepoError, KeyError):
1468 n = r.lookup(rev)
1468 n = r.lookup(rev)
1469 else:
1469 else:
1470 n = r.tip()
1470 n = r.tip()
1471 m = r.renamed(n)
1471 m = r.renamed(n)
1472 if m:
1472 if m:
1473 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1473 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1474 else:
1474 else:
1475 ui.write(_("not renamed\n"))
1475 ui.write(_("not renamed\n"))
1476
1476
1477 def debugwalk(ui, repo, *pats, **opts):
1477 def debugwalk(ui, repo, *pats, **opts):
1478 """show how files match on given patterns"""
1478 """show how files match on given patterns"""
1479 items = list(walk(repo, pats, opts))
1479 items = list(walk(repo, pats, opts))
1480 if not items:
1480 if not items:
1481 return
1481 return
1482 fmt = '%%s %%-%ds %%-%ds %%s' % (
1482 fmt = '%%s %%-%ds %%-%ds %%s' % (
1483 max([len(abs) for (src, abs, rel, exact) in items]),
1483 max([len(abs) for (src, abs, rel, exact) in items]),
1484 max([len(rel) for (src, abs, rel, exact) in items]))
1484 max([len(rel) for (src, abs, rel, exact) in items]))
1485 for src, abs, rel, exact in items:
1485 for src, abs, rel, exact in items:
1486 line = fmt % (src, abs, rel, exact and 'exact' or '')
1486 line = fmt % (src, abs, rel, exact and 'exact' or '')
1487 ui.write("%s\n" % line.rstrip())
1487 ui.write("%s\n" % line.rstrip())
1488
1488
1489 def diff(ui, repo, *pats, **opts):
1489 def diff(ui, repo, *pats, **opts):
1490 """diff repository (or selected files)
1490 """diff repository (or selected files)
1491
1491
1492 Show differences between revisions for the specified files.
1492 Show differences between revisions for the specified files.
1493
1493
1494 Differences between files are shown using the unified diff format.
1494 Differences between files are shown using the unified diff format.
1495
1495
1496 When two revision arguments are given, then changes are shown
1496 When two revision arguments are given, then changes are shown
1497 between those revisions. If only one revision is specified then
1497 between those revisions. If only one revision is specified then
1498 that revision is compared to the working directory, and, when no
1498 that revision is compared to the working directory, and, when no
1499 revisions are specified, the working directory files are compared
1499 revisions are specified, the working directory files are compared
1500 to its parent.
1500 to its parent.
1501
1501
1502 Without the -a option, diff will avoid generating diffs of files
1502 Without the -a option, diff will avoid generating diffs of files
1503 it detects as binary. With -a, diff will generate a diff anyway,
1503 it detects as binary. With -a, diff will generate a diff anyway,
1504 probably with undesirable results.
1504 probably with undesirable results.
1505 """
1505 """
1506 node1, node2 = revpair(ui, repo, opts['rev'])
1506 node1, node2 = revpair(ui, repo, opts['rev'])
1507
1507
1508 fns, matchfn, anypats = matchpats(repo, pats, opts)
1508 fns, matchfn, anypats = matchpats(repo, pats, opts)
1509
1509
1510 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1510 dodiff(sys.stdout, ui, repo, node1, node2, fns, match=matchfn,
1511 text=opts['text'], opts=opts)
1511 text=opts['text'], opts=opts)
1512
1512
1513 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1513 def doexport(ui, repo, changeset, seqno, total, revwidth, opts):
1514 node = repo.lookup(changeset)
1514 node = repo.lookup(changeset)
1515 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1515 parents = [p for p in repo.changelog.parents(node) if p != nullid]
1516 if opts['switch_parent']:
1516 if opts['switch_parent']:
1517 parents.reverse()
1517 parents.reverse()
1518 prev = (parents and parents[0]) or nullid
1518 prev = (parents and parents[0]) or nullid
1519 change = repo.changelog.read(node)
1519 change = repo.changelog.read(node)
1520
1520
1521 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1521 fp = make_file(repo, opts['output'], node, total=total, seqno=seqno,
1522 revwidth=revwidth)
1522 revwidth=revwidth)
1523 if fp != sys.stdout:
1523 if fp != sys.stdout:
1524 ui.note("%s\n" % fp.name)
1524 ui.note("%s\n" % fp.name)
1525
1525
1526 fp.write("# HG changeset patch\n")
1526 fp.write("# HG changeset patch\n")
1527 fp.write("# User %s\n" % change[1])
1527 fp.write("# User %s\n" % change[1])
1528 fp.write("# Date %d %d\n" % change[2])
1528 fp.write("# Date %d %d\n" % change[2])
1529 fp.write("# Node ID %s\n" % hex(node))
1529 fp.write("# Node ID %s\n" % hex(node))
1530 fp.write("# Parent %s\n" % hex(prev))
1530 fp.write("# Parent %s\n" % hex(prev))
1531 if len(parents) > 1:
1531 if len(parents) > 1:
1532 fp.write("# Parent %s\n" % hex(parents[1]))
1532 fp.write("# Parent %s\n" % hex(parents[1]))
1533 fp.write(change[4].rstrip())
1533 fp.write(change[4].rstrip())
1534 fp.write("\n\n")
1534 fp.write("\n\n")
1535
1535
1536 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1536 dodiff(fp, ui, repo, prev, node, text=opts['text'])
1537 if fp != sys.stdout:
1537 if fp != sys.stdout:
1538 fp.close()
1538 fp.close()
1539
1539
1540 def export(ui, repo, *changesets, **opts):
1540 def export(ui, repo, *changesets, **opts):
1541 """dump the header and diffs for one or more changesets
1541 """dump the header and diffs for one or more changesets
1542
1542
1543 Print the changeset header and diffs for one or more revisions.
1543 Print the changeset header and diffs for one or more revisions.
1544
1544
1545 The information shown in the changeset header is: author,
1545 The information shown in the changeset header is: author,
1546 changeset hash, parent and commit comment.
1546 changeset hash, parent and commit comment.
1547
1547
1548 Output may be to a file, in which case the name of the file is
1548 Output may be to a file, in which case the name of the file is
1549 given using a format string. The formatting rules are as follows:
1549 given using a format string. The formatting rules are as follows:
1550
1550
1551 %% literal "%" character
1551 %% literal "%" character
1552 %H changeset hash (40 bytes of hexadecimal)
1552 %H changeset hash (40 bytes of hexadecimal)
1553 %N number of patches being generated
1553 %N number of patches being generated
1554 %R changeset revision number
1554 %R changeset revision number
1555 %b basename of the exporting repository
1555 %b basename of the exporting repository
1556 %h short-form changeset hash (12 bytes of hexadecimal)
1556 %h short-form changeset hash (12 bytes of hexadecimal)
1557 %n zero-padded sequence number, starting at 1
1557 %n zero-padded sequence number, starting at 1
1558 %r zero-padded changeset revision number
1558 %r zero-padded changeset revision number
1559
1559
1560 Without the -a option, export will avoid generating diffs of files
1560 Without the -a option, export will avoid generating diffs of files
1561 it detects as binary. With -a, export will generate a diff anyway,
1561 it detects as binary. With -a, export will generate a diff anyway,
1562 probably with undesirable results.
1562 probably with undesirable results.
1563
1563
1564 With the --switch-parent option, the diff will be against the second
1564 With the --switch-parent option, the diff will be against the second
1565 parent. This can be useful for reviewing a merge.
1565 parent. This can be useful for reviewing a merge.
1566 """
1566 """
1567 if not changesets:
1567 if not changesets:
1568 raise util.Abort(_("export requires at least one changeset"))
1568 raise util.Abort(_("export requires at least one changeset"))
1569 seqno = 0
1569 seqno = 0
1570 revs = list(revrange(ui, repo, changesets))
1570 revs = list(revrange(ui, repo, changesets))
1571 total = len(revs)
1571 total = len(revs)
1572 revwidth = max(map(len, revs))
1572 revwidth = max(map(len, revs))
1573 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1573 msg = len(revs) > 1 and _("Exporting patches:\n") or _("Exporting patch:\n")
1574 ui.note(msg)
1574 ui.note(msg)
1575 for cset in revs:
1575 for cset in revs:
1576 seqno += 1
1576 seqno += 1
1577 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1577 doexport(ui, repo, cset, seqno, total, revwidth, opts)
1578
1578
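# Illustration (not part of the original source): a minimal sketch of how the
# format specifiers listed above could be expanded. This is not Mercurial's
# actual make_file implementation; the sample values are hypothetical.
def expandname_example(pattern, mapping):
    # '%%' yields a literal '%'; any other '%X' is looked up in the mapping
    return re.sub(r'%(.)',
                  lambda m: (m.group(1) == '%' and '%') or mapping[m.group(1)],
                  pattern)
# expandname_example('%b-%r-%h.patch',
#                    {'b': 'myrepo', 'r': '05', 'h': '1234567890ab'})
# -> 'myrepo-05-1234567890ab.patch'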
1579 def forget(ui, repo, *pats, **opts):
1579 def forget(ui, repo, *pats, **opts):
1580 """don't add the specified files on the next commit (DEPRECATED)
1580 """don't add the specified files on the next commit (DEPRECATED)
1581
1581
1582 (DEPRECATED)
1582 (DEPRECATED)
1583 Undo an 'hg add' scheduled for the next commit.
1583 Undo an 'hg add' scheduled for the next commit.
1584
1584
1585 This command is now deprecated and will be removed in a future
1585 This command is now deprecated and will be removed in a future
1586 release. Please use revert instead.
1586 release. Please use revert instead.
1587 """
1587 """
1588 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1588 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1589 forget = []
1589 forget = []
1590 for src, abs, rel, exact in walk(repo, pats, opts):
1590 for src, abs, rel, exact in walk(repo, pats, opts):
1591 if repo.dirstate.state(abs) == 'a':
1591 if repo.dirstate.state(abs) == 'a':
1592 forget.append(abs)
1592 forget.append(abs)
1593 if ui.verbose or not exact:
1593 if ui.verbose or not exact:
1594 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1594 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1595 repo.forget(forget)
1595 repo.forget(forget)
1596
1596
1597 def grep(ui, repo, pattern, *pats, **opts):
1597 def grep(ui, repo, pattern, *pats, **opts):
1598 """search for a pattern in specified files and revisions
1598 """search for a pattern in specified files and revisions
1599
1599
1600 Search revisions of files for a regular expression.
1600 Search revisions of files for a regular expression.
1601
1601
1602 This command behaves differently than Unix grep. It only accepts
1602 This command behaves differently than Unix grep. It only accepts
1603 Python/Perl regexps. It searches repository history, not the
1603 Python/Perl regexps. It searches repository history, not the
1604 working directory. It always prints the revision number in which
1604 working directory. It always prints the revision number in which
1605 a match appears.
1605 a match appears.
1606
1606
1607 By default, grep only prints output for the first revision of a
1607 By default, grep only prints output for the first revision of a
1608 file in which it finds a match. To get it to print every revision
1608 file in which it finds a match. To get it to print every revision
1609 that contains a change in match status ("-" for a match that
1609 that contains a change in match status ("-" for a match that
1610 becomes a non-match, or "+" for a non-match that becomes a match),
1610 becomes a non-match, or "+" for a non-match that becomes a match),
1611 use the --all flag.
1611 use the --all flag.
1612 """
1612 """
1613 reflags = 0
1613 reflags = 0
1614 if opts['ignore_case']:
1614 if opts['ignore_case']:
1615 reflags |= re.I
1615 reflags |= re.I
1616 regexp = re.compile(pattern, reflags)
1616 regexp = re.compile(pattern, reflags)
1617 sep, eol = ':', '\n'
1617 sep, eol = ':', '\n'
1618 if opts['print0']:
1618 if opts['print0']:
1619 sep = eol = '\0'
1619 sep = eol = '\0'
1620
1620
1621 fcache = {}
1621 fcache = {}
1622 def getfile(fn):
1622 def getfile(fn):
1623 if fn not in fcache:
1623 if fn not in fcache:
1624 fcache[fn] = repo.file(fn)
1624 fcache[fn] = repo.file(fn)
1625 return fcache[fn]
1625 return fcache[fn]
1626
1626
1627 def matchlines(body):
1627 def matchlines(body):
1628 begin = 0
1628 begin = 0
1629 linenum = 0
1629 linenum = 0
1630 while True:
1630 while True:
1631 match = regexp.search(body, begin)
1631 match = regexp.search(body, begin)
1632 if not match:
1632 if not match:
1633 break
1633 break
1634 mstart, mend = match.span()
1634 mstart, mend = match.span()
1635 linenum += body.count('\n', begin, mstart) + 1
1635 linenum += body.count('\n', begin, mstart) + 1
1636 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1636 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1637 lend = body.find('\n', mend)
1637 lend = body.find('\n', mend)
1638 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1638 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1639 begin = lend + 1
1639 begin = lend + 1
1640
1640
1641 class linestate(object):
1641 class linestate(object):
1642 def __init__(self, line, linenum, colstart, colend):
1642 def __init__(self, line, linenum, colstart, colend):
1643 self.line = line
1643 self.line = line
1644 self.linenum = linenum
1644 self.linenum = linenum
1645 self.colstart = colstart
1645 self.colstart = colstart
1646 self.colend = colend
1646 self.colend = colend
1647 def __eq__(self, other):
1647 def __eq__(self, other):
1648 return self.line == other.line
1648 return self.line == other.line
1649 def __hash__(self):
1649 def __hash__(self):
1650 return hash(self.line)
1650 return hash(self.line)
1651
1651
1652 matches = {}
1652 matches = {}
1653 def grepbody(fn, rev, body):
1653 def grepbody(fn, rev, body):
1654 matches[rev].setdefault(fn, {})
1654 matches[rev].setdefault(fn, {})
1655 m = matches[rev][fn]
1655 m = matches[rev][fn]
1656 for lnum, cstart, cend, line in matchlines(body):
1656 for lnum, cstart, cend, line in matchlines(body):
1657 s = linestate(line, lnum, cstart, cend)
1657 s = linestate(line, lnum, cstart, cend)
1658 m[s] = s
1658 m[s] = s
1659
1659
1660 # FIXME: prev isn't used, why ?
1660 # FIXME: prev isn't used, why ?
1661 prev = {}
1661 prev = {}
1662 ucache = {}
1662 ucache = {}
1663 def display(fn, rev, states, prevstates):
1663 def display(fn, rev, states, prevstates):
1664 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1664 diff = list(sets.Set(states).symmetric_difference(sets.Set(prevstates)))
1665 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1665 diff.sort(lambda x, y: cmp(x.linenum, y.linenum))
1666 counts = {'-': 0, '+': 0}
1666 counts = {'-': 0, '+': 0}
1667 filerevmatches = {}
1667 filerevmatches = {}
1668 for l in diff:
1668 for l in diff:
1669 if incrementing or not opts['all']:
1669 if incrementing or not opts['all']:
1670 change = ((l in prevstates) and '-') or '+'
1670 change = ((l in prevstates) and '-') or '+'
1671 r = rev
1671 r = rev
1672 else:
1672 else:
1673 change = ((l in states) and '-') or '+'
1673 change = ((l in states) and '-') or '+'
1674 r = prev[fn]
1674 r = prev[fn]
1675 cols = [fn, str(rev)]
1675 cols = [fn, str(rev)]
1676 if opts['line_number']:
1676 if opts['line_number']:
1677 cols.append(str(l.linenum))
1677 cols.append(str(l.linenum))
1678 if opts['all']:
1678 if opts['all']:
1679 cols.append(change)
1679 cols.append(change)
1680 if opts['user']:
1680 if opts['user']:
1681 cols.append(trimuser(ui, getchange(rev)[1], rev,
1681 cols.append(trimuser(ui, getchange(rev)[1], rev,
1682 ucache))
1682 ucache))
1683 if opts['files_with_matches']:
1683 if opts['files_with_matches']:
1684 c = (fn, rev)
1684 c = (fn, rev)
1685 if c in filerevmatches:
1685 if c in filerevmatches:
1686 continue
1686 continue
1687 filerevmatches[c] = 1
1687 filerevmatches[c] = 1
1688 else:
1688 else:
1689 cols.append(l.line)
1689 cols.append(l.line)
1690 ui.write(sep.join(cols), eol)
1690 ui.write(sep.join(cols), eol)
1691 counts[change] += 1
1691 counts[change] += 1
1692 return counts['+'], counts['-']
1692 return counts['+'], counts['-']
1693
1693
1694 fstate = {}
1694 fstate = {}
1695 skip = {}
1695 skip = {}
1696 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1696 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1697 count = 0
1697 count = 0
1698 incrementing = False
1698 incrementing = False
1699 for st, rev, fns in changeiter:
1699 for st, rev, fns in changeiter:
1700 if st == 'window':
1700 if st == 'window':
1701 incrementing = rev
1701 incrementing = rev
1702 matches.clear()
1702 matches.clear()
1703 elif st == 'add':
1703 elif st == 'add':
1704 change = repo.changelog.read(repo.lookup(str(rev)))
1704 change = repo.changelog.read(repo.lookup(str(rev)))
1705 mf = repo.manifest.read(change[0])
1705 mf = repo.manifest.read(change[0])
1706 matches[rev] = {}
1706 matches[rev] = {}
1707 for fn in fns:
1707 for fn in fns:
1708 if fn in skip:
1708 if fn in skip:
1709 continue
1709 continue
1710 fstate.setdefault(fn, {})
1710 fstate.setdefault(fn, {})
1711 try:
1711 try:
1712 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1712 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1713 except KeyError:
1713 except KeyError:
1714 pass
1714 pass
1715 elif st == 'iter':
1715 elif st == 'iter':
1716 states = matches[rev].items()
1716 states = matches[rev].items()
1717 states.sort()
1717 states.sort()
1718 for fn, m in states:
1718 for fn, m in states:
1719 if fn in skip:
1719 if fn in skip:
1720 continue
1720 continue
1721 if incrementing or not opts['all'] or fstate[fn]:
1721 if incrementing or not opts['all'] or fstate[fn]:
1722 pos, neg = display(fn, rev, m, fstate[fn])
1722 pos, neg = display(fn, rev, m, fstate[fn])
1723 count += pos + neg
1723 count += pos + neg
1724 if pos and not opts['all']:
1724 if pos and not opts['all']:
1725 skip[fn] = True
1725 skip[fn] = True
1726 fstate[fn] = m
1726 fstate[fn] = m
1727 prev[fn] = rev
1727 prev[fn] = rev
1728
1728
1729 if not incrementing:
1729 if not incrementing:
1730 fstate = fstate.items()
1730 fstate = fstate.items()
1731 fstate.sort()
1731 fstate.sort()
1732 for fn, state in fstate:
1732 for fn, state in fstate:
1733 if fn in skip:
1733 if fn in skip:
1734 continue
1734 continue
1735 display(fn, rev, {}, state)
1735 display(fn, rev, {}, state)
1736 return (count == 0 and 1) or 0
1736 return (count == 0 and 1) or 0
1737
1737
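# Illustration (not part of the original source): the --all bookkeeping above
# keys match states by line text only, so the symmetric difference between two
# revisions' match sets is exactly the lines whose match status changed.
# The sample lines below are hypothetical.
def grep_status_example(oldmatches, newmatches):
    # returns (change, line) pairs: '-' match disappeared, '+' match appeared
    changed = set(oldmatches) ^ set(newmatches)
    return [(((l in oldmatches) and '-') or '+', l) for l in changed]
# grep_status_example(set(['def spam(): pass']),
#                     set(['def spam(): pass', 'spam(1)']))
# -> [('+', 'spam(1)')]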
1738 def heads(ui, repo, **opts):
1738 def heads(ui, repo, **opts):
1739 """show current repository heads
1739 """show current repository heads
1740
1740
1741 Show all repository head changesets.
1741 Show all repository head changesets.
1742
1742
1743 Repository "heads" are changesets that don't have children
1743 Repository "heads" are changesets that don't have children
1744 changesets. They are where development generally takes place and
1744 changesets. They are where development generally takes place and
1745 are the usual targets for update and merge operations.
1745 are the usual targets for update and merge operations.
1746 """
1746 """
1747 if opts['rev']:
1747 if opts['rev']:
1748 heads = repo.heads(repo.lookup(opts['rev']))
1748 heads = repo.heads(repo.lookup(opts['rev']))
1749 else:
1749 else:
1750 heads = repo.heads()
1750 heads = repo.heads()
1751 br = None
1751 br = None
1752 if opts['branches']:
1752 if opts['branches']:
1753 br = repo.branchlookup(heads)
1753 br = repo.branchlookup(heads)
1754 displayer = show_changeset(ui, repo, opts)
1754 displayer = show_changeset(ui, repo, opts)
1755 for n in heads:
1755 for n in heads:
1756 displayer.show(changenode=n, brinfo=br)
1756 displayer.show(changenode=n, brinfo=br)
1757
1757
1758 def identify(ui, repo):
1758 def identify(ui, repo):
1759 """print information about the working copy
1759 """print information about the working copy
1760
1760
1761 Print a short summary of the current state of the repo.
1761 Print a short summary of the current state of the repo.
1762
1762
1763 This summary identifies the repository state using one or two parent
1763 This summary identifies the repository state using one or two parent
1764 hash identifiers, followed by a "+" if there are uncommitted changes
1764 hash identifiers, followed by a "+" if there are uncommitted changes
1765 in the working directory, followed by a list of tags for this revision.
1765 in the working directory, followed by a list of tags for this revision.
1766 """
1766 """
1767 parents = [p for p in repo.dirstate.parents() if p != nullid]
1767 parents = [p for p in repo.dirstate.parents() if p != nullid]
1768 if not parents:
1768 if not parents:
1769 ui.write(_("unknown\n"))
1769 ui.write(_("unknown\n"))
1770 return
1770 return
1771
1771
1772 hexfunc = ui.verbose and hex or short
1772 hexfunc = ui.verbose and hex or short
1773 modified, added, removed, deleted, unknown = repo.changes()
1773 modified, added, removed, deleted, unknown = repo.changes()
1774 output = ["%s%s" %
1774 output = ["%s%s" %
1775 ('+'.join([hexfunc(parent) for parent in parents]),
1775 ('+'.join([hexfunc(parent) for parent in parents]),
1776 (modified or added or removed or deleted) and "+" or "")]
1776 (modified or added or removed or deleted) and "+" or "")]
1777
1777
1778 if not ui.quiet:
1778 if not ui.quiet:
1779 # multiple tags for a single parent separated by '/'
1779 # multiple tags for a single parent separated by '/'
1780 parenttags = ['/'.join(tags)
1780 parenttags = ['/'.join(tags)
1781 for tags in map(repo.nodetags, parents) if tags]
1781 for tags in map(repo.nodetags, parents) if tags]
1782 # tags for multiple parents separated by ' + '
1782 # tags for multiple parents separated by ' + '
1783 if parenttags:
1783 if parenttags:
1784 output.append(' + '.join(parenttags))
1784 output.append(' + '.join(parenttags))
1785
1785
1786 ui.write("%s\n" % ' '.join(output))
1786 ui.write("%s\n" % ' '.join(output))
1787
1787
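# Illustration (not part of the original source): typical output of the code
# above, with hypothetical hashes and tags.
#
#   5c5b635b0f23 tip              # clean working directory at a tagged revision
#   5c5b635b0f23+                 # same parent, uncommitted local changes
#   5c5b635b0f23+d04774336ef5+    # uncommitted merge of two parents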
1788 def import_(ui, repo, patch1, *patches, **opts):
1788 def import_(ui, repo, patch1, *patches, **opts):
1789 """import an ordered set of patches
1789 """import an ordered set of patches
1790
1790
1791 Import a list of patches and commit them individually.
1791 Import a list of patches and commit them individually.
1792
1792
1793 If there are outstanding changes in the working directory, import
1793 If there are outstanding changes in the working directory, import
1794 will abort unless given the -f flag.
1794 will abort unless given the -f flag.
1795
1795
1796 You can import a patch straight from a mail message. Even patches
1796 You can import a patch straight from a mail message. Even patches
1797 as attachments work (body part must be type text/plain or
1797 as attachments work (body part must be type text/plain or
1798 text/x-patch to be used). From and Subject headers of email
1798 text/x-patch to be used). From and Subject headers of email
1799 message are used as default committer and commit message. All
1799 message are used as default committer and commit message. All
1800 text/plain body parts before first diff are added to commit
1800 text/plain body parts before first diff are added to commit
1801 message.
1801 message.
1802
1802
1803 If imported patch was generated by hg export, user and description
1803 If imported patch was generated by hg export, user and description
1804 from patch override values from message headers and body. Values
1804 from patch override values from message headers and body. Values
1805 given on command line with -m and -u override these.
1805 given on command line with -m and -u override these.
1806
1806
1807 To read a patch from standard input, use patch name "-".
1807 To read a patch from standard input, use patch name "-".
1808 """
1808 """
1809 patches = (patch1,) + patches
1809 patches = (patch1,) + patches
1810
1810
1811 if not opts['force']:
1811 if not opts['force']:
1812 bail_if_changed(repo)
1812 bail_if_changed(repo)
1813
1813
1814 d = opts["base"]
1814 d = opts["base"]
1815 strip = opts["strip"]
1815 strip = opts["strip"]
1816
1816
1817 mailre = re.compile(r'(?:From |[\w-]+:)')
1817 mailre = re.compile(r'(?:From |[\w-]+:)')
1818
1818
1819 # attempt to detect the start of a patch
1819 # attempt to detect the start of a patch
1820 # (this heuristic is borrowed from quilt)
1820 # (this heuristic is borrowed from quilt)
1821 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1821 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
1822 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1822 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
1823 '(---|\*\*\*)[ \t])', re.MULTILINE)
1823 '(---|\*\*\*)[ \t])', re.MULTILINE)
1824
1824
1825 for patch in patches:
1825 for patch in patches:
1826 pf = os.path.join(d, patch)
1826 pf = os.path.join(d, patch)
1827
1827
1828 message = None
1828 message = None
1829 user = None
1829 user = None
1830 date = None
1830 date = None
1831 hgpatch = False
1831 hgpatch = False
1832
1832
1833 p = email.Parser.Parser()
1833 p = email.Parser.Parser()
1834 if pf == '-':
1834 if pf == '-':
1835 msg = p.parse(sys.stdin)
1835 msg = p.parse(sys.stdin)
1836 ui.status(_("applying patch from stdin\n"))
1836 ui.status(_("applying patch from stdin\n"))
1837 else:
1837 else:
1838 msg = p.parse(file(pf))
1838 msg = p.parse(file(pf))
1839 ui.status(_("applying %s\n") % patch)
1839 ui.status(_("applying %s\n") % patch)
1840
1840
1841 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1841 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
1842 tmpfp = os.fdopen(fd, 'w')
1842 tmpfp = os.fdopen(fd, 'w')
1843 try:
1843 try:
1844 message = msg['Subject']
1844 message = msg['Subject']
1845 if message:
1845 if message:
1846 message = message.replace('\n\t', ' ')
1846 message = message.replace('\n\t', ' ')
1847 ui.debug('Subject: %s\n' % message)
1847 ui.debug('Subject: %s\n' % message)
1848 user = msg['From']
1848 user = msg['From']
1849 if user:
1849 if user:
1850 ui.debug('From: %s\n' % user)
1850 ui.debug('From: %s\n' % user)
1851 diffs_seen = 0
1851 diffs_seen = 0
1852 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1852 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
1853 for part in msg.walk():
1853 for part in msg.walk():
1854 content_type = part.get_content_type()
1854 content_type = part.get_content_type()
1855 ui.debug('Content-Type: %s\n' % content_type)
1855 ui.debug('Content-Type: %s\n' % content_type)
1856 if content_type not in ok_types:
1856 if content_type not in ok_types:
1857 continue
1857 continue
1858 payload = part.get_payload(decode=True)
1858 payload = part.get_payload(decode=True)
1859 m = diffre.search(payload)
1859 m = diffre.search(payload)
1860 if m:
1860 if m:
1861 ui.debug(_('found patch at byte %d\n') % m.start(0))
1861 ui.debug(_('found patch at byte %d\n') % m.start(0))
1862 diffs_seen += 1
1862 diffs_seen += 1
1863 hgpatch = False
1863 hgpatch = False
1864 fp = cStringIO.StringIO()
1864 fp = cStringIO.StringIO()
1865 if message:
1865 if message:
1866 fp.write(message)
1866 fp.write(message)
1867 fp.write('\n')
1867 fp.write('\n')
1868 for line in payload[:m.start(0)].splitlines():
1868 for line in payload[:m.start(0)].splitlines():
1869 if line.startswith('# HG changeset patch'):
1869 if line.startswith('# HG changeset patch'):
1870 ui.debug(_('patch generated by hg export\n'))
1870 ui.debug(_('patch generated by hg export\n'))
1871 hgpatch = True
1871 hgpatch = True
1872 # drop earlier commit message content
1872 # drop earlier commit message content
1873 fp.seek(0)
1873 fp.seek(0)
1874 fp.truncate()
1874 fp.truncate()
1875 elif hgpatch:
1875 elif hgpatch:
1876 if line.startswith('# User '):
1876 if line.startswith('# User '):
1877 user = line[7:]
1877 user = line[7:]
1878 ui.debug('From: %s\n' % user)
1878 ui.debug('From: %s\n' % user)
1879 elif line.startswith("# Date "):
1879 elif line.startswith("# Date "):
1880 date = line[7:]
1880 date = line[7:]
1881 if not line.startswith('# '):
1881 if not line.startswith('# '):
1882 fp.write(line)
1882 fp.write(line)
1883 fp.write('\n')
1883 fp.write('\n')
1884 message = fp.getvalue()
1884 message = fp.getvalue()
1885 if tmpfp:
1885 if tmpfp:
1886 tmpfp.write(payload)
1886 tmpfp.write(payload)
1887 if not payload.endswith('\n'):
1887 if not payload.endswith('\n'):
1888 tmpfp.write('\n')
1888 tmpfp.write('\n')
1889 elif not diffs_seen and message and content_type == 'text/plain':
1889 elif not diffs_seen and message and content_type == 'text/plain':
1890 message += '\n' + payload
1890 message += '\n' + payload
1891
1891
1892 if opts['message']:
1892 if opts['message']:
1893 # pickup the cmdline msg
1893 # pickup the cmdline msg
1894 message = opts['message']
1894 message = opts['message']
1895 elif message:
1895 elif message:
1896 # pickup the patch msg
1896 # pickup the patch msg
1897 message = message.strip()
1897 message = message.strip()
1898 else:
1898 else:
1899 # launch the editor
1899 # launch the editor
1900 message = None
1900 message = None
1901 ui.debug(_('message:\n%s\n') % message)
1901 ui.debug(_('message:\n%s\n') % message)
1902
1902
1903 tmpfp.close()
1903 tmpfp.close()
1904 if not diffs_seen:
1904 if not diffs_seen:
1905 raise util.Abort(_('no diffs found'))
1905 raise util.Abort(_('no diffs found'))
1906
1906
1907 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1907 files = util.patch(strip, tmpname, ui, cwd=repo.root)
1908 if len(files) > 0:
1908 if len(files) > 0:
1909 cfiles = files
1909 cfiles = files
1910 cwd = repo.getcwd()
1910 cwd = repo.getcwd()
1911 if cwd:
1911 if cwd:
1912 cfiles = [util.pathto(cwd, f) for f in files]
1912 cfiles = [util.pathto(cwd, f) for f in files]
1913 addremove_lock(ui, repo, cfiles, {})
1913 addremove_lock(ui, repo, cfiles, {})
1914 repo.commit(files, message, user, date)
1914 repo.commit(files, message, user, date)
1915 finally:
1915 finally:
1916 os.unlink(tmpname)
1916 os.unlink(tmpname)
1917
1917
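# Illustration (not part of the original source): the quilt-style patch-start
# heuristic used above, applied to a hypothetical mail body; everything before
# the first match is treated as commit message text.
def splitmail_example(body):
    diffstart = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                           r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                           r'(---|\*\*\*)[ \t])', re.MULTILINE)
    m = diffstart.search(body)
    if not m:
        return body, ''
    return body[:m.start(0)], body[m.start(0):]
# splitmail_example("Fix the frobnicator.\n\ndiff -r 000000000000 foo.py\n")
# -> ('Fix the frobnicator.\n\n', 'diff -r 000000000000 foo.py\n')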
1918 def incoming(ui, repo, source="default", **opts):
1918 def incoming(ui, repo, source="default", **opts):
1919 """show new changesets found in source
1919 """show new changesets found in source
1920
1920
1921 Show new changesets found in the specified path/URL or the default
1921 Show new changesets found in the specified path/URL or the default
1922 pull location. These are the changesets that would be pulled if a pull
1922 pull location. These are the changesets that would be pulled if a pull
1923 was requested.
1923 was requested.
1924
1924
1925 For remote repositories, using --bundle avoids downloading the changesets
1925 For remote repositories, using --bundle avoids downloading the changesets
1926 twice if the incoming is followed by a pull.
1926 twice if the incoming is followed by a pull.
1927
1927
1928 See pull for valid source format details.
1928 See pull for valid source format details.
1929 """
1929 """
1930 source = ui.expandpath(source)
1930 source = ui.expandpath(source)
1931 setremoteconfig(ui, opts)
1931 setremoteconfig(ui, opts)
1932
1932
1933 other = hg.repository(ui, source)
1933 other = hg.repository(ui, source)
1934 incoming = repo.findincoming(other, force=opts["force"])
1934 incoming = repo.findincoming(other, force=opts["force"])
1935 if not incoming:
1935 if not incoming:
1936 ui.status(_("no changes found\n"))
1936 ui.status(_("no changes found\n"))
1937 return
1937 return
1938
1938
1939 cleanup = None
1939 cleanup = None
1940 try:
1940 try:
1941 fname = opts["bundle"]
1941 fname = opts["bundle"]
1942 if fname or not other.local():
1942 if fname or not other.local():
1943 # create a bundle (uncompressed if other repo is not local)
1943 # create a bundle (uncompressed if other repo is not local)
1944 cg = other.changegroup(incoming, "incoming")
1944 cg = other.changegroup(incoming, "incoming")
1945 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1945 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1946 # keep written bundle?
1946 # keep written bundle?
1947 if opts["bundle"]:
1947 if opts["bundle"]:
1948 cleanup = None
1948 cleanup = None
1949 if not other.local():
1949 if not other.local():
1950 # use the created uncompressed bundlerepo
1950 # use the created uncompressed bundlerepo
1951 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1951 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1952
1952
1953 revs = None
1953 revs = None
1954 if opts['rev']:
1954 if opts['rev']:
1955 revs = [other.lookup(rev) for rev in opts['rev']]
1955 revs = [other.lookup(rev) for rev in opts['rev']]
1956 o = other.changelog.nodesbetween(incoming, revs)[0]
1956 o = other.changelog.nodesbetween(incoming, revs)[0]
1957 if opts['newest_first']:
1957 if opts['newest_first']:
1958 o.reverse()
1958 o.reverse()
1959 displayer = show_changeset(ui, other, opts)
1959 displayer = show_changeset(ui, other, opts)
1960 for n in o:
1960 for n in o:
1961 parents = [p for p in other.changelog.parents(n) if p != nullid]
1961 parents = [p for p in other.changelog.parents(n) if p != nullid]
1962 if opts['no_merges'] and len(parents) == 2:
1962 if opts['no_merges'] and len(parents) == 2:
1963 continue
1963 continue
1964 displayer.show(changenode=n)
1964 displayer.show(changenode=n)
1965 if opts['patch']:
1965 if opts['patch']:
1966 prev = (parents and parents[0]) or nullid
1966 prev = (parents and parents[0]) or nullid
1967 dodiff(ui, ui, other, prev, n)
1967 dodiff(ui, ui, other, prev, n)
1968 ui.write("\n")
1968 ui.write("\n")
1969 finally:
1969 finally:
1970 if hasattr(other, 'close'):
1970 if hasattr(other, 'close'):
1971 other.close()
1971 other.close()
1972 if cleanup:
1972 if cleanup:
1973 os.unlink(cleanup)
1973 os.unlink(cleanup)
1974
1974
1975 def init(ui, dest=".", **opts):
1975 def init(ui, dest=".", **opts):
1976 """create a new repository in the given directory
1976 """create a new repository in the given directory
1977
1977
1978 Initialize a new repository in the given directory. If the given
1978 Initialize a new repository in the given directory. If the given
1979 directory does not exist, it is created.
1979 directory does not exist, it is created.
1980
1980
1981 If no directory is given, the current directory is used.
1981 If no directory is given, the current directory is used.
1982
1982
1983 It is possible to specify an ssh:// URL as the destination.
1983 It is possible to specify an ssh:// URL as the destination.
1984 Look at the help text for the pull command for important details
1984 Look at the help text for the pull command for important details
1985 about ssh:// URLs.
1985 about ssh:// URLs.
1986 """
1986 """
1987 setremoteconfig(ui, opts)
1987 setremoteconfig(ui, opts)
1988 hg.repository(ui, dest, create=1)
1988 hg.repository(ui, dest, create=1)
1989
1989
1990 def locate(ui, repo, *pats, **opts):
1990 def locate(ui, repo, *pats, **opts):
1991 """locate files matching specific patterns
1991 """locate files matching specific patterns
1992
1992
1993 Print all files under Mercurial control whose names match the
1993 Print all files under Mercurial control whose names match the
1994 given patterns.
1994 given patterns.
1995
1995
1996 This command searches the current directory and its
1996 This command searches the current directory and its
1997 subdirectories. To search an entire repository, move to the root
1997 subdirectories. To search an entire repository, move to the root
1998 of the repository.
1998 of the repository.
1999
1999
2000 If no patterns are given to match, this command prints all file
2000 If no patterns are given to match, this command prints all file
2001 names.
2001 names.
2002
2002
2003 If you want to feed the output of this command into the "xargs"
2003 If you want to feed the output of this command into the "xargs"
2004 command, use the "-0" option to both this command and "xargs".
2004 command, use the "-0" option to both this command and "xargs".
2005 This will avoid the problem of "xargs" treating single filenames
2005 This will avoid the problem of "xargs" treating single filenames
2006 that contain white space as multiple filenames.
2006 that contain white space as multiple filenames.
2007 """
2007 """
2008 end = opts['print0'] and '\0' or '\n'
2008 end = opts['print0'] and '\0' or '\n'
2009 rev = opts['rev']
2009 rev = opts['rev']
2010 if rev:
2010 if rev:
2011 node = repo.lookup(rev)
2011 node = repo.lookup(rev)
2012 else:
2012 else:
2013 node = None
2013 node = None
2014
2014
2015 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2015 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2016 head='(?:.*/|)'):
2016 head='(?:.*/|)'):
2017 if not node and repo.dirstate.state(abs) == '?':
2017 if not node and repo.dirstate.state(abs) == '?':
2018 continue
2018 continue
2019 if opts['fullpath']:
2019 if opts['fullpath']:
2020 ui.write(os.path.join(repo.root, abs), end)
2020 ui.write(os.path.join(repo.root, abs), end)
2021 else:
2021 else:
2022 ui.write(((pats and rel) or abs), end)
2022 ui.write(((pats and rel) or abs), end)
2023
2023
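# Illustration (not part of the original source): the -0/xargs pairing
# described in the docstring above, shown as hypothetical shell usage.
#
#   hg locate -0 '*.c' | xargs -0 grep -l frobnicate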
2024 def log(ui, repo, *pats, **opts):
2024 def log(ui, repo, *pats, **opts):
2025 """show revision history of entire repository or files
2025 """show revision history of entire repository or files
2026
2026
2027 Print the revision history of the specified files or the entire
2027 Print the revision history of the specified files or the entire
2028 project.
2028 project.
2029
2029
2030 File history is shown without following rename or copy history of
2030 File history is shown without following rename or copy history of
2031 files. Use -f/--follow with a file name to follow history across
2031 files. Use -f/--follow with a file name to follow history across
2032 renames and copies. --follow without a file name will only show
2032 renames and copies. --follow without a file name will only show
2033 ancestors or descendants of the starting revision. --follow-first
2033 ancestors or descendants of the starting revision. --follow-first
2034 only follows the first parent of merge revisions.
2034 only follows the first parent of merge revisions.
2035
2035
2036 If no revision range is specified, the default is tip:0 unless
2036 If no revision range is specified, the default is tip:0 unless
2037 --follow is set, in which case the working directory parent is
2037 --follow is set, in which case the working directory parent is
2038 used as the starting revision.
2038 used as the starting revision.
2039
2039
2040 By default this command outputs: changeset id and hash, tags,
2040 By default this command outputs: changeset id and hash, tags,
2041 non-trivial parents, user, date and time, and a summary for each
2041 non-trivial parents, user, date and time, and a summary for each
2042 commit. When the -v/--verbose switch is used, the list of changed
2042 commit. When the -v/--verbose switch is used, the list of changed
2043 files and full commit message is shown.
2043 files and full commit message is shown.
2044 """
2044 """
2045 class dui(object):
2045 class dui(object):
2046 # Implement and delegate some ui protocol. Save hunks of
2046 # Implement and delegate some ui protocol. Save hunks of
2047 # output for later display in the desired order.
2047 # output for later display in the desired order.
2048 def __init__(self, ui):
2048 def __init__(self, ui):
2049 self.ui = ui
2049 self.ui = ui
2050 self.hunk = {}
2050 self.hunk = {}
2051 self.header = {}
2051 self.header = {}
2052 def bump(self, rev):
2052 def bump(self, rev):
2053 self.rev = rev
2053 self.rev = rev
2054 self.hunk[rev] = []
2054 self.hunk[rev] = []
2055 self.header[rev] = []
2055 self.header[rev] = []
2056 def note(self, *args):
2056 def note(self, *args):
2057 if self.verbose:
2057 if self.verbose:
2058 self.write(*args)
2058 self.write(*args)
2059 def status(self, *args):
2059 def status(self, *args):
2060 if not self.quiet:
2060 if not self.quiet:
2061 self.write(*args)
2061 self.write(*args)
2062 def write(self, *args):
2062 def write(self, *args):
2063 self.hunk[self.rev].append(args)
2063 self.hunk[self.rev].append(args)
2064 def write_header(self, *args):
2064 def write_header(self, *args):
2065 self.header[self.rev].append(args)
2065 self.header[self.rev].append(args)
2066 def debug(self, *args):
2066 def debug(self, *args):
2067 if self.debugflag:
2067 if self.debugflag:
2068 self.write(*args)
2068 self.write(*args)
2069 def __getattr__(self, key):
2069 def __getattr__(self, key):
2070 return getattr(self.ui, key)
2070 return getattr(self.ui, key)
2071
2071
2072 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2072 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
2073
2073
2074 if opts['limit']:
2074 if opts['limit']:
2075 try:
2075 try:
2076 limit = int(opts['limit'])
2076 limit = int(opts['limit'])
2077 except ValueError:
2077 except ValueError:
2078 raise util.Abort(_('limit must be a positive integer'))
2078 raise util.Abort(_('limit must be a positive integer'))
2079 if limit <= 0: raise util.Abort(_('limit must be positive'))
2079 if limit <= 0: raise util.Abort(_('limit must be positive'))
2080 else:
2080 else:
2081 limit = sys.maxint
2081 limit = sys.maxint
2082 count = 0
2082 count = 0
2083
2083
2084 displayer = show_changeset(ui, repo, opts)
2084 displayer = show_changeset(ui, repo, opts)
2085 for st, rev, fns in changeiter:
2085 for st, rev, fns in changeiter:
2086 if st == 'window':
2086 if st == 'window':
2087 du = dui(ui)
2087 du = dui(ui)
2088 displayer.ui = du
2088 displayer.ui = du
2089 elif st == 'add':
2089 elif st == 'add':
2090 du.bump(rev)
2090 du.bump(rev)
2091 changenode = repo.changelog.node(rev)
2091 changenode = repo.changelog.node(rev)
2092 parents = [p for p in repo.changelog.parents(changenode)
2092 parents = [p for p in repo.changelog.parents(changenode)
2093 if p != nullid]
2093 if p != nullid]
2094 if opts['no_merges'] and len(parents) == 2:
2094 if opts['no_merges'] and len(parents) == 2:
2095 continue
2095 continue
2096 if opts['only_merges'] and len(parents) != 2:
2096 if opts['only_merges'] and len(parents) != 2:
2097 continue
2097 continue
2098
2098
2099 if opts['keyword']:
2099 if opts['keyword']:
2100 changes = getchange(rev)
2100 changes = getchange(rev)
2101 miss = 0
2101 miss = 0
2102 for k in [kw.lower() for kw in opts['keyword']]:
2102 for k in [kw.lower() for kw in opts['keyword']]:
2103 if not (k in changes[1].lower() or
2103 if not (k in changes[1].lower() or
2104 k in changes[4].lower() or
2104 k in changes[4].lower() or
2105 k in " ".join(changes[3][:20]).lower()):
2105 k in " ".join(changes[3][:20]).lower()):
2106 miss = 1
2106 miss = 1
2107 break
2107 break
2108 if miss:
2108 if miss:
2109 continue
2109 continue
2110
2110
2111 br = None
2111 br = None
2112 if opts['branches']:
2112 if opts['branches']:
2113 br = repo.branchlookup([repo.changelog.node(rev)])
2113 br = repo.branchlookup([repo.changelog.node(rev)])
2114
2114
2115 displayer.show(rev, brinfo=br)
2115 displayer.show(rev, brinfo=br)
2116 if opts['patch']:
2116 if opts['patch']:
2117 prev = (parents and parents[0]) or nullid
2117 prev = (parents and parents[0]) or nullid
2118 dodiff(du, du, repo, prev, changenode, match=matchfn)
2118 dodiff(du, du, repo, prev, changenode, match=matchfn)
2119 du.write("\n\n")
2119 du.write("\n\n")
2120 elif st == 'iter':
2120 elif st == 'iter':
2121 if count == limit: break
2121 if count == limit: break
2122 if du.header[rev]:
2122 if du.header[rev]:
2123 for args in du.header[rev]:
2123 for args in du.header[rev]:
2124 ui.write_header(*args)
2124 ui.write_header(*args)
2125 if du.hunk[rev]:
2125 if du.hunk[rev]:
2126 count += 1
2126 count += 1
2127 for args in du.hunk[rev]:
2127 for args in du.hunk[rev]:
2128 ui.write(*args)
2128 ui.write(*args)
2129
2129
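# Illustration (not part of the original source): a minimal standalone version
# of the buffering-proxy idea used by dui above -- capture write() output,
# delegate every other attribute to the wrapped ui object.
class bufferingproxy_example(object):
    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.hunk = []
    def write(self, *args):
        self.hunk.append(args)
    def __getattr__(self, key):
        return getattr(self.wrapped, key)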
2130 def manifest(ui, repo, rev=None):
2130 def manifest(ui, repo, rev=None):
2131 """output the latest or given revision of the project manifest
2131 """output the latest or given revision of the project manifest
2132
2132
2133 Print a list of version controlled files for the given revision.
2133 Print a list of version controlled files for the given revision.
2134
2134
2135 The manifest is the list of files being version controlled. If no revision
2135 The manifest is the list of files being version controlled. If no revision
2136 is given then the tip is used.
2136 is given then the tip is used.
2137 """
2137 """
2138 if rev:
2138 if rev:
2139 try:
2139 try:
2140 # assume all revision numbers are for changesets
2140 # assume all revision numbers are for changesets
2141 n = repo.lookup(rev)
2141 n = repo.lookup(rev)
2142 change = repo.changelog.read(n)
2142 change = repo.changelog.read(n)
2143 n = change[0]
2143 n = change[0]
2144 except hg.RepoError:
2144 except hg.RepoError:
2145 n = repo.manifest.lookup(rev)
2145 n = repo.manifest.lookup(rev)
2146 else:
2146 else:
2147 n = repo.manifest.tip()
2147 n = repo.manifest.tip()
2148 m = repo.manifest.read(n)
2148 m = repo.manifest.read(n)
2149 mf = repo.manifest.readflags(n)
2150 files = m.keys()
2149 files = m.keys()
2151 files.sort()
2150 files.sort()
2152
2151
2153 for f in files:
2152 for f in files:
2154 ui.write("%40s %3s %s\n" % (hex(m[f]),
2153 ui.write("%40s %3s %s\n" % (hex(m[f]),
2155 mf.execf(f) and "755" or "644", f))
2154 m.execf(f) and "755" or "644", f))
2156
2155
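# Illustration (not part of the original source): sample of the manifest
# output produced above ("%40s %3s %s"), with hypothetical file node hashes.
#
#   2ed2a3912a0b24502043eae84ee4b279c18b90dd 755 contrib/hgk
#   2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 mercurial/commands.py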
2157 def merge(ui, repo, node=None, force=None, branch=None):
2156 def merge(ui, repo, node=None, force=None, branch=None):
2158 """Merge working directory with another revision
2157 """Merge working directory with another revision
2159
2158
2160 Merge the contents of the current working directory and the
2159 Merge the contents of the current working directory and the
2161 requested revision. Files that changed between either parent are
2160 requested revision. Files that changed between either parent are
2162 marked as changed for the next commit and a commit must be
2161 marked as changed for the next commit and a commit must be
2163 performed before any further updates are allowed.
2162 performed before any further updates are allowed.
2164 """
2163 """
2165
2164
2166 node = _lookup(repo, node, branch)
2165 node = _lookup(repo, node, branch)
2167 return hg.merge(repo, node, force=force)
2166 return hg.merge(repo, node, force=force)
2168
2167
2169 def outgoing(ui, repo, dest=None, **opts):
2168 def outgoing(ui, repo, dest=None, **opts):
2170 """show changesets not found in destination
2169 """show changesets not found in destination
2171
2170
2172 Show changesets not found in the specified destination repository or
2171 Show changesets not found in the specified destination repository or
2173 the default push location. These are the changesets that would be pushed
2172 the default push location. These are the changesets that would be pushed
2174 if a push was requested.
2173 if a push was requested.
2175
2174
2176 See pull for valid destination format details.
2175 See pull for valid destination format details.
2177 """
2176 """
2178 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2177 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2179 setremoteconfig(ui, opts)
2178 setremoteconfig(ui, opts)
2180 revs = None
2179 revs = None
2181 if opts['rev']:
2180 if opts['rev']:
2182 revs = [repo.lookup(rev) for rev in opts['rev']]
2181 revs = [repo.lookup(rev) for rev in opts['rev']]
2183
2182
2184 other = hg.repository(ui, dest)
2183 other = hg.repository(ui, dest)
2185 o = repo.findoutgoing(other, force=opts['force'])
2184 o = repo.findoutgoing(other, force=opts['force'])
2186 if not o:
2185 if not o:
2187 ui.status(_("no changes found\n"))
2186 ui.status(_("no changes found\n"))
2188 return
2187 return
2189 o = repo.changelog.nodesbetween(o, revs)[0]
2188 o = repo.changelog.nodesbetween(o, revs)[0]
2190 if opts['newest_first']:
2189 if opts['newest_first']:
2191 o.reverse()
2190 o.reverse()
2192 displayer = show_changeset(ui, repo, opts)
2191 displayer = show_changeset(ui, repo, opts)
2193 for n in o:
2192 for n in o:
2194 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2193 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2195 if opts['no_merges'] and len(parents) == 2:
2194 if opts['no_merges'] and len(parents) == 2:
2196 continue
2195 continue
2197 displayer.show(changenode=n)
2196 displayer.show(changenode=n)
2198 if opts['patch']:
2197 if opts['patch']:
2199 prev = (parents and parents[0]) or nullid
2198 prev = (parents and parents[0]) or nullid
2200 dodiff(ui, ui, repo, prev, n)
2199 dodiff(ui, ui, repo, prev, n)
2201 ui.write("\n")
2200 ui.write("\n")
2202
2201
2203 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2202 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2204 """show the parents of the working dir or revision
2203 """show the parents of the working dir or revision
2205
2204
2206 Print the working directory's parent revisions.
2205 Print the working directory's parent revisions.
2207 """
2206 """
2208 # legacy
2207 # legacy
2209 if file_ and not rev:
2208 if file_ and not rev:
2210 try:
2209 try:
2211 rev = repo.lookup(file_)
2210 rev = repo.lookup(file_)
2212 file_ = None
2211 file_ = None
2213 except hg.RepoError:
2212 except hg.RepoError:
2214 pass
2213 pass
2215 else:
2214 else:
2216 ui.warn(_("'hg parent REV' is deprecated, "
2215 ui.warn(_("'hg parent REV' is deprecated, "
2217 "please use 'hg parents -r REV instead\n"))
2216 "please use 'hg parents -r REV instead\n"))
2218
2217
2219 if rev:
2218 if rev:
2220 if file_:
2219 if file_:
2221 ctx = repo.filectx(file_, changeid=rev)
2220 ctx = repo.filectx(file_, changeid=rev)
2222 else:
2221 else:
2223 ctx = repo.changectx(rev)
2222 ctx = repo.changectx(rev)
2224 p = [cp.node() for cp in ctx.parents()]
2223 p = [cp.node() for cp in ctx.parents()]
2225 else:
2224 else:
2226 p = repo.dirstate.parents()
2225 p = repo.dirstate.parents()
2227
2226
2228 br = None
2227 br = None
2229 if branches is not None:
2228 if branches is not None:
2230 br = repo.branchlookup(p)
2229 br = repo.branchlookup(p)
2231 displayer = show_changeset(ui, repo, opts)
2230 displayer = show_changeset(ui, repo, opts)
2232 for n in p:
2231 for n in p:
2233 if n != nullid:
2232 if n != nullid:
2234 displayer.show(changenode=n, brinfo=br)
2233 displayer.show(changenode=n, brinfo=br)
2235
2234
2236 def paths(ui, repo, search=None):
2235 def paths(ui, repo, search=None):
2237 """show definition of symbolic path names
2236 """show definition of symbolic path names
2238
2237
2239 Show definition of symbolic path name NAME. If no name is given, show
2238 Show definition of symbolic path name NAME. If no name is given, show
2240 definition of available names.
2239 definition of available names.
2241
2240
2242 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2241 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2243 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2242 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2244 """
2243 """
2245 if search:
2244 if search:
2246 for name, path in ui.configitems("paths"):
2245 for name, path in ui.configitems("paths"):
2247 if name == search:
2246 if name == search:
2248 ui.write("%s\n" % path)
2247 ui.write("%s\n" % path)
2249 return
2248 return
2250 ui.warn(_("not found!\n"))
2249 ui.warn(_("not found!\n"))
2251 return 1
2250 return 1
2252 else:
2251 else:
2253 for name, path in ui.configitems("paths"):
2252 for name, path in ui.configitems("paths"):
2254 ui.write("%s = %s\n" % (name, path))
2253 ui.write("%s = %s\n" % (name, path))
2255
2254
2256 def postincoming(ui, repo, modheads, optupdate):
2255 def postincoming(ui, repo, modheads, optupdate):
2257 if modheads == 0:
2256 if modheads == 0:
2258 return
2257 return
2259 if optupdate:
2258 if optupdate:
2260 if modheads == 1:
2259 if modheads == 1:
2261 return hg.update(repo, repo.changelog.tip()) # update
2260 return hg.update(repo, repo.changelog.tip()) # update
2262 else:
2261 else:
2263 ui.status(_("not updating, since new heads added\n"))
2262 ui.status(_("not updating, since new heads added\n"))
2264 if modheads > 1:
2263 if modheads > 1:
2265 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2264 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2266 else:
2265 else:
2267 ui.status(_("(run 'hg update' to get a working copy)\n"))
2266 ui.status(_("(run 'hg update' to get a working copy)\n"))
2268
2267
2269 def pull(ui, repo, source="default", **opts):
2268 def pull(ui, repo, source="default", **opts):
2270 """pull changes from the specified source
2269 """pull changes from the specified source
2271
2270
2272 Pull changes from a remote repository to a local one.
2271 Pull changes from a remote repository to a local one.
2273
2272
2274 This finds all changes from the repository at the specified path
2273 This finds all changes from the repository at the specified path
2275 or URL and adds them to the local repository. By default, this
2274 or URL and adds them to the local repository. By default, this
2276 does not update the copy of the project in the working directory.
2275 does not update the copy of the project in the working directory.
2277
2276
2278 Valid URLs are of the form:
2277 Valid URLs are of the form:
2279
2278
2280 local/filesystem/path
2279 local/filesystem/path
2281 http://[user@]host[:port]/[path]
2280 http://[user@]host[:port]/[path]
2282 https://[user@]host[:port]/[path]
2281 https://[user@]host[:port]/[path]
2283 ssh://[user@]host[:port]/[path]
2282 ssh://[user@]host[:port]/[path]
2284
2283
2285 Some notes about using SSH with Mercurial:
2284 Some notes about using SSH with Mercurial:
2286 - SSH requires an accessible shell account on the destination machine
2285 - SSH requires an accessible shell account on the destination machine
2287 and a copy of hg in the remote path or specified with the --remotecmd option.
2286 and a copy of hg in the remote path or specified with the --remotecmd option.
2288 - path is relative to the remote user's home directory by default.
2287 - path is relative to the remote user's home directory by default.
2289 Use an extra slash at the start of a path to specify an absolute path:
2288 Use an extra slash at the start of a path to specify an absolute path:
2290 ssh://example.com//tmp/repository
2289 ssh://example.com//tmp/repository
2291 - Mercurial doesn't use its own compression via SSH; the right thing
2290 - Mercurial doesn't use its own compression via SSH; the right thing
2292 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2291 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2293 Host *.mylocalnetwork.example.com
2292 Host *.mylocalnetwork.example.com
2294 Compression off
2293 Compression off
2295 Host *
2294 Host *
2296 Compression on
2295 Compression on
2297 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2296 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2298 with the --ssh command line option.
2297 with the --ssh command line option.
2299 """
2298 """
2300 source = ui.expandpath(source)
2299 source = ui.expandpath(source)
2301 setremoteconfig(ui, opts)
2300 setremoteconfig(ui, opts)
2302
2301
2303 other = hg.repository(ui, source)
2302 other = hg.repository(ui, source)
2304 ui.status(_('pulling from %s\n') % (source))
2303 ui.status(_('pulling from %s\n') % (source))
2305 revs = None
2304 revs = None
2306 if opts['rev'] and not other.local():
2305 if opts['rev'] and not other.local():
2307 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2306 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2308 elif opts['rev']:
2307 elif opts['rev']:
2309 revs = [other.lookup(rev) for rev in opts['rev']]
2308 revs = [other.lookup(rev) for rev in opts['rev']]
2310 modheads = repo.pull(other, heads=revs, force=opts['force'])
2309 modheads = repo.pull(other, heads=revs, force=opts['force'])
2311 return postincoming(ui, repo, modheads, opts['update'])
2310 return postincoming(ui, repo, modheads, opts['update'])
2312
2311
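# Illustration (not part of the original source): the "ssh -C" alternative
# mentioned in the docstring above, written as an hgrc snippet (assuming the
# standard [ui] ssh setting):
#
#   [ui]
#   ssh = ssh -C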
2313 def push(ui, repo, dest=None, **opts):
2312 def push(ui, repo, dest=None, **opts):
2314 """push changes to the specified destination
2313 """push changes to the specified destination
2315
2314
2316 Push changes from the local repository to the given destination.
2315 Push changes from the local repository to the given destination.
2317
2316
2318 This is the symmetrical operation for pull. It helps to move
2317 This is the symmetrical operation for pull. It helps to move
2319 changes from the current repository to a different one. If the
2318 changes from the current repository to a different one. If the
2320 destination is local this is identical to a pull in that directory
2319 destination is local this is identical to a pull in that directory
2321 from the current one.
2320 from the current one.
2322
2321
2323 By default, push will refuse to run if it detects the result would
2322 By default, push will refuse to run if it detects the result would
2324 increase the number of remote heads. This generally indicates that
2323 increase the number of remote heads. This generally indicates that
2325 the client has forgotten to sync and merge before pushing.
2324 the client has forgotten to sync and merge before pushing.
2326
2325
2327 Valid URLs are of the form:
2326 Valid URLs are of the form:
2328
2327
2329 local/filesystem/path
2328 local/filesystem/path
2330 ssh://[user@]host[:port]/[path]
2329 ssh://[user@]host[:port]/[path]
2331
2330
2332 Look at the help text for the pull command for important details
2331 Look at the help text for the pull command for important details
2333 about ssh:// URLs.
2332 about ssh:// URLs.
2334
2333
2335 Pushing to http:// and https:// URLs is possible, too, if this
2334 Pushing to http:// and https:// URLs is possible, too, if this
2336 feature is enabled on the remote Mercurial server.
2335 feature is enabled on the remote Mercurial server.
2337 """
2336 """
2338 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2337 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2339 setremoteconfig(ui, opts)
2338 setremoteconfig(ui, opts)
2340
2339
2341 other = hg.repository(ui, dest)
2340 other = hg.repository(ui, dest)
2342 ui.status('pushing to %s\n' % (dest))
2341 ui.status('pushing to %s\n' % (dest))
2343 revs = None
2342 revs = None
2344 if opts['rev']:
2343 if opts['rev']:
2345 revs = [repo.lookup(rev) for rev in opts['rev']]
2344 revs = [repo.lookup(rev) for rev in opts['rev']]
2346 r = repo.push(other, opts['force'], revs=revs)
2345 r = repo.push(other, opts['force'], revs=revs)
2347 return r == 0
2346 return r == 0
2348
2347
2349 def rawcommit(ui, repo, *flist, **rc):
2348 def rawcommit(ui, repo, *flist, **rc):
2350 """raw commit interface (DEPRECATED)
2349 """raw commit interface (DEPRECATED)
2351
2350
2352 (DEPRECATED)
2351 (DEPRECATED)
2353 Low-level commit, for use in helper scripts.
2352 Low-level commit, for use in helper scripts.
2354
2353
2355 This command is not intended to be used by normal users, as it is
2354 This command is not intended to be used by normal users, as it is
2356 primarily useful for importing from other SCMs.
2355 primarily useful for importing from other SCMs.
2357
2356
2358 This command is now deprecated and will be removed in a future
2357 This command is now deprecated and will be removed in a future
2359 release. Please use debugsetparents and commit instead.
2358 release. Please use debugsetparents and commit instead.
2360 """
2359 """
2361
2360
2362 ui.warn(_("(the rawcommit command is deprecated)\n"))
2361 ui.warn(_("(the rawcommit command is deprecated)\n"))
2363
2362
2364 message = rc['message']
2363 message = rc['message']
2365 if not message and rc['logfile']:
2364 if not message and rc['logfile']:
2366 try:
2365 try:
2367 message = open(rc['logfile']).read()
2366 message = open(rc['logfile']).read()
2368 except IOError:
2367 except IOError:
2369 pass
2368 pass
2370 if not message and not rc['logfile']:
2369 if not message and not rc['logfile']:
2371 raise util.Abort(_("missing commit message"))
2370 raise util.Abort(_("missing commit message"))
2372
2371
2373 files = relpath(repo, list(flist))
2372 files = relpath(repo, list(flist))
2374 if rc['files']:
2373 if rc['files']:
2375 files += open(rc['files']).read().splitlines()
2374 files += open(rc['files']).read().splitlines()
2376
2375
2377 rc['parent'] = map(repo.lookup, rc['parent'])
2376 rc['parent'] = map(repo.lookup, rc['parent'])
2378
2377
2379 try:
2378 try:
2380 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2379 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2381 except ValueError, inst:
2380 except ValueError, inst:
2382 raise util.Abort(str(inst))
2381 raise util.Abort(str(inst))
2383
2382
2384 def recover(ui, repo):
2383 def recover(ui, repo):
2385 """roll back an interrupted transaction
2384 """roll back an interrupted transaction
2386
2385
2387 Recover from an interrupted commit or pull.
2386 Recover from an interrupted commit or pull.
2388
2387
2389 This command tries to fix the repository status after an interrupted
2388 This command tries to fix the repository status after an interrupted
2390 operation. It should only be necessary when Mercurial suggests it.
2389 operation. It should only be necessary when Mercurial suggests it.
2391 """
2390 """
2392 if repo.recover():
2391 if repo.recover():
2393 return hg.verify(repo)
2392 return hg.verify(repo)
2394 return 1
2393 return 1
2395
2394
2396 def remove(ui, repo, *pats, **opts):
2395 def remove(ui, repo, *pats, **opts):
2397 """remove the specified files on the next commit
2396 """remove the specified files on the next commit
2398
2397
2399 Schedule the indicated files for removal from the repository.
2398 Schedule the indicated files for removal from the repository.
2400
2399
2401 This command schedules the files to be removed at the next commit.
2400 This command schedules the files to be removed at the next commit.
2402 This only removes files from the current branch, not from the
2401 This only removes files from the current branch, not from the
2403 entire project history. If the files still exist in the working
2402 entire project history. If the files still exist in the working
2404 directory, they will be deleted from it. If invoked with --after,
2403 directory, they will be deleted from it. If invoked with --after,
2405 files that have been manually deleted are marked as removed.
2404 files that have been manually deleted are marked as removed.
2406
2405
2407 Modified files and added files are not removed by default. To
2406 Modified files and added files are not removed by default. To
2408 remove them, use the -f/--force option.
2407 remove them, use the -f/--force option.
2409 """
2408 """
2410 names = []
2409 names = []
2411 if not opts['after'] and not pats:
2410 if not opts['after'] and not pats:
2412 raise util.Abort(_('no files specified'))
2411 raise util.Abort(_('no files specified'))
2413 files, matchfn, anypats = matchpats(repo, pats, opts)
2412 files, matchfn, anypats = matchpats(repo, pats, opts)
2414 exact = dict.fromkeys(files)
2413 exact = dict.fromkeys(files)
2415 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2414 mardu = map(dict.fromkeys, repo.changes(files=files, match=matchfn))
2416 modified, added, removed, deleted, unknown = mardu
2415 modified, added, removed, deleted, unknown = mardu
2417 remove, forget = [], []
2416 remove, forget = [], []
2418 for src, abs, rel, exact in walk(repo, pats, opts):
2417 for src, abs, rel, exact in walk(repo, pats, opts):
2419 reason = None
2418 reason = None
2420 if abs not in deleted and opts['after']:
2419 if abs not in deleted and opts['after']:
2421 reason = _('is still present')
2420 reason = _('is still present')
2422 elif abs in modified and not opts['force']:
2421 elif abs in modified and not opts['force']:
2423 reason = _('is modified (use -f to force removal)')
2422 reason = _('is modified (use -f to force removal)')
2424 elif abs in added:
2423 elif abs in added:
2425 if opts['force']:
2424 if opts['force']:
2426 forget.append(abs)
2425 forget.append(abs)
2427 continue
2426 continue
2428 reason = _('has been marked for add (use -f to force removal)')
2427 reason = _('has been marked for add (use -f to force removal)')
2429 elif abs in unknown:
2428 elif abs in unknown:
2430 reason = _('is not managed')
2429 reason = _('is not managed')
2431 elif abs in removed:
2430 elif abs in removed:
2432 continue
2431 continue
2433 if reason:
2432 if reason:
2434 if exact:
2433 if exact:
2435 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2434 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2436 else:
2435 else:
2437 if ui.verbose or not exact:
2436 if ui.verbose or not exact:
2438 ui.status(_('removing %s\n') % rel)
2437 ui.status(_('removing %s\n') % rel)
2439 remove.append(abs)
2438 remove.append(abs)
2440 repo.forget(forget)
2439 repo.forget(forget)
2441 repo.remove(remove, unlink=not opts['after'])
2440 repo.remove(remove, unlink=not opts['after'])
2442
2441
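# Illustrative sketch, not part of the original file: the per-file decision
# made by remove() above, reduced to a pure function.  classify() is a
# hypothetical name; modified, added, removed, deleted and unknown are sets of
# paths from the status walk, and the return value is (action, reason).
def classify(path, modified, added, removed, deleted, unknown, after, force):
    if after and path not in deleted:
        return 'skip', 'is still present'
    if path in modified and not force:
        return 'skip', 'is modified (use -f to force removal)'
    if path in added:
        if force:
            return 'forget', None
        return 'skip', 'has been marked for add (use -f to force removal)'
    if path in unknown:
        return 'skip', 'is not managed'
    if path in removed:
        return 'skip', None
    return 'remove', None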
2443 def rename(ui, repo, *pats, **opts):
2442 def rename(ui, repo, *pats, **opts):
2444 """rename files; equivalent of copy + remove
2443 """rename files; equivalent of copy + remove
2445
2444
2446 Mark dest as copies of sources; mark sources for deletion. If
2445 Mark dest as copies of sources; mark sources for deletion. If
2447 dest is a directory, copies are put in that directory. If dest is
2446 dest is a directory, copies are put in that directory. If dest is
2448 a file, there can only be one source.
2447 a file, there can only be one source.
2449
2448
2450 By default, this command copies the contents of files as they
2449 By default, this command copies the contents of files as they
2451 stand in the working directory. If invoked with --after, the
2450 stand in the working directory. If invoked with --after, the
2452 operation is recorded, but no copying is performed.
2451 operation is recorded, but no copying is performed.
2453
2452
2454 This command takes effect in the next commit.
2453 This command takes effect in the next commit.
2455
2454
2456 NOTE: This command should be treated as experimental. While it
2455 NOTE: This command should be treated as experimental. While it
2457 should properly record renamed files, this information is not yet
2456 should properly record renamed files, this information is not yet
2458 fully used by merge, nor fully reported by log.
2457 fully used by merge, nor fully reported by log.
2459 """
2458 """
2460 wlock = repo.wlock(0)
2459 wlock = repo.wlock(0)
2461 errs, copied = docopy(ui, repo, pats, opts, wlock)
2460 errs, copied = docopy(ui, repo, pats, opts, wlock)
2462 names = []
2461 names = []
2463 for abs, rel, exact in copied:
2462 for abs, rel, exact in copied:
2464 if ui.verbose or not exact:
2463 if ui.verbose or not exact:
2465 ui.status(_('removing %s\n') % rel)
2464 ui.status(_('removing %s\n') % rel)
2466 names.append(abs)
2465 names.append(abs)
2467 if not opts.get('dry_run'):
2466 if not opts.get('dry_run'):
2468 repo.remove(names, True, wlock)
2467 repo.remove(names, True, wlock)
2469 return errs
2468 return errs
2470
2469
2471 def revert(ui, repo, *pats, **opts):
2470 def revert(ui, repo, *pats, **opts):
2472 """revert files or dirs to their states as of some revision
2471 """revert files or dirs to their states as of some revision
2473
2472
2474 With no revision specified, revert the named files or directories
2473 With no revision specified, revert the named files or directories
2475 to the contents they had in the parent of the working directory.
2474 to the contents they had in the parent of the working directory.
2476 This restores the contents of the affected files to an unmodified
2475 This restores the contents of the affected files to an unmodified
2477 state. If the working directory has two parents, you must
2476 state. If the working directory has two parents, you must
2478 explicitly specify the revision to revert to.
2477 explicitly specify the revision to revert to.
2479
2478
2480 Modified files are saved with a .orig suffix before reverting.
2479 Modified files are saved with a .orig suffix before reverting.
2481 To disable these backups, use --no-backup.
2480 To disable these backups, use --no-backup.
2482
2481
2483 Using the -r option, revert the given files or directories to
2482 Using the -r option, revert the given files or directories to
2484 their contents as of a specific revision. This can be helpful to "roll
2483 their contents as of a specific revision. This can be helpful to "roll
2485 back" some or all of a change that should not have been committed.
2484 back" some or all of a change that should not have been committed.
2486
2485
2487 Revert modifies the working directory. It does not commit any
2486 Revert modifies the working directory. It does not commit any
2488 changes, or change the parent of the working directory. If you
2487 changes, or change the parent of the working directory. If you
2489 revert to a revision other than the parent of the working
2488 revert to a revision other than the parent of the working
2490 directory, the reverted files will thus appear modified
2489 directory, the reverted files will thus appear modified
2491 afterwards.
2490 afterwards.
2492
2491
2493 If a file has been deleted, it is recreated. If the executable
2492 If a file has been deleted, it is recreated. If the executable
2494 mode of a file was changed, it is reset.
2493 mode of a file was changed, it is reset.
2495
2494
2496 If names are given, all files matching the names are reverted.
2495 If names are given, all files matching the names are reverted.
2497
2496
2498 If no arguments are given, all files in the repository are reverted.
2497 If no arguments are given, all files in the repository are reverted.
2499 """
2498 """
2500 parent, p2 = repo.dirstate.parents()
2499 parent, p2 = repo.dirstate.parents()
2501 if opts['rev']:
2500 if opts['rev']:
2502 node = repo.lookup(opts['rev'])
2501 node = repo.lookup(opts['rev'])
2503 elif p2 != nullid:
2502 elif p2 != nullid:
2504 raise util.Abort(_('working dir has two parents; '
2503 raise util.Abort(_('working dir has two parents; '
2505 'you must specify the revision to revert to'))
2504 'you must specify the revision to revert to'))
2506 else:
2505 else:
2507 node = parent
2506 node = parent
2508 mf = repo.manifest.read(repo.changelog.read(node)[0])
2507 mf = repo.manifest.read(repo.changelog.read(node)[0])
2509 if node == parent:
2508 if node == parent:
2510 pmf = mf
2509 pmf = mf
2511 else:
2510 else:
2512 pmf = None
2511 pmf = None
2513
2512
2514 wlock = repo.wlock()
2513 wlock = repo.wlock()
2515
2514
2516 # need all matching names in dirstate and manifest of target rev,
2515 # need all matching names in dirstate and manifest of target rev,
2517 # so have to walk both. do not print errors if files exist in one
2516 # so have to walk both. do not print errors if files exist in one
2518 # but not other.
2517 # but not other.
2519
2518
2520 names = {}
2519 names = {}
2521 target_only = {}
2520 target_only = {}
2522
2521
2523 # walk dirstate.
2522 # walk dirstate.
2524
2523
2525 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2524 for src, abs, rel, exact in walk(repo, pats, opts, badmatch=mf.has_key):
2526 names[abs] = (rel, exact)
2525 names[abs] = (rel, exact)
2527 if src == 'b':
2526 if src == 'b':
2528 target_only[abs] = True
2527 target_only[abs] = True
2529
2528
2530 # walk target manifest.
2529 # walk target manifest.
2531
2530
2532 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2531 for src, abs, rel, exact in walk(repo, pats, opts, node=node,
2533 badmatch=names.has_key):
2532 badmatch=names.has_key):
2534 if abs in names: continue
2533 if abs in names: continue
2535 names[abs] = (rel, exact)
2534 names[abs] = (rel, exact)
2536 target_only[abs] = True
2535 target_only[abs] = True
2537
2536
2538 changes = repo.changes(match=names.has_key, wlock=wlock)
2537 changes = repo.changes(match=names.has_key, wlock=wlock)
2539 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2538 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2540
2539
2541 revert = ([], _('reverting %s\n'))
2540 revert = ([], _('reverting %s\n'))
2542 add = ([], _('adding %s\n'))
2541 add = ([], _('adding %s\n'))
2543 remove = ([], _('removing %s\n'))
2542 remove = ([], _('removing %s\n'))
2544 forget = ([], _('forgetting %s\n'))
2543 forget = ([], _('forgetting %s\n'))
2545 undelete = ([], _('undeleting %s\n'))
2544 undelete = ([], _('undeleting %s\n'))
2546 update = {}
2545 update = {}
2547
2546
2548 disptable = (
2547 disptable = (
2549 # dispatch table:
2548 # dispatch table:
2550 # file state
2549 # file state
2551 # action if in target manifest
2550 # action if in target manifest
2552 # action if not in target manifest
2551 # action if not in target manifest
2553 # make backup if in target manifest
2552 # make backup if in target manifest
2554 # make backup if not in target manifest
2553 # make backup if not in target manifest
2555 (modified, revert, remove, True, True),
2554 (modified, revert, remove, True, True),
2556 (added, revert, forget, True, False),
2555 (added, revert, forget, True, False),
2557 (removed, undelete, None, False, False),
2556 (removed, undelete, None, False, False),
2558 (deleted, revert, remove, False, False),
2557 (deleted, revert, remove, False, False),
2559 (unknown, add, None, True, False),
2558 (unknown, add, None, True, False),
2560 (target_only, add, None, False, False),
2559 (target_only, add, None, False, False),
2561 )
2560 )
2562
2561
2563 entries = names.items()
2562 entries = names.items()
2564 entries.sort()
2563 entries.sort()
2565
2564
2566 for abs, (rel, exact) in entries:
2565 for abs, (rel, exact) in entries:
2567 mfentry = mf.get(abs)
2566 mfentry = mf.get(abs)
2568 def handle(xlist, dobackup):
2567 def handle(xlist, dobackup):
2569 xlist[0].append(abs)
2568 xlist[0].append(abs)
2570 update[abs] = 1
2569 update[abs] = 1
2571 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2570 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2572 bakname = "%s.orig" % rel
2571 bakname = "%s.orig" % rel
2573 ui.note(_('saving current version of %s as %s\n') %
2572 ui.note(_('saving current version of %s as %s\n') %
2574 (rel, bakname))
2573 (rel, bakname))
2575 if not opts.get('dry_run'):
2574 if not opts.get('dry_run'):
2576 shutil.copyfile(rel, bakname)
2575 shutil.copyfile(rel, bakname)
2577 shutil.copymode(rel, bakname)
2576 shutil.copymode(rel, bakname)
2578 if ui.verbose or not exact:
2577 if ui.verbose or not exact:
2579 ui.status(xlist[1] % rel)
2578 ui.status(xlist[1] % rel)
2580 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2579 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2581 if abs not in table: continue
2580 if abs not in table: continue
2582 # file has changed in dirstate
2581 # file has changed in dirstate
2583 if mfentry:
2582 if mfentry:
2584 handle(hitlist, backuphit)
2583 handle(hitlist, backuphit)
2585 elif misslist is not None:
2584 elif misslist is not None:
2586 handle(misslist, backupmiss)
2585 handle(misslist, backupmiss)
2587 else:
2586 else:
2588 if exact: ui.warn(_('file not managed: %s\n') % rel)
2587 if exact: ui.warn(_('file not managed: %s\n') % rel)
2589 break
2588 break
2590 else:
2589 else:
2591 # file has not changed in dirstate
2590 # file has not changed in dirstate
2592 if node == parent:
2591 if node == parent:
2593 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2592 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2594 continue
2593 continue
2595 if pmf is None:
2594 if pmf is None:
2596 # only need parent manifest in this unlikely case,
2595 # only need parent manifest in this unlikely case,
2597 # so do not read by default
2596 # so do not read by default
2598 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2597 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2599 if abs in pmf:
2598 if abs in pmf:
2600 if mfentry:
2599 if mfentry:
2601 # if version of file is same in parent and target
2600 # if version of file is same in parent and target
2602 # manifests, do nothing
2601 # manifests, do nothing
2603 if pmf[abs] != mfentry:
2602 if pmf[abs] != mfentry:
2604 handle(revert, False)
2603 handle(revert, False)
2605 else:
2604 else:
2606 handle(remove, False)
2605 handle(remove, False)
2607
2606
2608 if not opts.get('dry_run'):
2607 if not opts.get('dry_run'):
2609 repo.dirstate.forget(forget[0])
2608 repo.dirstate.forget(forget[0])
2610 r = hg.revert(repo, node, update.has_key, wlock)
2609 r = hg.revert(repo, node, update.has_key, wlock)
2611 repo.dirstate.update(add[0], 'a')
2610 repo.dirstate.update(add[0], 'a')
2612 repo.dirstate.update(undelete[0], 'n')
2611 repo.dirstate.update(undelete[0], 'n')
2613 repo.dirstate.update(remove[0], 'r')
2612 repo.dirstate.update(remove[0], 'r')
2614 return r
2613 return r
2615
2614
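# Illustrative sketch, not part of the original file: the dispatch-table idea
# used by revert() above.  Each row maps a dirstate bucket to the action taken
# when the file is, or is not, present in the target manifest, plus the two
# backup flags.  pick_action() is a hypothetical helper; an action of None
# means the file is left alone.
def pick_action(path, buckets, in_target):
    # buckets maps a bucket name to a set of paths, e.g. {'modified': set(...)}
    disptable = (
        # bucket        hit action   miss action  backup-hit  backup-miss
        ('modified',    'revert',    'remove',    True,       True),
        ('added',       'revert',    'forget',    True,       False),
        ('removed',     'undelete',  None,        False,      False),
        ('deleted',     'revert',    'remove',    False,      False),
        ('unknown',     'add',       None,        True,       False),
        ('target_only', 'add',       None,        False,      False),
    )
    for name, hit, miss, backuphit, backupmiss in disptable:
        if path not in buckets.get(name, ()):
            continue
        if in_target:
            return hit, backuphit
        return miss, backupmiss
    return None, False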
2616 def rollback(ui, repo):
2615 def rollback(ui, repo):
2617 """roll back the last transaction in this repository
2616 """roll back the last transaction in this repository
2618
2617
2619 Roll back the last transaction in this repository, restoring the
2618 Roll back the last transaction in this repository, restoring the
2620 project to its state prior to the transaction.
2619 project to its state prior to the transaction.
2621
2620
2622 Transactions are used to encapsulate the effects of all commands
2621 Transactions are used to encapsulate the effects of all commands
2623 that create new changesets or propagate existing changesets into a
2622 that create new changesets or propagate existing changesets into a
2624 repository. For example, the following commands are transactional,
2623 repository. For example, the following commands are transactional,
2625 and their effects can be rolled back:
2624 and their effects can be rolled back:
2626
2625
2627 commit
2626 commit
2628 import
2627 import
2629 pull
2628 pull
2630 push (with this repository as destination)
2629 push (with this repository as destination)
2631 unbundle
2630 unbundle
2632
2631
2633 This command should be used with care. There is only one level of
2632 This command should be used with care. There is only one level of
2634 rollback, and there is no way to undo a rollback.
2633 rollback, and there is no way to undo a rollback.
2635
2634
2636 This command is not intended for use on public repositories. Once
2635 This command is not intended for use on public repositories. Once
2637 changes are visible for pull by other users, rolling a transaction
2636 changes are visible for pull by other users, rolling a transaction
2638 back locally is ineffective (someone else may already have pulled
2637 back locally is ineffective (someone else may already have pulled
2639 the changes). Furthermore, a race is possible with readers of the
2638 the changes). Furthermore, a race is possible with readers of the
2640 repository; for example an in-progress pull from the repository
2639 repository; for example an in-progress pull from the repository
2641 may fail if a rollback is performed.
2640 may fail if a rollback is performed.
2642 """
2641 """
2643 repo.rollback()
2642 repo.rollback()
2644
2643
2645 def root(ui, repo):
2644 def root(ui, repo):
2646 """print the root (top) of the current working dir
2645 """print the root (top) of the current working dir
2647
2646
2648 Print the root directory of the current repository.
2647 Print the root directory of the current repository.
2649 """
2648 """
2650 ui.write(repo.root + "\n")
2649 ui.write(repo.root + "\n")
2651
2650
2652 def serve(ui, repo, **opts):
2651 def serve(ui, repo, **opts):
2653 """export the repository via HTTP
2652 """export the repository via HTTP
2654
2653
2655 Start a local HTTP repository browser and pull server.
2654 Start a local HTTP repository browser and pull server.
2656
2655
2657 By default, the server logs accesses to stdout and errors to
2656 By default, the server logs accesses to stdout and errors to
2658 stderr. Use the "-A" and "-E" options to log to files.
2657 stderr. Use the "-A" and "-E" options to log to files.
2659 """
2658 """
2660
2659
2661 if opts["stdio"]:
2660 if opts["stdio"]:
2662 if repo is None:
2661 if repo is None:
2663 raise hg.RepoError(_('no repo found'))
2662 raise hg.RepoError(_('no repo found'))
2664 s = sshserver.sshserver(ui, repo)
2663 s = sshserver.sshserver(ui, repo)
2665 s.serve_forever()
2664 s.serve_forever()
2666
2665
2667 optlist = ("name templates style address port ipv6"
2666 optlist = ("name templates style address port ipv6"
2668 " accesslog errorlog webdir_conf")
2667 " accesslog errorlog webdir_conf")
2669 for o in optlist.split():
2668 for o in optlist.split():
2670 if opts[o]:
2669 if opts[o]:
2671 ui.setconfig("web", o, opts[o])
2670 ui.setconfig("web", o, opts[o])
2672
2671
2673 if repo is None and not ui.config("web", "webdir_conf"):
2672 if repo is None and not ui.config("web", "webdir_conf"):
2674 raise hg.RepoError(_('no repo found'))
2673 raise hg.RepoError(_('no repo found'))
2675
2674
2676 if opts['daemon'] and not opts['daemon_pipefds']:
2675 if opts['daemon'] and not opts['daemon_pipefds']:
2677 rfd, wfd = os.pipe()
2676 rfd, wfd = os.pipe()
2678 args = sys.argv[:]
2677 args = sys.argv[:]
2679 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2678 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2680 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2679 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2681 args[0], args)
2680 args[0], args)
2682 os.close(wfd)
2681 os.close(wfd)
2683 os.read(rfd, 1)
2682 os.read(rfd, 1)
2684 os._exit(0)
2683 os._exit(0)
2685
2684
2686 try:
2685 try:
2687 httpd = hgweb.server.create_server(ui, repo)
2686 httpd = hgweb.server.create_server(ui, repo)
2688 except socket.error, inst:
2687 except socket.error, inst:
2689 raise util.Abort(_('cannot start server: ') + inst.args[1])
2688 raise util.Abort(_('cannot start server: ') + inst.args[1])
2690
2689
2691 if ui.verbose:
2690 if ui.verbose:
2692 addr, port = httpd.socket.getsockname()
2691 addr, port = httpd.socket.getsockname()
2693 if addr == '0.0.0.0':
2692 if addr == '0.0.0.0':
2694 addr = socket.gethostname()
2693 addr = socket.gethostname()
2695 else:
2694 else:
2696 try:
2695 try:
2697 addr = socket.gethostbyaddr(addr)[0]
2696 addr = socket.gethostbyaddr(addr)[0]
2698 except socket.error:
2697 except socket.error:
2699 pass
2698 pass
2700 if port != 80:
2699 if port != 80:
2701 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2700 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2702 else:
2701 else:
2703 ui.status(_('listening at http://%s/\n') % addr)
2702 ui.status(_('listening at http://%s/\n') % addr)
2704
2703
2705 if opts['pid_file']:
2704 if opts['pid_file']:
2706 fp = open(opts['pid_file'], 'w')
2705 fp = open(opts['pid_file'], 'w')
2707 fp.write(str(os.getpid()) + '\n')
2706 fp.write(str(os.getpid()) + '\n')
2708 fp.close()
2707 fp.close()
2709
2708
2710 if opts['daemon_pipefds']:
2709 if opts['daemon_pipefds']:
2711 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2710 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2712 os.close(rfd)
2711 os.close(rfd)
2713 os.write(wfd, 'y')
2712 os.write(wfd, 'y')
2714 os.close(wfd)
2713 os.close(wfd)
2715 sys.stdout.flush()
2714 sys.stdout.flush()
2716 sys.stderr.flush()
2715 sys.stderr.flush()
2717 fd = os.open(util.nulldev, os.O_RDWR)
2716 fd = os.open(util.nulldev, os.O_RDWR)
2718 if fd != 0: os.dup2(fd, 0)
2717 if fd != 0: os.dup2(fd, 0)
2719 if fd != 1: os.dup2(fd, 1)
2718 if fd != 1: os.dup2(fd, 1)
2720 if fd != 2: os.dup2(fd, 2)
2719 if fd != 2: os.dup2(fd, 2)
2721 if fd not in (0, 1, 2): os.close(fd)
2720 if fd not in (0, 1, 2): os.close(fd)
2722
2721
2723 httpd.serve_forever()
2722 httpd.serve_forever()
2724
2723
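# Illustrative sketch, not part of the original file: the pipe handshake that
# --daemon relies on above.  The parent waits for a single byte before it
# exits, so it only returns once the child is ready to serve; the child then
# detaches its stdio.  This sketch uses os.fork() for brevity (POSIX only),
# whereas the real command re-execs itself with --daemon-pipefds.
import os, sys

def daemonize(ready_callback):
    rfd, wfd = os.pipe()
    if os.fork():
        # parent: block until the child signals readiness, then exit
        os.close(wfd)
        os.read(rfd, 1)
        os._exit(0)
    # child: get ready (bind sockets, write pid file, ...), then signal
    os.close(rfd)
    ready_callback()
    os.write(wfd, 'y')
    os.close(wfd)
    sys.stdout.flush()
    sys.stderr.flush()
    # detach stdin/stdout/stderr from the controlling terminal
    fd = os.open(os.devnull, os.O_RDWR)
    for std in (0, 1, 2):
        if fd != std:
            os.dup2(fd, std)
    if fd not in (0, 1, 2):
        os.close(fd)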
2725 def status(ui, repo, *pats, **opts):
2724 def status(ui, repo, *pats, **opts):
2726 """show changed files in the working directory
2725 """show changed files in the working directory
2727
2726
2728 Show status of files in the repository. If names are given, only
2727 Show status of files in the repository. If names are given, only
2729 files that match are shown. Files that are clean or ignored are
2728 files that match are shown. Files that are clean or ignored are
2730 not listed unless -c (clean), -i (ignored) or -A is given.
2729 not listed unless -c (clean), -i (ignored) or -A is given.
2731
2730
2732 The codes used to show the status of files are:
2731 The codes used to show the status of files are:
2733 M = modified
2732 M = modified
2734 A = added
2733 A = added
2735 R = removed
2734 R = removed
2736 C = clean
2735 C = clean
2737 ! = deleted, but still tracked
2736 ! = deleted, but still tracked
2738 ? = not tracked
2737 ? = not tracked
2739 I = ignored (not shown by default)
2738 I = ignored (not shown by default)
2740 = the previously added file was copied from here
2739 = the previously added file was copied from here
2741 """
2740 """
2742
2741
2743 all = opts['all']
2742 all = opts['all']
2744
2743
2745 files, matchfn, anypats = matchpats(repo, pats, opts)
2744 files, matchfn, anypats = matchpats(repo, pats, opts)
2746 cwd = (pats and repo.getcwd()) or ''
2745 cwd = (pats and repo.getcwd()) or ''
2747 modified, added, removed, deleted, unknown, ignored, clean = [
2746 modified, added, removed, deleted, unknown, ignored, clean = [
2748 [util.pathto(cwd, x) for x in n]
2747 [util.pathto(cwd, x) for x in n]
2749 for n in repo.status(files=files, match=matchfn,
2748 for n in repo.status(files=files, match=matchfn,
2750 list_ignored=all or opts['ignored'],
2749 list_ignored=all or opts['ignored'],
2751 list_clean=all or opts['clean'])]
2750 list_clean=all or opts['clean'])]
2752
2751
2753 changetypes = (('modified', 'M', modified),
2752 changetypes = (('modified', 'M', modified),
2754 ('added', 'A', added),
2753 ('added', 'A', added),
2755 ('removed', 'R', removed),
2754 ('removed', 'R', removed),
2756 ('deleted', '!', deleted),
2755 ('deleted', '!', deleted),
2757 ('unknown', '?', unknown),
2756 ('unknown', '?', unknown),
2758 ('ignored', 'I', ignored))
2757 ('ignored', 'I', ignored))
2759
2758
2760 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2759 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2761
2760
2762 end = opts['print0'] and '\0' or '\n'
2761 end = opts['print0'] and '\0' or '\n'
2763
2762
2764 for opt, char, changes in ([ct for ct in explicit_changetypes
2763 for opt, char, changes in ([ct for ct in explicit_changetypes
2765 if all or opts[ct[0]]]
2764 if all or opts[ct[0]]]
2766 or changetypes):
2765 or changetypes):
2767 if opts['no_status']:
2766 if opts['no_status']:
2768 format = "%%s%s" % end
2767 format = "%%s%s" % end
2769 else:
2768 else:
2770 format = "%s %%s%s" % (char, end)
2769 format = "%s %%s%s" % (char, end)
2771
2770
2772 for f in changes:
2771 for f in changes:
2773 ui.write(format % f)
2772 ui.write(format % f)
2774 if ((all or opts.get('copies')) and not opts.get('no_status')
2773 if ((all or opts.get('copies')) and not opts.get('no_status')
2775 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2774 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2776 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2775 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2777
2776
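# Illustrative sketch, not part of the original file: how status() above builds
# its per-line format.  --no-status drops the leading status code and --print0
# replaces the newline with a NUL so the output is safe for xargs -0.
# format_line() is a hypothetical helper.
def format_line(path, code, no_status=False, print0=False):
    end = print0 and '\0' or '\n'
    if no_status:
        return ("%%s%s" % end) % path
    return ("%s %%s%s" % (code, end)) % path

# for example, format_line('hgext/mq.py', 'M') yields 'M hgext/mq.py\n'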
2778 def tag(ui, repo, name, rev_=None, **opts):
2777 def tag(ui, repo, name, rev_=None, **opts):
2779 """add a tag for the current tip or a given revision
2778 """add a tag for the current tip or a given revision
2780
2779
2781 Name a particular revision using <name>.
2780 Name a particular revision using <name>.
2782
2781
2783 Tags are used to name particular revisions of the repository and are
2782 Tags are used to name particular revisions of the repository and are
2784 very useful to compare different revisions, to go back to significant
2783 very useful to compare different revisions, to go back to significant
2785 earlier versions or to mark branch points as releases, etc.
2784 earlier versions or to mark branch points as releases, etc.
2786
2785
2787 If no revision is given, the parent of the working directory is used.
2786 If no revision is given, the parent of the working directory is used.
2788
2787
2789 To facilitate version control, distribution, and merging of tags,
2788 To facilitate version control, distribution, and merging of tags,
2790 they are stored as a file named ".hgtags" which is managed
2789 they are stored as a file named ".hgtags" which is managed
2791 similarly to other project files and can be hand-edited if
2790 similarly to other project files and can be hand-edited if
2792 necessary. The file '.hg/localtags' is used for local tags (not
2791 necessary. The file '.hg/localtags' is used for local tags (not
2793 shared among repositories).
2792 shared among repositories).
2794 """
2793 """
2795 if name in ['tip', '.']:
2794 if name in ['tip', '.']:
2796 raise util.Abort(_("the name '%s' is reserved") % name)
2795 raise util.Abort(_("the name '%s' is reserved") % name)
2797 if rev_ is not None:
2796 if rev_ is not None:
2798 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2797 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2799 "please use 'hg tag [-r REV] NAME' instead\n"))
2798 "please use 'hg tag [-r REV] NAME' instead\n"))
2800 if opts['rev']:
2799 if opts['rev']:
2801 raise util.Abort(_("use only one form to specify the revision"))
2800 raise util.Abort(_("use only one form to specify the revision"))
2802 if opts['rev']:
2801 if opts['rev']:
2803 rev_ = opts['rev']
2802 rev_ = opts['rev']
2804 if rev_:
2803 if rev_:
2805 r = hex(repo.lookup(rev_))
2804 r = hex(repo.lookup(rev_))
2806 else:
2805 else:
2807 p1, p2 = repo.dirstate.parents()
2806 p1, p2 = repo.dirstate.parents()
2808 if p1 == nullid:
2807 if p1 == nullid:
2809 raise util.Abort(_('no revision to tag'))
2808 raise util.Abort(_('no revision to tag'))
2810 if p2 != nullid:
2809 if p2 != nullid:
2811 raise util.Abort(_('outstanding uncommitted merges'))
2810 raise util.Abort(_('outstanding uncommitted merges'))
2812 r = hex(p1)
2811 r = hex(p1)
2813
2812
2814 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2813 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2815 opts['date'])
2814 opts['date'])
2816
2815
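# Illustrative sketch, not part of the original file: the revision resolution
# performed by tag() above.  The deprecated positional REV and -r/--rev are
# mutually exclusive; with neither, the first dirstate parent is tagged,
# refusing an empty repository or an uncommitted merge.  pick_tag_rev() is a
# hypothetical helper and nullid stands for the all-zero node.
def pick_tag_rev(rev_pos, rev_opt, parents, nullid):
    if rev_pos is not None and rev_opt:
        raise ValueError("use only one form to specify the revision")
    rev = rev_pos or rev_opt
    if rev:
        return rev
    p1, p2 = parents
    if p1 == nullid:
        raise ValueError("no revision to tag")
    if p2 != nullid:
        raise ValueError("outstanding uncommitted merges")
    return p1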
2817 def tags(ui, repo):
2816 def tags(ui, repo):
2818 """list repository tags
2817 """list repository tags
2819
2818
2820 List the repository tags.
2819 List the repository tags.
2821
2820
2822 This lists both regular and local tags.
2821 This lists both regular and local tags.
2823 """
2822 """
2824
2823
2825 l = repo.tagslist()
2824 l = repo.tagslist()
2826 l.reverse()
2825 l.reverse()
2827 for t, n in l:
2826 for t, n in l:
2828 try:
2827 try:
2829 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2828 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2830 except KeyError:
2829 except KeyError:
2831 r = " ?:?"
2830 r = " ?:?"
2832 if ui.quiet:
2831 if ui.quiet:
2833 ui.write("%s\n" % t)
2832 ui.write("%s\n" % t)
2834 else:
2833 else:
2835 ui.write("%-30s %s\n" % (t, r))
2834 ui.write("%-30s %s\n" % (t, r))
2836
2835
2837 def tip(ui, repo, **opts):
2836 def tip(ui, repo, **opts):
2838 """show the tip revision
2837 """show the tip revision
2839
2838
2840 Show the tip revision.
2839 Show the tip revision.
2841 """
2840 """
2842 n = repo.changelog.tip()
2841 n = repo.changelog.tip()
2843 br = None
2842 br = None
2844 if opts['branches']:
2843 if opts['branches']:
2845 br = repo.branchlookup([n])
2844 br = repo.branchlookup([n])
2846 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2845 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2847 if opts['patch']:
2846 if opts['patch']:
2848 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2847 dodiff(ui, ui, repo, repo.changelog.parents(n)[0], n)
2849
2848
2850 def unbundle(ui, repo, fname, **opts):
2849 def unbundle(ui, repo, fname, **opts):
2851 """apply a changegroup file
2850 """apply a changegroup file
2852
2851
2853 Apply a compressed changegroup file generated by the bundle
2852 Apply a compressed changegroup file generated by the bundle
2854 command.
2853 command.
2855 """
2854 """
2856 f = urllib.urlopen(fname)
2855 f = urllib.urlopen(fname)
2857
2856
2858 header = f.read(6)
2857 header = f.read(6)
2859 if not header.startswith("HG"):
2858 if not header.startswith("HG"):
2860 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2859 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2861 elif not header.startswith("HG10"):
2860 elif not header.startswith("HG10"):
2862 raise util.Abort(_("%s: unknown bundle version") % fname)
2861 raise util.Abort(_("%s: unknown bundle version") % fname)
2863 elif header == "HG10BZ":
2862 elif header == "HG10BZ":
2864 def generator(f):
2863 def generator(f):
2865 zd = bz2.BZ2Decompressor()
2864 zd = bz2.BZ2Decompressor()
2866 zd.decompress("BZ")
2865 zd.decompress("BZ")
2867 for chunk in f:
2866 for chunk in f:
2868 yield zd.decompress(chunk)
2867 yield zd.decompress(chunk)
2869 elif header == "HG10UN":
2868 elif header == "HG10UN":
2870 def generator(f):
2869 def generator(f):
2871 for chunk in f:
2870 for chunk in f:
2872 yield chunk
2871 yield chunk
2873 else:
2872 else:
2874 raise util.Abort(_("%s: unknown bundle compression type")
2873 raise util.Abort(_("%s: unknown bundle compression type")
2875 % fname)
2874 % fname)
2876 gen = generator(util.filechunkiter(f, 4096))
2875 gen = generator(util.filechunkiter(f, 4096))
2877 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2876 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2878 'bundle:' + fname)
2877 'bundle:' + fname)
2879 return postincoming(ui, repo, modheads, opts['update'])
2878 return postincoming(ui, repo, modheads, opts['update'])
2880
2879
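# Illustrative sketch, not part of the original file: the header sniffing done
# by unbundle() above.  The first six bytes identify the format: "HG10BZ" is a
# bz2-compressed changegroup whose stream had its leading "BZ" magic stripped,
# "HG10UN" is uncompressed.  decompressor_for() is a hypothetical helper that
# returns a generator function over raw chunks.
import bz2

def decompressor_for(header):
    if not header.startswith("HG"):
        raise ValueError("not a Mercurial bundle file")
    if not header.startswith("HG10"):
        raise ValueError("unknown bundle version")
    if header == "HG10BZ":
        def gen(chunks):
            zd = bz2.BZ2Decompressor()
            zd.decompress("BZ")  # re-feed the magic that was stripped away
            for chunk in chunks:
                yield zd.decompress(chunk)
        return gen
    if header == "HG10UN":
        return lambda chunks: iter(chunks)
    raise ValueError("unknown bundle compression type")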
2881 def undo(ui, repo):
2880 def undo(ui, repo):
2882 """undo the last commit or pull (DEPRECATED)
2881 """undo the last commit or pull (DEPRECATED)
2883
2882
2884 (DEPRECATED)
2883 (DEPRECATED)
2885 This command is now deprecated and will be removed in a future
2884 This command is now deprecated and will be removed in a future
2886 release. Please use the rollback command instead. For usage
2885 release. Please use the rollback command instead. For usage
2887 instructions, see the rollback command.
2886 instructions, see the rollback command.
2888 """
2887 """
2889 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2888 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2890 repo.rollback()
2889 repo.rollback()
2891
2890
2892 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2891 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2893 branch=None):
2892 branch=None):
2894 """update or merge working directory
2893 """update or merge working directory
2895
2894
2896 Update the working directory to the specified revision.
2895 Update the working directory to the specified revision.
2897
2896
2898 If there are no outstanding changes in the working directory and
2897 If there are no outstanding changes in the working directory and
2899 there is a linear relationship between the current version and the
2898 there is a linear relationship between the current version and the
2900 requested version, the result is the requested version.
2899 requested version, the result is the requested version.
2901
2900
2902 To merge the working directory with another revision, use the
2901 To merge the working directory with another revision, use the
2903 merge command.
2902 merge command.
2904
2903
2905 By default, update will refuse to run if doing so would require
2904 By default, update will refuse to run if doing so would require
2906 merging or discarding local changes.
2905 merging or discarding local changes.
2907 """
2906 """
2908 node = _lookup(repo, node, branch)
2907 node = _lookup(repo, node, branch)
2909 if merge:
2908 if merge:
2910 ui.warn(_('(the -m/--merge option is deprecated; '
2909 ui.warn(_('(the -m/--merge option is deprecated; '
2911 'use the merge command instead)\n'))
2910 'use the merge command instead)\n'))
2912 return hg.merge(repo, node, force=force)
2911 return hg.merge(repo, node, force=force)
2913 elif clean:
2912 elif clean:
2914 return hg.clean(repo, node)
2913 return hg.clean(repo, node)
2915 else:
2914 else:
2916 return hg.update(repo, node)
2915 return hg.update(repo, node)
2917
2916
2918 def _lookup(repo, node, branch=None):
2917 def _lookup(repo, node, branch=None):
2919 if branch:
2918 if branch:
2920 br = repo.branchlookup(branch=branch)
2919 br = repo.branchlookup(branch=branch)
2921 found = []
2920 found = []
2922 for x in br:
2921 for x in br:
2923 if branch in br[x]:
2922 if branch in br[x]:
2924 found.append(x)
2923 found.append(x)
2925 if len(found) > 1:
2924 if len(found) > 1:
2926 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2925 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2927 for x in found:
2926 for x in found:
2928 show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
2927 show_changeset(ui, repo, {}).show(changenode=x, brinfo=br)
2929 raise util.Abort("")
2928 raise util.Abort("")
2930 if len(found) == 1:
2929 if len(found) == 1:
2931 node = found[0]
2930 node = found[0]
2932 repo.ui.warn(_("Using head %s for branch %s\n")
2931 repo.ui.warn(_("Using head %s for branch %s\n")
2933 % (short(node), branch))
2932 % (short(node), branch))
2934 else:
2933 else:
2935 raise util.Abort(_("branch %s not found\n") % (branch))
2934 raise util.Abort(_("branch %s not found\n") % (branch))
2936 else:
2935 else:
2937 node = node and repo.lookup(node) or repo.changelog.tip()
2936 node = node and repo.lookup(node) or repo.changelog.tip()
2938 return node
2937 return node
2939
2938
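# Illustrative sketch, not part of the original file: the branch-name
# resolution in _lookup() above.  branchlookup() maps head nodes to the branch
# names reachable from them; exactly one matching head is required, otherwise
# the lookup fails.  resolve_branch() is a hypothetical helper.
def resolve_branch(branchmap, branch):
    # branchmap: dict mapping a head node to a list/set of branch names
    found = [node for node, names in branchmap.items() if branch in names]
    if len(found) > 1:
        raise ValueError("found multiple heads for %s" % branch)
    if not found:
        raise ValueError("branch %s not found" % branch)
    return found[0]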
2940 def verify(ui, repo):
2939 def verify(ui, repo):
2941 """verify the integrity of the repository
2940 """verify the integrity of the repository
2942
2941
2943 Verify the integrity of the current repository.
2942 Verify the integrity of the current repository.
2944
2943
2945 This will perform an extensive check of the repository's
2944 This will perform an extensive check of the repository's
2946 integrity, validating the hashes and checksums of each entry in
2945 integrity, validating the hashes and checksums of each entry in
2947 the changelog, manifest, and tracked files, as well as the
2946 the changelog, manifest, and tracked files, as well as the
2948 integrity of their crosslinks and indices.
2947 integrity of their crosslinks and indices.
2949 """
2948 """
2950 return hg.verify(repo)
2949 return hg.verify(repo)
2951
2950
2952 # Command options and aliases are listed here, alphabetically
2951 # Command options and aliases are listed here, alphabetically
2953
2952
2954 table = {
2953 table = {
2955 "^add":
2954 "^add":
2956 (add,
2955 (add,
2957 [('I', 'include', [], _('include names matching the given patterns')),
2956 [('I', 'include', [], _('include names matching the given patterns')),
2958 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2957 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2959 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2958 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2960 _('hg add [OPTION]... [FILE]...')),
2959 _('hg add [OPTION]... [FILE]...')),
2961 "debugaddremove|addremove":
2960 "debugaddremove|addremove":
2962 (addremove,
2961 (addremove,
2963 [('I', 'include', [], _('include names matching the given patterns')),
2962 [('I', 'include', [], _('include names matching the given patterns')),
2964 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2963 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2965 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2964 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2966 _('hg addremove [OPTION]... [FILE]...')),
2965 _('hg addremove [OPTION]... [FILE]...')),
2967 "^annotate":
2966 "^annotate":
2968 (annotate,
2967 (annotate,
2969 [('r', 'rev', '', _('annotate the specified revision')),
2968 [('r', 'rev', '', _('annotate the specified revision')),
2970 ('a', 'text', None, _('treat all files as text')),
2969 ('a', 'text', None, _('treat all files as text')),
2971 ('u', 'user', None, _('list the author')),
2970 ('u', 'user', None, _('list the author')),
2972 ('d', 'date', None, _('list the date')),
2971 ('d', 'date', None, _('list the date')),
2973 ('n', 'number', None, _('list the revision number (default)')),
2972 ('n', 'number', None, _('list the revision number (default)')),
2974 ('c', 'changeset', None, _('list the changeset')),
2973 ('c', 'changeset', None, _('list the changeset')),
2975 ('I', 'include', [], _('include names matching the given patterns')),
2974 ('I', 'include', [], _('include names matching the given patterns')),
2976 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2975 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2977 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2976 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2978 "archive":
2977 "archive":
2979 (archive,
2978 (archive,
2980 [('', 'no-decode', None, _('do not pass files through decoders')),
2979 [('', 'no-decode', None, _('do not pass files through decoders')),
2981 ('p', 'prefix', '', _('directory prefix for files in archive')),
2980 ('p', 'prefix', '', _('directory prefix for files in archive')),
2982 ('r', 'rev', '', _('revision to distribute')),
2981 ('r', 'rev', '', _('revision to distribute')),
2983 ('t', 'type', '', _('type of distribution to create')),
2982 ('t', 'type', '', _('type of distribution to create')),
2984 ('I', 'include', [], _('include names matching the given patterns')),
2983 ('I', 'include', [], _('include names matching the given patterns')),
2985 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2984 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2986 _('hg archive [OPTION]... DEST')),
2985 _('hg archive [OPTION]... DEST')),
2987 "backout":
2986 "backout":
2988 (backout,
2987 (backout,
2989 [('', 'merge', None,
2988 [('', 'merge', None,
2990 _('merge with old dirstate parent after backout')),
2989 _('merge with old dirstate parent after backout')),
2991 ('m', 'message', '', _('use <text> as commit message')),
2990 ('m', 'message', '', _('use <text> as commit message')),
2992 ('l', 'logfile', '', _('read commit message from <file>')),
2991 ('l', 'logfile', '', _('read commit message from <file>')),
2993 ('d', 'date', '', _('record datecode as commit date')),
2992 ('d', 'date', '', _('record datecode as commit date')),
2994 ('', 'parent', '', _('parent to choose when backing out merge')),
2993 ('', 'parent', '', _('parent to choose when backing out merge')),
2995 ('u', 'user', '', _('record user as committer')),
2994 ('u', 'user', '', _('record user as committer')),
2996 ('I', 'include', [], _('include names matching the given patterns')),
2995 ('I', 'include', [], _('include names matching the given patterns')),
2997 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2996 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2998 _('hg backout [OPTION]... REV')),
2997 _('hg backout [OPTION]... REV')),
2999 "bundle":
2998 "bundle":
3000 (bundle,
2999 (bundle,
3001 [('f', 'force', None,
3000 [('f', 'force', None,
3002 _('run even when remote repository is unrelated'))],
3001 _('run even when remote repository is unrelated'))],
3003 _('hg bundle FILE DEST')),
3002 _('hg bundle FILE DEST')),
3004 "cat":
3003 "cat":
3005 (cat,
3004 (cat,
3006 [('o', 'output', '', _('print output to file with formatted name')),
3005 [('o', 'output', '', _('print output to file with formatted name')),
3007 ('r', 'rev', '', _('print the given revision')),
3006 ('r', 'rev', '', _('print the given revision')),
3008 ('I', 'include', [], _('include names matching the given patterns')),
3007 ('I', 'include', [], _('include names matching the given patterns')),
3009 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3008 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3010 _('hg cat [OPTION]... FILE...')),
3009 _('hg cat [OPTION]... FILE...')),
3011 "^clone":
3010 "^clone":
3012 (clone,
3011 (clone,
3013 [('U', 'noupdate', None, _('do not update the new working directory')),
3012 [('U', 'noupdate', None, _('do not update the new working directory')),
3014 ('r', 'rev', [],
3013 ('r', 'rev', [],
3015 _('a changeset you would like to have after cloning')),
3014 _('a changeset you would like to have after cloning')),
3016 ('', 'pull', None, _('use pull protocol to copy metadata')),
3015 ('', 'pull', None, _('use pull protocol to copy metadata')),
3017 ('', 'uncompressed', None,
3016 ('', 'uncompressed', None,
3018 _('use uncompressed transfer (fast over LAN)')),
3017 _('use uncompressed transfer (fast over LAN)')),
3019 ('e', 'ssh', '', _('specify ssh command to use')),
3018 ('e', 'ssh', '', _('specify ssh command to use')),
3020 ('', 'remotecmd', '',
3019 ('', 'remotecmd', '',
3021 _('specify hg command to run on the remote side'))],
3020 _('specify hg command to run on the remote side'))],
3022 _('hg clone [OPTION]... SOURCE [DEST]')),
3021 _('hg clone [OPTION]... SOURCE [DEST]')),
3023 "^commit|ci":
3022 "^commit|ci":
3024 (commit,
3023 (commit,
3025 [('A', 'addremove', None,
3024 [('A', 'addremove', None,
3026 _('mark new/missing files as added/removed before committing')),
3025 _('mark new/missing files as added/removed before committing')),
3027 ('m', 'message', '', _('use <text> as commit message')),
3026 ('m', 'message', '', _('use <text> as commit message')),
3028 ('l', 'logfile', '', _('read the commit message from <file>')),
3027 ('l', 'logfile', '', _('read the commit message from <file>')),
3029 ('d', 'date', '', _('record datecode as commit date')),
3028 ('d', 'date', '', _('record datecode as commit date')),
3030 ('u', 'user', '', _('record user as committer')),
3029 ('u', 'user', '', _('record user as committer')),
3031 ('I', 'include', [], _('include names matching the given patterns')),
3030 ('I', 'include', [], _('include names matching the given patterns')),
3032 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3031 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3033 _('hg commit [OPTION]... [FILE]...')),
3032 _('hg commit [OPTION]... [FILE]...')),
3034 "copy|cp":
3033 "copy|cp":
3035 (copy,
3034 (copy,
3036 [('A', 'after', None, _('record a copy that has already occurred')),
3035 [('A', 'after', None, _('record a copy that has already occurred')),
3037 ('f', 'force', None,
3036 ('f', 'force', None,
3038 _('forcibly copy over an existing managed file')),
3037 _('forcibly copy over an existing managed file')),
3039 ('I', 'include', [], _('include names matching the given patterns')),
3038 ('I', 'include', [], _('include names matching the given patterns')),
3040 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3039 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3041 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3040 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3042 _('hg copy [OPTION]... [SOURCE]... DEST')),
3041 _('hg copy [OPTION]... [SOURCE]... DEST')),
3043 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3042 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
3044 "debugcomplete":
3043 "debugcomplete":
3045 (debugcomplete,
3044 (debugcomplete,
3046 [('o', 'options', None, _('show the command options'))],
3045 [('o', 'options', None, _('show the command options'))],
3047 _('debugcomplete [-o] CMD')),
3046 _('debugcomplete [-o] CMD')),
3048 "debugrebuildstate":
3047 "debugrebuildstate":
3049 (debugrebuildstate,
3048 (debugrebuildstate,
3050 [('r', 'rev', '', _('revision to rebuild to'))],
3049 [('r', 'rev', '', _('revision to rebuild to'))],
3051 _('debugrebuildstate [-r REV] [REV]')),
3050 _('debugrebuildstate [-r REV] [REV]')),
3052 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3051 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
3053 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3052 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
3054 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3053 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
3055 "debugstate": (debugstate, [], _('debugstate')),
3054 "debugstate": (debugstate, [], _('debugstate')),
3056 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3055 "debugdata": (debugdata, [], _('debugdata FILE REV')),
3057 "debugindex": (debugindex, [], _('debugindex FILE')),
3056 "debugindex": (debugindex, [], _('debugindex FILE')),
3058 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3057 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
3059 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3058 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
3060 "debugwalk":
3059 "debugwalk":
3061 (debugwalk,
3060 (debugwalk,
3062 [('I', 'include', [], _('include names matching the given patterns')),
3061 [('I', 'include', [], _('include names matching the given patterns')),
3063 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3062 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3064 _('debugwalk [OPTION]... [FILE]...')),
3063 _('debugwalk [OPTION]... [FILE]...')),
3065 "^diff":
3064 "^diff":
3066 (diff,
3065 (diff,
3067 [('r', 'rev', [], _('revision')),
3066 [('r', 'rev', [], _('revision')),
3068 ('a', 'text', None, _('treat all files as text')),
3067 ('a', 'text', None, _('treat all files as text')),
3069 ('p', 'show-function', None,
3068 ('p', 'show-function', None,
3070 _('show which function each change is in')),
3069 _('show which function each change is in')),
3071 ('w', 'ignore-all-space', None,
3070 ('w', 'ignore-all-space', None,
3072 _('ignore white space when comparing lines')),
3071 _('ignore white space when comparing lines')),
3073 ('b', 'ignore-space-change', None,
3072 ('b', 'ignore-space-change', None,
3074 _('ignore changes in the amount of white space')),
3073 _('ignore changes in the amount of white space')),
3075 ('B', 'ignore-blank-lines', None,
3074 ('B', 'ignore-blank-lines', None,
3076 _('ignore changes whose lines are all blank')),
3075 _('ignore changes whose lines are all blank')),
3077 ('I', 'include', [], _('include names matching the given patterns')),
3076 ('I', 'include', [], _('include names matching the given patterns')),
3078 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3077 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3079 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3078 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
3080 "^export":
3079 "^export":
3081 (export,
3080 (export,
3082 [('o', 'output', '', _('print output to file with formatted name')),
3081 [('o', 'output', '', _('print output to file with formatted name')),
3083 ('a', 'text', None, _('treat all files as text')),
3082 ('a', 'text', None, _('treat all files as text')),
3084 ('', 'switch-parent', None, _('diff against the second parent'))],
3083 ('', 'switch-parent', None, _('diff against the second parent'))],
3085 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3084 _('hg export [-a] [-o OUTFILESPEC] REV...')),
3086 "debugforget|forget":
3085 "debugforget|forget":
3087 (forget,
3086 (forget,
3088 [('I', 'include', [], _('include names matching the given patterns')),
3087 [('I', 'include', [], _('include names matching the given patterns')),
3089 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3088 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3090 _('hg forget [OPTION]... FILE...')),
3089 _('hg forget [OPTION]... FILE...')),
3091 "grep":
3090 "grep":
3092 (grep,
3091 (grep,
3093 [('0', 'print0', None, _('end fields with NUL')),
3092 [('0', 'print0', None, _('end fields with NUL')),
3094 ('', 'all', None, _('print all revisions that match')),
3093 ('', 'all', None, _('print all revisions that match')),
3095 ('i', 'ignore-case', None, _('ignore case when matching')),
3094 ('i', 'ignore-case', None, _('ignore case when matching')),
3096 ('l', 'files-with-matches', None,
3095 ('l', 'files-with-matches', None,
3097 _('print only filenames and revs that match')),
3096 _('print only filenames and revs that match')),
3098 ('n', 'line-number', None, _('print matching line numbers')),
3097 ('n', 'line-number', None, _('print matching line numbers')),
3099 ('r', 'rev', [], _('search in given revision range')),
3098 ('r', 'rev', [], _('search in given revision range')),
3100 ('u', 'user', None, _('print user who committed change')),
3099 ('u', 'user', None, _('print user who committed change')),
3101 ('I', 'include', [], _('include names matching the given patterns')),
3100 ('I', 'include', [], _('include names matching the given patterns')),
3102 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3101 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3103 _('hg grep [OPTION]... PATTERN [FILE]...')),
3102 _('hg grep [OPTION]... PATTERN [FILE]...')),
3104 "heads":
3103 "heads":
3105 (heads,
3104 (heads,
3106 [('b', 'branches', None, _('show branches')),
3105 [('b', 'branches', None, _('show branches')),
3107 ('', 'style', '', _('display using template map file')),
3106 ('', 'style', '', _('display using template map file')),
3108 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3107 ('r', 'rev', '', _('show only heads which are descendants of rev')),
3109 ('', 'template', '', _('display with template'))],
3108 ('', 'template', '', _('display with template'))],
3110 _('hg heads [-b] [-r <rev>]')),
3109 _('hg heads [-b] [-r <rev>]')),
3111 "help": (help_, [], _('hg help [COMMAND]')),
3110 "help": (help_, [], _('hg help [COMMAND]')),
3112 "identify|id": (identify, [], _('hg identify')),
3111 "identify|id": (identify, [], _('hg identify')),
3113 "import|patch":
3112 "import|patch":
3114 (import_,
3113 (import_,
3115 [('p', 'strip', 1,
3114 [('p', 'strip', 1,
3116 _('directory strip option for patch. This has the same\n'
3115 _('directory strip option for patch. This has the same\n'
3117 'meaning as the corresponding patch option')),
3116 'meaning as the corresponding patch option')),
3118 ('m', 'message', '', _('use <text> as commit message')),
3117 ('m', 'message', '', _('use <text> as commit message')),
3119 ('b', 'base', '', _('base path')),
3118 ('b', 'base', '', _('base path')),
3120 ('f', 'force', None,
3119 ('f', 'force', None,
3121 _('skip check for outstanding uncommitted changes'))],
3120 _('skip check for outstanding uncommitted changes'))],
3122 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3121 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
3123 "incoming|in": (incoming,
3122 "incoming|in": (incoming,
3124 [('M', 'no-merges', None, _('do not show merges')),
3123 [('M', 'no-merges', None, _('do not show merges')),
3125 ('f', 'force', None,
3124 ('f', 'force', None,
3126 _('run even when remote repository is unrelated')),
3125 _('run even when remote repository is unrelated')),
3127 ('', 'style', '', _('display using template map file')),
3126 ('', 'style', '', _('display using template map file')),
3128 ('n', 'newest-first', None, _('show newest record first')),
3127 ('n', 'newest-first', None, _('show newest record first')),
3129 ('', 'bundle', '', _('file to store the bundles into')),
3128 ('', 'bundle', '', _('file to store the bundles into')),
3130 ('p', 'patch', None, _('show patch')),
3129 ('p', 'patch', None, _('show patch')),
3131 ('r', 'rev', [], _('a specific revision you would like to pull')),
3130 ('r', 'rev', [], _('a specific revision you would like to pull')),
3132 ('', 'template', '', _('display with template')),
3131 ('', 'template', '', _('display with template')),
3133 ('e', 'ssh', '', _('specify ssh command to use')),
3132 ('e', 'ssh', '', _('specify ssh command to use')),
3134 ('', 'remotecmd', '',
3133 ('', 'remotecmd', '',
3135 _('specify hg command to run on the remote side'))],
3134 _('specify hg command to run on the remote side'))],
3136 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3135 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3137 ' [--bundle FILENAME] [SOURCE]')),
3136 ' [--bundle FILENAME] [SOURCE]')),
3138 "^init":
3137 "^init":
3139 (init,
3138 (init,
3140 [('e', 'ssh', '', _('specify ssh command to use')),
3139 [('e', 'ssh', '', _('specify ssh command to use')),
3141 ('', 'remotecmd', '',
3140 ('', 'remotecmd', '',
3142 _('specify hg command to run on the remote side'))],
3141 _('specify hg command to run on the remote side'))],
3143 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3142 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3144 "locate":
3143 "locate":
3145 (locate,
3144 (locate,
3146 [('r', 'rev', '', _('search the repository as it stood at rev')),
3145 [('r', 'rev', '', _('search the repository as it stood at rev')),
3147 ('0', 'print0', None,
3146 ('0', 'print0', None,
3148 _('end filenames with NUL, for use with xargs')),
3147 _('end filenames with NUL, for use with xargs')),
3149 ('f', 'fullpath', None,
3148 ('f', 'fullpath', None,
3150 _('print complete paths from the filesystem root')),
3149 _('print complete paths from the filesystem root')),
3151 ('I', 'include', [], _('include names matching the given patterns')),
3150 ('I', 'include', [], _('include names matching the given patterns')),
3152 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3151 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3153 _('hg locate [OPTION]... [PATTERN]...')),
3152 _('hg locate [OPTION]... [PATTERN]...')),
3154 "^log|history":
3153 "^log|history":
3155 (log,
3154 (log,
3156 [('b', 'branches', None, _('show branches')),
3155 [('b', 'branches', None, _('show branches')),
3157 ('f', 'follow', None,
3156 ('f', 'follow', None,
3158 _('follow changeset history, or file history across copies and renames')),
3157 _('follow changeset history, or file history across copies and renames')),
3159 ('', 'follow-first', None,
3158 ('', 'follow-first', None,
3160 _('only follow the first parent of merge changesets')),
3159 _('only follow the first parent of merge changesets')),
3161 ('k', 'keyword', [], _('search for a keyword')),
3160 ('k', 'keyword', [], _('search for a keyword')),
3162 ('l', 'limit', '', _('limit number of changes displayed')),
3161 ('l', 'limit', '', _('limit number of changes displayed')),
3163 ('r', 'rev', [], _('show the specified revision or range')),
3162 ('r', 'rev', [], _('show the specified revision or range')),
3164 ('M', 'no-merges', None, _('do not show merges')),
3163 ('M', 'no-merges', None, _('do not show merges')),
3165 ('', 'style', '', _('display using template map file')),
3164 ('', 'style', '', _('display using template map file')),
3166 ('m', 'only-merges', None, _('show only merges')),
3165 ('m', 'only-merges', None, _('show only merges')),
3167 ('p', 'patch', None, _('show patch')),
3166 ('p', 'patch', None, _('show patch')),
3168 ('', 'template', '', _('display with template')),
3167 ('', 'template', '', _('display with template')),
3169 ('I', 'include', [], _('include names matching the given patterns')),
3168 ('I', 'include', [], _('include names matching the given patterns')),
3170 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3169 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3171 _('hg log [OPTION]... [FILE]')),
3170 _('hg log [OPTION]... [FILE]')),
3172 "manifest": (manifest, [], _('hg manifest [REV]')),
3171 "manifest": (manifest, [], _('hg manifest [REV]')),
3173 "merge":
3172 "merge":
3174 (merge,
3173 (merge,
3175 [('b', 'branch', '', _('merge with head of a specific branch')),
3174 [('b', 'branch', '', _('merge with head of a specific branch')),
3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3175 ('f', 'force', None, _('force a merge with outstanding changes'))],
3177 _('hg merge [-b TAG] [-f] [REV]')),
3176 _('hg merge [-b TAG] [-f] [REV]')),
3178 "outgoing|out": (outgoing,
3177 "outgoing|out": (outgoing,
3179 [('M', 'no-merges', None, _('do not show merges')),
3178 [('M', 'no-merges', None, _('do not show merges')),
3180 ('f', 'force', None,
3179 ('f', 'force', None,
3181 _('run even when remote repository is unrelated')),
3180 _('run even when remote repository is unrelated')),
3182 ('p', 'patch', None, _('show patch')),
3181 ('p', 'patch', None, _('show patch')),
3183 ('', 'style', '', _('display using template map file')),
3182 ('', 'style', '', _('display using template map file')),
3184 ('r', 'rev', [], _('a specific revision you would like to push')),
3183 ('r', 'rev', [], _('a specific revision you would like to push')),
3185 ('n', 'newest-first', None, _('show newest record first')),
3184 ('n', 'newest-first', None, _('show newest record first')),
3186 ('', 'template', '', _('display with template')),
3185 ('', 'template', '', _('display with template')),
3187 ('e', 'ssh', '', _('specify ssh command to use')),
3186 ('e', 'ssh', '', _('specify ssh command to use')),
3188 ('', 'remotecmd', '',
3187 ('', 'remotecmd', '',
3189 _('specify hg command to run on the remote side'))],
3188 _('specify hg command to run on the remote side'))],
3190 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3189 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3191 "^parents":
3190 "^parents":
3192 (parents,
3191 (parents,
3193 [('b', 'branches', None, _('show branches')),
3192 [('b', 'branches', None, _('show branches')),
3194 ('r', 'rev', '', _('show parents from the specified rev')),
3193 ('r', 'rev', '', _('show parents from the specified rev')),
3195 ('', 'style', '', _('display using template map file')),
3194 ('', 'style', '', _('display using template map file')),
3196 ('', 'template', '', _('display with template'))],
3195 ('', 'template', '', _('display with template'))],
3197 _('hg parents [-b] [-r REV] [FILE]')),
3196 _('hg parents [-b] [-r REV] [FILE]')),
3198 "paths": (paths, [], _('hg paths [NAME]')),
3197 "paths": (paths, [], _('hg paths [NAME]')),
3199 "^pull":
3198 "^pull":
3200 (pull,
3199 (pull,
3201 [('u', 'update', None,
3200 [('u', 'update', None,
3202 _('update the working directory to tip after pull')),
3201 _('update the working directory to tip after pull')),
3203 ('e', 'ssh', '', _('specify ssh command to use')),
3202 ('e', 'ssh', '', _('specify ssh command to use')),
3204 ('f', 'force', None,
3203 ('f', 'force', None,
3205 _('run even when remote repository is unrelated')),
3204 _('run even when remote repository is unrelated')),
3206 ('r', 'rev', [], _('a specific revision you would like to pull')),
3205 ('r', 'rev', [], _('a specific revision you would like to pull')),
3207 ('', 'remotecmd', '',
3206 ('', 'remotecmd', '',
3208 _('specify hg command to run on the remote side'))],
3207 _('specify hg command to run on the remote side'))],
3209 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3208 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3210 "^push":
3209 "^push":
3211 (push,
3210 (push,
3212 [('f', 'force', None, _('force push')),
3211 [('f', 'force', None, _('force push')),
3213 ('e', 'ssh', '', _('specify ssh command to use')),
3212 ('e', 'ssh', '', _('specify ssh command to use')),
3214 ('r', 'rev', [], _('a specific revision you would like to push')),
3213 ('r', 'rev', [], _('a specific revision you would like to push')),
3215 ('', 'remotecmd', '',
3214 ('', 'remotecmd', '',
3216 _('specify hg command to run on the remote side'))],
3215 _('specify hg command to run on the remote side'))],
3217 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3216 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3218 "debugrawcommit|rawcommit":
3217 "debugrawcommit|rawcommit":
3219 (rawcommit,
3218 (rawcommit,
3220 [('p', 'parent', [], _('parent')),
3219 [('p', 'parent', [], _('parent')),
3221 ('d', 'date', '', _('date code')),
3220 ('d', 'date', '', _('date code')),
3222 ('u', 'user', '', _('user')),
3221 ('u', 'user', '', _('user')),
3223 ('F', 'files', '', _('file list')),
3222 ('F', 'files', '', _('file list')),
3224 ('m', 'message', '', _('commit message')),
3223 ('m', 'message', '', _('commit message')),
3225 ('l', 'logfile', '', _('commit message file'))],
3224 ('l', 'logfile', '', _('commit message file'))],
3226 _('hg debugrawcommit [OPTION]... [FILE]...')),
3225 _('hg debugrawcommit [OPTION]... [FILE]...')),
3227 "recover": (recover, [], _('hg recover')),
3226 "recover": (recover, [], _('hg recover')),
3228 "^remove|rm":
3227 "^remove|rm":
3229 (remove,
3228 (remove,
3230 [('A', 'after', None, _('record remove that has already occurred')),
3229 [('A', 'after', None, _('record remove that has already occurred')),
3231 ('f', 'force', None, _('remove file even if modified')),
3230 ('f', 'force', None, _('remove file even if modified')),
3232 ('I', 'include', [], _('include names matching the given patterns')),
3231 ('I', 'include', [], _('include names matching the given patterns')),
3233 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3232 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3234 _('hg remove [OPTION]... FILE...')),
3233 _('hg remove [OPTION]... FILE...')),
3235 "rename|mv":
3234 "rename|mv":
3236 (rename,
3235 (rename,
3237 [('A', 'after', None, _('record a rename that has already occurred')),
3236 [('A', 'after', None, _('record a rename that has already occurred')),
3238 ('f', 'force', None,
3237 ('f', 'force', None,
3239 _('forcibly copy over an existing managed file')),
3238 _('forcibly copy over an existing managed file')),
3240 ('I', 'include', [], _('include names matching the given patterns')),
3239 ('I', 'include', [], _('include names matching the given patterns')),
3241 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3240 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3242 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3241 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3243 _('hg rename [OPTION]... SOURCE... DEST')),
3242 _('hg rename [OPTION]... SOURCE... DEST')),
3244 "^revert":
3243 "^revert":
3245 (revert,
3244 (revert,
3246 [('r', 'rev', '', _('revision to revert to')),
3245 [('r', 'rev', '', _('revision to revert to')),
3247 ('', 'no-backup', None, _('do not save backup copies of files')),
3246 ('', 'no-backup', None, _('do not save backup copies of files')),
3248 ('I', 'include', [], _('include names matching given patterns')),
3247 ('I', 'include', [], _('include names matching given patterns')),
3249 ('X', 'exclude', [], _('exclude names matching given patterns')),
3248 ('X', 'exclude', [], _('exclude names matching given patterns')),
3250 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3249 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3251 _('hg revert [-r REV] [NAME]...')),
3250 _('hg revert [-r REV] [NAME]...')),
3252 "rollback": (rollback, [], _('hg rollback')),
3251 "rollback": (rollback, [], _('hg rollback')),
3253 "root": (root, [], _('hg root')),
3252 "root": (root, [], _('hg root')),
3254 "^serve":
3253 "^serve":
3255 (serve,
3254 (serve,
3256 [('A', 'accesslog', '', _('name of access log file to write to')),
3255 [('A', 'accesslog', '', _('name of access log file to write to')),
3257 ('d', 'daemon', None, _('run server in background')),
3256 ('d', 'daemon', None, _('run server in background')),
3258 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3257 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3259 ('E', 'errorlog', '', _('name of error log file to write to')),
3258 ('E', 'errorlog', '', _('name of error log file to write to')),
3260 ('p', 'port', 0, _('port to use (default: 8000)')),
3259 ('p', 'port', 0, _('port to use (default: 8000)')),
3261 ('a', 'address', '', _('address to use')),
3260 ('a', 'address', '', _('address to use')),
3262 ('n', 'name', '',
3261 ('n', 'name', '',
3263 _('name to show in web pages (default: working dir)')),
3262 _('name to show in web pages (default: working dir)')),
3264 ('', 'webdir-conf', '', _('name of the webdir config file'
3263 ('', 'webdir-conf', '', _('name of the webdir config file'
3265 ' (serve more than one repo)')),
3264 ' (serve more than one repo)')),
3266 ('', 'pid-file', '', _('name of file to write process ID to')),
3265 ('', 'pid-file', '', _('name of file to write process ID to')),
3267 ('', 'stdio', None, _('for remote clients')),
3266 ('', 'stdio', None, _('for remote clients')),
3268 ('t', 'templates', '', _('web templates to use')),
3267 ('t', 'templates', '', _('web templates to use')),
3269 ('', 'style', '', _('template style to use')),
3268 ('', 'style', '', _('template style to use')),
3270 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3269 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3271 _('hg serve [OPTION]...')),
3270 _('hg serve [OPTION]...')),
3272 "^status|st":
3271 "^status|st":
3273 (status,
3272 (status,
3274 [('A', 'all', None, _('show status of all files')),
3273 [('A', 'all', None, _('show status of all files')),
3275 ('m', 'modified', None, _('show only modified files')),
3274 ('m', 'modified', None, _('show only modified files')),
3276 ('a', 'added', None, _('show only added files')),
3275 ('a', 'added', None, _('show only added files')),
3277 ('r', 'removed', None, _('show only removed files')),
3276 ('r', 'removed', None, _('show only removed files')),
3278 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3277 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3279 ('c', 'clean', None, _('show only files without changes')),
3278 ('c', 'clean', None, _('show only files without changes')),
3280 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3279 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3281 ('i', 'ignored', None, _('show ignored files')),
3280 ('i', 'ignored', None, _('show ignored files')),
3282 ('n', 'no-status', None, _('hide status prefix')),
3281 ('n', 'no-status', None, _('hide status prefix')),
3283 ('C', 'copies', None, _('show source of copied files')),
3282 ('C', 'copies', None, _('show source of copied files')),
3284 ('0', 'print0', None,
3283 ('0', 'print0', None,
3285 _('end filenames with NUL, for use with xargs')),
3284 _('end filenames with NUL, for use with xargs')),
3286 ('I', 'include', [], _('include names matching the given patterns')),
3285 ('I', 'include', [], _('include names matching the given patterns')),
3287 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3286 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3288 _('hg status [OPTION]... [FILE]...')),
3287 _('hg status [OPTION]... [FILE]...')),
3289 "tag":
3288 "tag":
3290 (tag,
3289 (tag,
3291 [('l', 'local', None, _('make the tag local')),
3290 [('l', 'local', None, _('make the tag local')),
3292 ('m', 'message', '', _('message for tag commit log entry')),
3291 ('m', 'message', '', _('message for tag commit log entry')),
3293 ('d', 'date', '', _('record datecode as commit date')),
3292 ('d', 'date', '', _('record datecode as commit date')),
3294 ('u', 'user', '', _('record user as committer')),
3293 ('u', 'user', '', _('record user as committer')),
3295 ('r', 'rev', '', _('revision to tag'))],
3294 ('r', 'rev', '', _('revision to tag'))],
3296 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3295 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3297 "tags": (tags, [], _('hg tags')),
3296 "tags": (tags, [], _('hg tags')),
3298 "tip":
3297 "tip":
3299 (tip,
3298 (tip,
3300 [('b', 'branches', None, _('show branches')),
3299 [('b', 'branches', None, _('show branches')),
3301 ('', 'style', '', _('display using template map file')),
3300 ('', 'style', '', _('display using template map file')),
3302 ('p', 'patch', None, _('show patch')),
3301 ('p', 'patch', None, _('show patch')),
3303 ('', 'template', '', _('display with template'))],
3302 ('', 'template', '', _('display with template'))],
3304 _('hg tip [-b] [-p]')),
3303 _('hg tip [-b] [-p]')),
3305 "unbundle":
3304 "unbundle":
3306 (unbundle,
3305 (unbundle,
3307 [('u', 'update', None,
3306 [('u', 'update', None,
3308 _('update the working directory to tip after unbundle'))],
3307 _('update the working directory to tip after unbundle'))],
3309 _('hg unbundle [-u] FILE')),
3308 _('hg unbundle [-u] FILE')),
3310 "debugundo|undo": (undo, [], _('hg undo')),
3309 "debugundo|undo": (undo, [], _('hg undo')),
3311 "^update|up|checkout|co":
3310 "^update|up|checkout|co":
3312 (update,
3311 (update,
3313 [('b', 'branch', '', _('checkout the head of a specific branch')),
3312 [('b', 'branch', '', _('checkout the head of a specific branch')),
3314 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3313 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3315 ('C', 'clean', None, _('overwrite locally modified files')),
3314 ('C', 'clean', None, _('overwrite locally modified files')),
3316 ('f', 'force', None, _('force a merge with outstanding changes'))],
3315 ('f', 'force', None, _('force a merge with outstanding changes'))],
3317 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3316 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3318 "verify": (verify, [], _('hg verify')),
3317 "verify": (verify, [], _('hg verify')),
3319 "version": (show_version, [], _('hg version')),
3318 "version": (show_version, [], _('hg version')),
3320 }
3319 }
3321
3320
3322 globalopts = [
3321 globalopts = [
3323 ('R', 'repository', '',
3322 ('R', 'repository', '',
3324 _('repository root directory or symbolic path name')),
3323 _('repository root directory or symbolic path name')),
3325 ('', 'cwd', '', _('change working directory')),
3324 ('', 'cwd', '', _('change working directory')),
3326 ('y', 'noninteractive', None,
3325 ('y', 'noninteractive', None,
3327 _('do not prompt, assume \'yes\' for any required answers')),
3326 _('do not prompt, assume \'yes\' for any required answers')),
3328 ('q', 'quiet', None, _('suppress output')),
3327 ('q', 'quiet', None, _('suppress output')),
3329 ('v', 'verbose', None, _('enable additional output')),
3328 ('v', 'verbose', None, _('enable additional output')),
3330 ('', 'config', [], _('set/override config option')),
3329 ('', 'config', [], _('set/override config option')),
3331 ('', 'debug', None, _('enable debugging output')),
3330 ('', 'debug', None, _('enable debugging output')),
3332 ('', 'debugger', None, _('start debugger')),
3331 ('', 'debugger', None, _('start debugger')),
3333 ('', 'lsprof', None, _('print improved command execution profile')),
3332 ('', 'lsprof', None, _('print improved command execution profile')),
3334 ('', 'traceback', None, _('print traceback on exception')),
3333 ('', 'traceback', None, _('print traceback on exception')),
3335 ('', 'time', None, _('time how long the command takes')),
3334 ('', 'time', None, _('time how long the command takes')),
3336 ('', 'profile', None, _('print command execution profile')),
3335 ('', 'profile', None, _('print command execution profile')),
3337 ('', 'version', None, _('output version information and exit')),
3336 ('', 'version', None, _('output version information and exit')),
3338 ('h', 'help', None, _('display help and exit')),
3337 ('h', 'help', None, _('display help and exit')),
3339 ]
3338 ]
3340
3339
3341 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3340 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3342 " debugindex debugindexdot")
3341 " debugindex debugindexdot")
3343 optionalrepo = ("paths serve debugconfig")
3342 optionalrepo = ("paths serve debugconfig")
3344
3343
3345 def findpossible(cmd):
3344 def findpossible(cmd):
3346 """
3345 """
3347 Return cmd -> (aliases, command table entry)
3346 Return cmd -> (aliases, command table entry)
3348 for each matching command.
3347 for each matching command.
3349 Return debug commands (or their aliases) only if no normal command matches.
3348 Return debug commands (or their aliases) only if no normal command matches.
3350 """
3349 """
3351 choice = {}
3350 choice = {}
3352 debugchoice = {}
3351 debugchoice = {}
3353 for e in table.keys():
3352 for e in table.keys():
3354 aliases = e.lstrip("^").split("|")
3353 aliases = e.lstrip("^").split("|")
3355 found = None
3354 found = None
3356 if cmd in aliases:
3355 if cmd in aliases:
3357 found = cmd
3356 found = cmd
3358 else:
3357 else:
3359 for a in aliases:
3358 for a in aliases:
3360 if a.startswith(cmd):
3359 if a.startswith(cmd):
3361 found = a
3360 found = a
3362 break
3361 break
3363 if found is not None:
3362 if found is not None:
3364 if aliases[0].startswith("debug"):
3363 if aliases[0].startswith("debug"):
3365 debugchoice[found] = (aliases, table[e])
3364 debugchoice[found] = (aliases, table[e])
3366 else:
3365 else:
3367 choice[found] = (aliases, table[e])
3366 choice[found] = (aliases, table[e])
3368
3367
3369 if not choice and debugchoice:
3368 if not choice and debugchoice:
3370 choice = debugchoice
3369 choice = debugchoice
3371
3370
3372 return choice
3371 return choice
3373
3372
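As an aside, the prefix matching that findpossible implements can be illustrated with a small standalone sketch; the three-entry table below is made up for the example and is not the real command table, and the sketch simplifies by returning immediately on an exact alias match.

# Illustrative sketch of prefix-based command resolution (made-up table).
def resolve(cmd, table):
    matches = {}
    for entry in table:
        aliases = entry.lstrip("^").split("|")
        if cmd in aliases:
            return {cmd: aliases}          # exact alias match wins
        for a in aliases:
            if a.startswith(cmd):
                matches[a] = aliases       # collect prefix candidates
                break
    return matches

table = ["^update|up|checkout|co", "^status|st", "tags"]
print(resolve("up", table))   # exact alias -> {'up': ['update', 'up', 'checkout', 'co']}
print(resolve("t", table))    # prefix of 'tags' -> {'tags': ['tags']}
print(resolve("s", table))    # prefix of 'status' -> {'status': ['status', 'st']}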
3374 def findcmd(cmd):
3373 def findcmd(cmd):
3375 """Return (aliases, command table entry) for command string."""
3374 """Return (aliases, command table entry) for command string."""
3376 choice = findpossible(cmd)
3375 choice = findpossible(cmd)
3377
3376
3378 if choice.has_key(cmd):
3377 if choice.has_key(cmd):
3379 return choice[cmd]
3378 return choice[cmd]
3380
3379
3381 if len(choice) > 1:
3380 if len(choice) > 1:
3382 clist = choice.keys()
3381 clist = choice.keys()
3383 clist.sort()
3382 clist.sort()
3384 raise AmbiguousCommand(cmd, clist)
3383 raise AmbiguousCommand(cmd, clist)
3385
3384
3386 if choice:
3385 if choice:
3387 return choice.values()[0]
3386 return choice.values()[0]
3388
3387
3389 raise UnknownCommand(cmd)
3388 raise UnknownCommand(cmd)
3390
3389
3391 def catchterm(*args):
3390 def catchterm(*args):
3392 raise util.SignalInterrupt
3391 raise util.SignalInterrupt
3393
3392
3394 def run():
3393 def run():
3395 sys.exit(dispatch(sys.argv[1:]))
3394 sys.exit(dispatch(sys.argv[1:]))
3396
3395
3397 class ParseError(Exception):
3396 class ParseError(Exception):
3398 """Exception raised on errors in parsing the command line."""
3397 """Exception raised on errors in parsing the command line."""
3399
3398
3400 def parse(ui, args):
3399 def parse(ui, args):
3401 options = {}
3400 options = {}
3402 cmdoptions = {}
3401 cmdoptions = {}
3403
3402
3404 try:
3403 try:
3405 args = fancyopts.fancyopts(args, globalopts, options)
3404 args = fancyopts.fancyopts(args, globalopts, options)
3406 except fancyopts.getopt.GetoptError, inst:
3405 except fancyopts.getopt.GetoptError, inst:
3407 raise ParseError(None, inst)
3406 raise ParseError(None, inst)
3408
3407
3409 if args:
3408 if args:
3410 cmd, args = args[0], args[1:]
3409 cmd, args = args[0], args[1:]
3411 aliases, i = findcmd(cmd)
3410 aliases, i = findcmd(cmd)
3412 cmd = aliases[0]
3411 cmd = aliases[0]
3413 defaults = ui.config("defaults", cmd)
3412 defaults = ui.config("defaults", cmd)
3414 if defaults:
3413 if defaults:
3415 args = defaults.split() + args
3414 args = defaults.split() + args
3416 c = list(i[1])
3415 c = list(i[1])
3417 else:
3416 else:
3418 cmd = None
3417 cmd = None
3419 c = []
3418 c = []
3420
3419
3421 # combine global options into local
3420 # combine global options into local
3422 for o in globalopts:
3421 for o in globalopts:
3423 c.append((o[0], o[1], options[o[1]], o[3]))
3422 c.append((o[0], o[1], options[o[1]], o[3]))
3424
3423
3425 try:
3424 try:
3426 args = fancyopts.fancyopts(args, c, cmdoptions)
3425 args = fancyopts.fancyopts(args, c, cmdoptions)
3427 except fancyopts.getopt.GetoptError, inst:
3426 except fancyopts.getopt.GetoptError, inst:
3428 raise ParseError(cmd, inst)
3427 raise ParseError(cmd, inst)
3429
3428
3430 # separate global options back out
3429 # separate global options back out
3431 for o in globalopts:
3430 for o in globalopts:
3432 n = o[1]
3431 n = o[1]
3433 options[n] = cmdoptions[n]
3432 options[n] = cmdoptions[n]
3434 del cmdoptions[n]
3433 del cmdoptions[n]
3435
3434
3436 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3435 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3437
3436
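The combine-then-separate bookkeeping that parse() performs around fancyopts can be sketched without fancyopts at all; the option names and values below are illustrative only, and a plain dict update stands in for the actual command-line parsing.

# Simplified, standalone sketch of parse()'s option bookkeeping: global
# option names are merged into the per-command option dict before parsing,
# then split back out afterwards.
globalopt_names = ["verbose", "quiet", "traceback"]

def parse_sketch(cmd_defaults, parsed_values):
    cmdoptions = dict(cmd_defaults)
    for n in globalopt_names:            # combine global options into local
        cmdoptions.setdefault(n, None)
    cmdoptions.update(parsed_values)     # stand-in for fancyopts parsing
    options = {}
    for n in globalopt_names:            # separate global options back out
        options[n] = cmdoptions.pop(n)
    return options, cmdoptions

opts, cmdopts = parse_sketch({"rev": []}, {"verbose": True, "rev": ["tip"]})
print(opts)     # global values: 'verbose' is True, the others stay None
print(cmdopts)  # command-specific values only: {'rev': ['tip']}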
3438 external = {}
3437 external = {}
3439
3438
3440 def findext(name):
3439 def findext(name):
3441 '''return module with given extension name'''
3440 '''return module with given extension name'''
3442 try:
3441 try:
3443 return sys.modules[external[name]]
3442 return sys.modules[external[name]]
3444 except KeyError:
3443 except KeyError:
3445 for k, v in external.iteritems():
3444 for k, v in external.iteritems():
3446 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3445 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3447 return sys.modules[v]
3446 return sys.modules[v]
3448 raise KeyError(name)
3447 raise KeyError(name)
3449
3448
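findext's fallback loop matches an extension either by a '.name' or '/name' suffix of the configured key or by the stored module name. A standalone sketch over a made-up mapping (the entries are hypothetical, not real extensions):

# Standalone sketch of findext's fallback lookup over a made-up mapping of
# configured extension keys -> loaded module names (hypothetical entries).
external_sketch = {
    "hgext.mq": "hgext.mq",
    "/home/user/exts/fold.py": "hgext_fold",
}

def findext_sketch(name):
    for k, v in external_sketch.items():
        if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
            return v
    raise KeyError(name)

print(findext_sketch("mq"))          # 'hgext.mq'   (matched via '.mq' suffix)
print(findext_sketch("hgext_fold"))  # 'hgext_fold' (matched via module name)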
3450 def dispatch(args):
3449 def dispatch(args):
3451 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3450 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3452 num = getattr(signal, name, None)
3451 num = getattr(signal, name, None)
3453 if num: signal.signal(num, catchterm)
3452 if num: signal.signal(num, catchterm)
3454
3453
3455 try:
3454 try:
3456 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3455 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3457 except util.Abort, inst:
3456 except util.Abort, inst:
3458 sys.stderr.write(_("abort: %s\n") % inst)
3457 sys.stderr.write(_("abort: %s\n") % inst)
3459 return -1
3458 return -1
3460
3459
3461 for ext_name, load_from_name in u.extensions():
3460 for ext_name, load_from_name in u.extensions():
3462 try:
3461 try:
3463 if load_from_name:
3462 if load_from_name:
3464 # the module will be loaded in sys.modules
3463 # the module will be loaded in sys.modules
3465 # choose a unique name so that it doesn't
3464 # choose a unique name so that it doesn't
3466 # conflict with other modules
3465 # conflict with other modules
3467 module_name = "hgext_%s" % ext_name.replace('.', '_')
3466 module_name = "hgext_%s" % ext_name.replace('.', '_')
3468 mod = imp.load_source(module_name, load_from_name)
3467 mod = imp.load_source(module_name, load_from_name)
3469 else:
3468 else:
3470 def importh(name):
3469 def importh(name):
3471 mod = __import__(name)
3470 mod = __import__(name)
3472 components = name.split('.')
3471 components = name.split('.')
3473 for comp in components[1:]:
3472 for comp in components[1:]:
3474 mod = getattr(mod, comp)
3473 mod = getattr(mod, comp)
3475 return mod
3474 return mod
3476 try:
3475 try:
3477 mod = importh("hgext.%s" % ext_name)
3476 mod = importh("hgext.%s" % ext_name)
3478 except ImportError:
3477 except ImportError:
3479 mod = importh(ext_name)
3478 mod = importh(ext_name)
3480 external[ext_name] = mod.__name__
3479 external[ext_name] = mod.__name__
3481 except (util.SignalInterrupt, KeyboardInterrupt):
3480 except (util.SignalInterrupt, KeyboardInterrupt):
3482 raise
3481 raise
3483 except Exception, inst:
3482 except Exception, inst:
3484 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3483 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3485 if u.print_exc():
3484 if u.print_exc():
3486 return 1
3485 return 1
3487
3486
3488 for name in external.itervalues():
3487 for name in external.itervalues():
3489 mod = sys.modules[name]
3488 mod = sys.modules[name]
3490 uisetup = getattr(mod, 'uisetup', None)
3489 uisetup = getattr(mod, 'uisetup', None)
3491 if uisetup:
3490 if uisetup:
3492 uisetup(u)
3491 uisetup(u)
3493 cmdtable = getattr(mod, 'cmdtable', {})
3492 cmdtable = getattr(mod, 'cmdtable', {})
3494 for t in cmdtable:
3493 for t in cmdtable:
3495 if t in table:
3494 if t in table:
3496 u.warn(_("module %s overrides %s\n") % (name, t))
3495 u.warn(_("module %s overrides %s\n") % (name, t))
3497 table.update(cmdtable)
3496 table.update(cmdtable)
3498
3497
3499 try:
3498 try:
3500 cmd, func, args, options, cmdoptions = parse(u, args)
3499 cmd, func, args, options, cmdoptions = parse(u, args)
3501 if options["time"]:
3500 if options["time"]:
3502 def get_times():
3501 def get_times():
3503 t = os.times()
3502 t = os.times()
3504 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3503 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3505 t = (t[0], t[1], t[2], t[3], time.clock())
3504 t = (t[0], t[1], t[2], t[3], time.clock())
3506 return t
3505 return t
3507 s = get_times()
3506 s = get_times()
3508 def print_time():
3507 def print_time():
3509 t = get_times()
3508 t = get_times()
3510 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3509 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3511 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3510 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3512 atexit.register(print_time)
3511 atexit.register(print_time)
3513
3512
3514 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3513 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3515 not options["noninteractive"], options["traceback"],
3514 not options["noninteractive"], options["traceback"],
3516 options["config"])
3515 options["config"])
3517
3516
3518 # enter the debugger before command execution
3517 # enter the debugger before command execution
3519 if options['debugger']:
3518 if options['debugger']:
3520 pdb.set_trace()
3519 pdb.set_trace()
3521
3520
3522 try:
3521 try:
3523 if options['cwd']:
3522 if options['cwd']:
3524 try:
3523 try:
3525 os.chdir(options['cwd'])
3524 os.chdir(options['cwd'])
3526 except OSError, inst:
3525 except OSError, inst:
3527 raise util.Abort('%s: %s' %
3526 raise util.Abort('%s: %s' %
3528 (options['cwd'], inst.strerror))
3527 (options['cwd'], inst.strerror))
3529
3528
3530 path = u.expandpath(options["repository"]) or ""
3529 path = u.expandpath(options["repository"]) or ""
3531 repo = path and hg.repository(u, path=path) or None
3530 repo = path and hg.repository(u, path=path) or None
3532
3531
3533 if options['help']:
3532 if options['help']:
3534 return help_(u, cmd, options['version'])
3533 return help_(u, cmd, options['version'])
3535 elif options['version']:
3534 elif options['version']:
3536 return show_version(u)
3535 return show_version(u)
3537 elif not cmd:
3536 elif not cmd:
3538 return help_(u, 'shortlist')
3537 return help_(u, 'shortlist')
3539
3538
3540 if cmd not in norepo.split():
3539 if cmd not in norepo.split():
3541 try:
3540 try:
3542 if not repo:
3541 if not repo:
3543 repo = hg.repository(u, path=path)
3542 repo = hg.repository(u, path=path)
3544 u = repo.ui
3543 u = repo.ui
3545 for name in external.itervalues():
3544 for name in external.itervalues():
3546 mod = sys.modules[name]
3545 mod = sys.modules[name]
3547 if hasattr(mod, 'reposetup'):
3546 if hasattr(mod, 'reposetup'):
3548 mod.reposetup(u, repo)
3547 mod.reposetup(u, repo)
3549 except hg.RepoError:
3548 except hg.RepoError:
3550 if cmd not in optionalrepo.split():
3549 if cmd not in optionalrepo.split():
3551 raise
3550 raise
3552 d = lambda: func(u, repo, *args, **cmdoptions)
3551 d = lambda: func(u, repo, *args, **cmdoptions)
3553 else:
3552 else:
3554 d = lambda: func(u, *args, **cmdoptions)
3553 d = lambda: func(u, *args, **cmdoptions)
3555
3554
3556 # reupdate the options, repo/.hg/hgrc may have changed them
3555 # reupdate the options, repo/.hg/hgrc may have changed them
3557 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3556 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3558 not options["noninteractive"], options["traceback"],
3557 not options["noninteractive"], options["traceback"],
3559 options["config"])
3558 options["config"])
3560
3559
3561 try:
3560 try:
3562 if options['profile']:
3561 if options['profile']:
3563 import hotshot, hotshot.stats
3562 import hotshot, hotshot.stats
3564 prof = hotshot.Profile("hg.prof")
3563 prof = hotshot.Profile("hg.prof")
3565 try:
3564 try:
3566 try:
3565 try:
3567 return prof.runcall(d)
3566 return prof.runcall(d)
3568 except:
3567 except:
3569 try:
3568 try:
3570 u.warn(_('exception raised - generating '
3569 u.warn(_('exception raised - generating '
3571 'profile anyway\n'))
3570 'profile anyway\n'))
3572 except:
3571 except:
3573 pass
3572 pass
3574 raise
3573 raise
3575 finally:
3574 finally:
3576 prof.close()
3575 prof.close()
3577 stats = hotshot.stats.load("hg.prof")
3576 stats = hotshot.stats.load("hg.prof")
3578 stats.strip_dirs()
3577 stats.strip_dirs()
3579 stats.sort_stats('time', 'calls')
3578 stats.sort_stats('time', 'calls')
3580 stats.print_stats(40)
3579 stats.print_stats(40)
3581 elif options['lsprof']:
3580 elif options['lsprof']:
3582 try:
3581 try:
3583 from mercurial import lsprof
3582 from mercurial import lsprof
3584 except ImportError:
3583 except ImportError:
3585 raise util.Abort(_(
3584 raise util.Abort(_(
3586 'lsprof not available - install from '
3585 'lsprof not available - install from '
3587 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3586 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3588 p = lsprof.Profiler()
3587 p = lsprof.Profiler()
3589 p.enable(subcalls=True)
3588 p.enable(subcalls=True)
3590 try:
3589 try:
3591 return d()
3590 return d()
3592 finally:
3591 finally:
3593 p.disable()
3592 p.disable()
3594 stats = lsprof.Stats(p.getstats())
3593 stats = lsprof.Stats(p.getstats())
3595 stats.sort()
3594 stats.sort()
3596 stats.pprint(top=10, file=sys.stderr, climit=5)
3595 stats.pprint(top=10, file=sys.stderr, climit=5)
3597 else:
3596 else:
3598 return d()
3597 return d()
3599 finally:
3598 finally:
3600 u.flush()
3599 u.flush()
3601 except:
3600 except:
3602 # enter the debugger when we hit an exception
3601 # enter the debugger when we hit an exception
3603 if options['debugger']:
3602 if options['debugger']:
3604 pdb.post_mortem(sys.exc_info()[2])
3603 pdb.post_mortem(sys.exc_info()[2])
3605 u.print_exc()
3604 u.print_exc()
3606 raise
3605 raise
3607 except ParseError, inst:
3606 except ParseError, inst:
3608 if inst.args[0]:
3607 if inst.args[0]:
3609 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3608 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3610 help_(u, inst.args[0])
3609 help_(u, inst.args[0])
3611 else:
3610 else:
3612 u.warn(_("hg: %s\n") % inst.args[1])
3611 u.warn(_("hg: %s\n") % inst.args[1])
3613 help_(u, 'shortlist')
3612 help_(u, 'shortlist')
3614 except AmbiguousCommand, inst:
3613 except AmbiguousCommand, inst:
3615 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3614 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3616 (inst.args[0], " ".join(inst.args[1])))
3615 (inst.args[0], " ".join(inst.args[1])))
3617 except UnknownCommand, inst:
3616 except UnknownCommand, inst:
3618 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3617 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3619 help_(u, 'shortlist')
3618 help_(u, 'shortlist')
3620 except hg.RepoError, inst:
3619 except hg.RepoError, inst:
3621 u.warn(_("abort: %s!\n") % inst)
3620 u.warn(_("abort: %s!\n") % inst)
3622 except lock.LockHeld, inst:
3621 except lock.LockHeld, inst:
3623 if inst.errno == errno.ETIMEDOUT:
3622 if inst.errno == errno.ETIMEDOUT:
3624 reason = _('timed out waiting for lock held by %s') % inst.locker
3623 reason = _('timed out waiting for lock held by %s') % inst.locker
3625 else:
3624 else:
3626 reason = _('lock held by %s') % inst.locker
3625 reason = _('lock held by %s') % inst.locker
3627 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3626 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3628 except lock.LockUnavailable, inst:
3627 except lock.LockUnavailable, inst:
3629 u.warn(_("abort: could not lock %s: %s\n") %
3628 u.warn(_("abort: could not lock %s: %s\n") %
3630 (inst.desc or inst.filename, inst.strerror))
3629 (inst.desc or inst.filename, inst.strerror))
3631 except revlog.RevlogError, inst:
3630 except revlog.RevlogError, inst:
3632 u.warn(_("abort: "), inst, "!\n")
3631 u.warn(_("abort: "), inst, "!\n")
3633 except util.SignalInterrupt:
3632 except util.SignalInterrupt:
3634 u.warn(_("killed!\n"))
3633 u.warn(_("killed!\n"))
3635 except KeyboardInterrupt:
3634 except KeyboardInterrupt:
3636 try:
3635 try:
3637 u.warn(_("interrupted!\n"))
3636 u.warn(_("interrupted!\n"))
3638 except IOError, inst:
3637 except IOError, inst:
3639 if inst.errno == errno.EPIPE:
3638 if inst.errno == errno.EPIPE:
3640 if u.debugflag:
3639 if u.debugflag:
3641 u.warn(_("\nbroken pipe\n"))
3640 u.warn(_("\nbroken pipe\n"))
3642 else:
3641 else:
3643 raise
3642 raise
3644 except IOError, inst:
3643 except IOError, inst:
3645 if hasattr(inst, "code"):
3644 if hasattr(inst, "code"):
3646 u.warn(_("abort: %s\n") % inst)
3645 u.warn(_("abort: %s\n") % inst)
3647 elif hasattr(inst, "reason"):
3646 elif hasattr(inst, "reason"):
3648 u.warn(_("abort: error: %s\n") % inst.reason[1])
3647 u.warn(_("abort: error: %s\n") % inst.reason[1])
3649 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3648 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3650 if u.debugflag:
3649 if u.debugflag:
3651 u.warn(_("broken pipe\n"))
3650 u.warn(_("broken pipe\n"))
3652 elif getattr(inst, "strerror", None):
3651 elif getattr(inst, "strerror", None):
3653 if getattr(inst, "filename", None):
3652 if getattr(inst, "filename", None):
3654 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3653 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3655 else:
3654 else:
3656 u.warn(_("abort: %s\n") % inst.strerror)
3655 u.warn(_("abort: %s\n") % inst.strerror)
3657 else:
3656 else:
3658 raise
3657 raise
3659 except OSError, inst:
3658 except OSError, inst:
3660 if hasattr(inst, "filename"):
3659 if hasattr(inst, "filename"):
3661 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3660 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3662 else:
3661 else:
3663 u.warn(_("abort: %s\n") % inst.strerror)
3662 u.warn(_("abort: %s\n") % inst.strerror)
3664 except util.Abort, inst:
3663 except util.Abort, inst:
3665 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3664 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3666 except TypeError, inst:
3665 except TypeError, inst:
3667 # was this an argument error?
3666 # was this an argument error?
3668 tb = traceback.extract_tb(sys.exc_info()[2])
3667 tb = traceback.extract_tb(sys.exc_info()[2])
3669 if len(tb) > 2: # no
3668 if len(tb) > 2: # no
3670 raise
3669 raise
3671 u.debug(inst, "\n")
3670 u.debug(inst, "\n")
3672 u.warn(_("%s: invalid arguments\n") % cmd)
3671 u.warn(_("%s: invalid arguments\n") % cmd)
3673 help_(u, cmd)
3672 help_(u, cmd)
3674 except SystemExit, inst:
3673 except SystemExit, inst:
3675 # Commands shouldn't sys.exit directly, but give a return code.
3674 # Commands shouldn't sys.exit directly, but give a return code.
3676 # Just in case catch this and and pass exit code to caller.
3675 # Just in case catch this and and pass exit code to caller.
3677 return inst.code
3676 return inst.code
3678 except:
3677 except:
3679 u.warn(_("** unknown exception encountered, details follow\n"))
3678 u.warn(_("** unknown exception encountered, details follow\n"))
3680 u.warn(_("** report bug details to "
3679 u.warn(_("** report bug details to "
3681 "http://www.selenic.com/mercurial/bts\n"))
3680 "http://www.selenic.com/mercurial/bts\n"))
3682 u.warn(_("** or mercurial@selenic.com\n"))
3681 u.warn(_("** or mercurial@selenic.com\n"))
3683 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3682 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3684 % version.get_version())
3683 % version.get_version())
3685 raise
3684 raise
3686
3685
3687 return -1
3686 return -1
@@ -1,992 +1,991 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os
10 import os.path
10 import os.path
11 import mimetypes
11 import mimetypes
12 from mercurial.demandload import demandload
12 from mercurial.demandload import demandload
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone")
15 demandload(globals(), "mercurial:templater")
15 demandload(globals(), "mercurial:templater")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
17 from mercurial.node import *
17 from mercurial.node import *
18 from mercurial.i18n import gettext as _
18 from mercurial.i18n import gettext as _
19
19
20 def _up(p):
20 def _up(p):
21 if p[0] != "/":
21 if p[0] != "/":
22 p = "/" + p
22 p = "/" + p
23 if p[-1] == "/":
23 if p[-1] == "/":
24 p = p[:-1]
24 p = p[:-1]
25 up = os.path.dirname(p)
25 up = os.path.dirname(p)
26 if up == "/":
26 if up == "/":
27 return "/"
27 return "/"
28 return up + "/"
28 return up + "/"
29
29
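For reference, the behaviour of _up() on a few sample inputs (POSIX-style paths, as hgweb uses for repository-internal paths; assumes the _up definition above is in scope):

# Expected results of the _up() helper above on sample inputs.
assert _up("a/b/c") == "/a/b/"   # leading "/" added, last component dropped
assert _up("/a/b/") == "/a/"     # trailing "/" stripped before taking dirname
assert _up("/a") == "/"          # parent of a top-level entry is the root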
30 class hgweb(object):
30 class hgweb(object):
31 def __init__(self, repo, name=None):
31 def __init__(self, repo, name=None):
32 if type(repo) == type(""):
32 if type(repo) == type(""):
33 self.repo = hg.repository(ui.ui(), repo)
33 self.repo = hg.repository(ui.ui(), repo)
34 else:
34 else:
35 self.repo = repo
35 self.repo = repo
36
36
37 self.mtime = -1
37 self.mtime = -1
38 self.reponame = name
38 self.reponame = name
39 self.archives = 'zip', 'gz', 'bz2'
39 self.archives = 'zip', 'gz', 'bz2'
40 self.stripecount = 1
40 self.stripecount = 1
41 self.templatepath = self.repo.ui.config("web", "templates",
41 self.templatepath = self.repo.ui.config("web", "templates",
42 templater.templatepath())
42 templater.templatepath())
43
43
44 def refresh(self):
44 def refresh(self):
45 mtime = get_mtime(self.repo.root)
45 mtime = get_mtime(self.repo.root)
46 if mtime != self.mtime:
46 if mtime != self.mtime:
47 self.mtime = mtime
47 self.mtime = mtime
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
54
54
55 def archivelist(self, nodeid):
55 def archivelist(self, nodeid):
56 allowed = self.repo.ui.configlist("web", "allow_archive")
56 allowed = self.repo.ui.configlist("web", "allow_archive")
57 for i in self.archives:
57 for i in self.archives:
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
59 yield {"type" : i, "node" : nodeid, "url": ""}
59 yield {"type" : i, "node" : nodeid, "url": ""}
60
60
61 def listfiles(self, files, mf):
61 def listfiles(self, files, mf):
62 for f in files[:self.maxfiles]:
62 for f in files[:self.maxfiles]:
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
64 if len(files) > self.maxfiles:
64 if len(files) > self.maxfiles:
65 yield self.t("fileellipses")
65 yield self.t("fileellipses")
66
66
67 def listfilediffs(self, files, changeset):
67 def listfilediffs(self, files, changeset):
68 for f in files[:self.maxfiles]:
68 for f in files[:self.maxfiles]:
69 yield self.t("filedifflink", node=hex(changeset), file=f)
69 yield self.t("filedifflink", node=hex(changeset), file=f)
70 if len(files) > self.maxfiles:
70 if len(files) > self.maxfiles:
71 yield self.t("fileellipses")
71 yield self.t("fileellipses")
72
72
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
74 if not rev:
74 if not rev:
75 rev = lambda x: ""
75 rev = lambda x: ""
76 siblings = [s for s in siblings if s != nullid]
76 siblings = [s for s in siblings if s != nullid]
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
78 return
78 return
79 for s in siblings:
79 for s in siblings:
80 yield dict(node=hex(s), rev=rev(s), **args)
80 yield dict(node=hex(s), rev=rev(s), **args)
81
81
82 def renamelink(self, fl, node):
82 def renamelink(self, fl, node):
83 r = fl.renamed(node)
83 r = fl.renamed(node)
84 if r:
84 if r:
85 return [dict(file=r[0], node=hex(r[1]))]
85 return [dict(file=r[0], node=hex(r[1]))]
86 return []
86 return []
87
87
88 def showtag(self, t1, node=nullid, **args):
88 def showtag(self, t1, node=nullid, **args):
89 for t in self.repo.nodetags(node):
89 for t in self.repo.nodetags(node):
90 yield self.t(t1, tag=t, **args)
90 yield self.t(t1, tag=t, **args)
91
91
92 def diff(self, node1, node2, files):
92 def diff(self, node1, node2, files):
93 def filterfiles(filters, files):
93 def filterfiles(filters, files):
94 l = [x for x in files if x in filters]
94 l = [x for x in files if x in filters]
95
95
96 for t in filters:
96 for t in filters:
97 if t and t[-1] != os.sep:
97 if t and t[-1] != os.sep:
98 t += os.sep
98 t += os.sep
99 l += [x for x in files if x.startswith(t)]
99 l += [x for x in files if x.startswith(t)]
100 return l
100 return l
101
101
102 parity = [0]
102 parity = [0]
103 def diffblock(diff, f, fn):
103 def diffblock(diff, f, fn):
104 yield self.t("diffblock",
104 yield self.t("diffblock",
105 lines=prettyprintlines(diff),
105 lines=prettyprintlines(diff),
106 parity=parity[0],
106 parity=parity[0],
107 file=f,
107 file=f,
108 filenode=hex(fn or nullid))
108 filenode=hex(fn or nullid))
109 parity[0] = 1 - parity[0]
109 parity[0] = 1 - parity[0]
110
110
111 def prettyprintlines(diff):
111 def prettyprintlines(diff):
112 for l in diff.splitlines(1):
112 for l in diff.splitlines(1):
113 if l.startswith('+'):
113 if l.startswith('+'):
114 yield self.t("difflineplus", line=l)
114 yield self.t("difflineplus", line=l)
115 elif l.startswith('-'):
115 elif l.startswith('-'):
116 yield self.t("difflineminus", line=l)
116 yield self.t("difflineminus", line=l)
117 elif l.startswith('@'):
117 elif l.startswith('@'):
118 yield self.t("difflineat", line=l)
118 yield self.t("difflineat", line=l)
119 else:
119 else:
120 yield self.t("diffline", line=l)
120 yield self.t("diffline", line=l)
121
121
122 r = self.repo
122 r = self.repo
123 cl = r.changelog
123 cl = r.changelog
124 mf = r.manifest
124 mf = r.manifest
125 change1 = cl.read(node1)
125 change1 = cl.read(node1)
126 change2 = cl.read(node2)
126 change2 = cl.read(node2)
127 mmap1 = mf.read(change1[0])
127 mmap1 = mf.read(change1[0])
128 mmap2 = mf.read(change2[0])
128 mmap2 = mf.read(change2[0])
129 date1 = util.datestr(change1[2])
129 date1 = util.datestr(change1[2])
130 date2 = util.datestr(change2[2])
130 date2 = util.datestr(change2[2])
131
131
132 modified, added, removed, deleted, unknown = r.changes(node1, node2)
132 modified, added, removed, deleted, unknown = r.changes(node1, node2)
133 if files:
133 if files:
134 modified, added, removed = map(lambda x: filterfiles(files, x),
134 modified, added, removed = map(lambda x: filterfiles(files, x),
135 (modified, added, removed))
135 (modified, added, removed))
136
136
137 diffopts = self.repo.ui.diffopts()
137 diffopts = self.repo.ui.diffopts()
138 showfunc = diffopts['showfunc']
138 showfunc = diffopts['showfunc']
139 ignorews = diffopts['ignorews']
139 ignorews = diffopts['ignorews']
140 ignorewsamount = diffopts['ignorewsamount']
140 ignorewsamount = diffopts['ignorewsamount']
141 ignoreblanklines = diffopts['ignoreblanklines']
141 ignoreblanklines = diffopts['ignoreblanklines']
142 for f in modified:
142 for f in modified:
143 to = r.file(f).read(mmap1[f])
143 to = r.file(f).read(mmap1[f])
144 tn = r.file(f).read(mmap2[f])
144 tn = r.file(f).read(mmap2[f])
145 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
145 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
146 showfunc=showfunc, ignorews=ignorews,
146 showfunc=showfunc, ignorews=ignorews,
147 ignorewsamount=ignorewsamount,
147 ignorewsamount=ignorewsamount,
148 ignoreblanklines=ignoreblanklines), f, tn)
148 ignoreblanklines=ignoreblanklines), f, tn)
149 for f in added:
149 for f in added:
150 to = None
150 to = None
151 tn = r.file(f).read(mmap2[f])
151 tn = r.file(f).read(mmap2[f])
152 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
152 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
153 showfunc=showfunc, ignorews=ignorews,
153 showfunc=showfunc, ignorews=ignorews,
154 ignorewsamount=ignorewsamount,
154 ignorewsamount=ignorewsamount,
155 ignoreblanklines=ignoreblanklines), f, tn)
155 ignoreblanklines=ignoreblanklines), f, tn)
156 for f in removed:
156 for f in removed:
157 to = r.file(f).read(mmap1[f])
157 to = r.file(f).read(mmap1[f])
158 tn = None
158 tn = None
159 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
159 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
160 showfunc=showfunc, ignorews=ignorews,
160 showfunc=showfunc, ignorews=ignorews,
161 ignorewsamount=ignorewsamount,
161 ignorewsamount=ignorewsamount,
162 ignoreblanklines=ignoreblanklines), f, tn)
162 ignoreblanklines=ignoreblanklines), f, tn)
163
163
164 def changelog(self, pos, shortlog=False):
164 def changelog(self, pos, shortlog=False):
165 def changenav(**map):
165 def changenav(**map):
166 def seq(factor, maxchanges=None):
166 def seq(factor, maxchanges=None):
167 if maxchanges:
167 if maxchanges:
168 yield maxchanges
168 yield maxchanges
169 if maxchanges >= 20 and maxchanges <= 40:
169 if maxchanges >= 20 and maxchanges <= 40:
170 yield 50
170 yield 50
171 else:
171 else:
172 yield 1 * factor
172 yield 1 * factor
173 yield 3 * factor
173 yield 3 * factor
174 for f in seq(factor * 10):
174 for f in seq(factor * 10):
175 yield f
175 yield f
176
176
177 l = []
177 l = []
178 last = 0
178 last = 0
179 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
179 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
180 for f in seq(1, maxchanges):
180 for f in seq(1, maxchanges):
181 if f < maxchanges or f <= last:
181 if f < maxchanges or f <= last:
182 continue
182 continue
183 if f > count:
183 if f > count:
184 break
184 break
185 last = f
185 last = f
186 r = "%d" % f
186 r = "%d" % f
187 if pos + f < count:
187 if pos + f < count:
188 l.append(("+" + r, pos + f))
188 l.append(("+" + r, pos + f))
189 if pos - f >= 0:
189 if pos - f >= 0:
190 l.insert(0, ("-" + r, pos - f))
190 l.insert(0, ("-" + r, pos - f))
191
191
192 yield {"rev": 0, "label": "(0)"}
192 yield {"rev": 0, "label": "(0)"}
193
193
194 for label, rev in l:
194 for label, rev in l:
195 yield {"label": label, "rev": rev}
195 yield {"label": label, "rev": rev}
196
196
197 yield {"label": "tip", "rev": "tip"}
197 yield {"label": "tip", "rev": "tip"}
198
198
199 def changelist(**map):
199 def changelist(**map):
200 parity = (start - end) & 1
200 parity = (start - end) & 1
201 cl = self.repo.changelog
201 cl = self.repo.changelog
202 l = [] # build a list in forward order for efficiency
202 l = [] # build a list in forward order for efficiency
203 for i in range(start, end):
203 for i in range(start, end):
204 n = cl.node(i)
204 n = cl.node(i)
205 changes = cl.read(n)
205 changes = cl.read(n)
206 hn = hex(n)
206 hn = hex(n)
207
207
208 l.insert(0, {"parity": parity,
208 l.insert(0, {"parity": parity,
209 "author": changes[1],
209 "author": changes[1],
210 "parent": self.siblings(cl.parents(n), cl.rev,
210 "parent": self.siblings(cl.parents(n), cl.rev,
211 cl.rev(n) - 1),
211 cl.rev(n) - 1),
212 "child": self.siblings(cl.children(n), cl.rev,
212 "child": self.siblings(cl.children(n), cl.rev,
213 cl.rev(n) + 1),
213 cl.rev(n) + 1),
214 "changelogtag": self.showtag("changelogtag",n),
214 "changelogtag": self.showtag("changelogtag",n),
215 "manifest": hex(changes[0]),
215 "manifest": hex(changes[0]),
216 "desc": changes[4],
216 "desc": changes[4],
217 "date": changes[2],
217 "date": changes[2],
218 "files": self.listfilediffs(changes[3], n),
218 "files": self.listfilediffs(changes[3], n),
219 "rev": i,
219 "rev": i,
220 "node": hn})
220 "node": hn})
221 parity = 1 - parity
221 parity = 1 - parity
222
222
223 for e in l:
223 for e in l:
224 yield e
224 yield e
225
225
226 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
226 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
227 cl = self.repo.changelog
227 cl = self.repo.changelog
228 mf = cl.read(cl.tip())[0]
228 mf = cl.read(cl.tip())[0]
229 count = cl.count()
229 count = cl.count()
230 start = max(0, pos - maxchanges + 1)
230 start = max(0, pos - maxchanges + 1)
231 end = min(count, start + maxchanges)
231 end = min(count, start + maxchanges)
232 pos = end - 1
232 pos = end - 1
233
233
234 yield self.t(shortlog and 'shortlog' or 'changelog',
234 yield self.t(shortlog and 'shortlog' or 'changelog',
235 changenav=changenav,
235 changenav=changenav,
236 manifest=hex(mf),
236 manifest=hex(mf),
237 rev=pos, changesets=count, entries=changelist,
237 rev=pos, changesets=count, entries=changelist,
238 archives=self.archivelist("tip"))
238 archives=self.archivelist("tip"))
239
239
240 def search(self, query):
240 def search(self, query):
241
241
242 def changelist(**map):
242 def changelist(**map):
243 cl = self.repo.changelog
243 cl = self.repo.changelog
244 count = 0
244 count = 0
245 qw = query.lower().split()
245 qw = query.lower().split()
246
246
247 def revgen():
247 def revgen():
248 for i in range(cl.count() - 1, 0, -100):
248 for i in range(cl.count() - 1, 0, -100):
249 l = []
249 l = []
250 for j in range(max(0, i - 100), i):
250 for j in range(max(0, i - 100), i):
251 n = cl.node(j)
251 n = cl.node(j)
252 changes = cl.read(n)
252 changes = cl.read(n)
253 l.append((n, j, changes))
253 l.append((n, j, changes))
254 l.reverse()
254 l.reverse()
255 for e in l:
255 for e in l:
256 yield e
256 yield e
257
257
258 for n, i, changes in revgen():
258 for n, i, changes in revgen():
259 miss = 0
259 miss = 0
260 for q in qw:
260 for q in qw:
261 if not (q in changes[1].lower() or
261 if not (q in changes[1].lower() or
262 q in changes[4].lower() or
262 q in changes[4].lower() or
263 q in " ".join(changes[3][:20]).lower()):
263 q in " ".join(changes[3][:20]).lower()):
264 miss = 1
264 miss = 1
265 break
265 break
266 if miss:
266 if miss:
267 continue
267 continue
268
268
269 count += 1
269 count += 1
270 hn = hex(n)
270 hn = hex(n)
271
271
272 yield self.t('searchentry',
272 yield self.t('searchentry',
273 parity=self.stripes(count),
273 parity=self.stripes(count),
274 author=changes[1],
274 author=changes[1],
275 parent=self.siblings(cl.parents(n), cl.rev),
275 parent=self.siblings(cl.parents(n), cl.rev),
276 child=self.siblings(cl.children(n), cl.rev),
276 child=self.siblings(cl.children(n), cl.rev),
277 changelogtag=self.showtag("changelogtag",n),
277 changelogtag=self.showtag("changelogtag",n),
278 manifest=hex(changes[0]),
278 manifest=hex(changes[0]),
279 desc=changes[4],
279 desc=changes[4],
280 date=changes[2],
280 date=changes[2],
281 files=self.listfilediffs(changes[3], n),
281 files=self.listfilediffs(changes[3], n),
282 rev=i,
282 rev=i,
283 node=hn)
283 node=hn)
284
284
285 if count >= self.maxchanges:
285 if count >= self.maxchanges:
286 break
286 break
287
287
288 cl = self.repo.changelog
288 cl = self.repo.changelog
289 mf = cl.read(cl.tip())[0]
289 mf = cl.read(cl.tip())[0]
290
290
291 yield self.t('search',
291 yield self.t('search',
292 query=query,
292 query=query,
293 manifest=hex(mf),
293 manifest=hex(mf),
294 entries=changelist)
294 entries=changelist)
295
295
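revgen() above walks the changelog newest-first but reads it in chunks of 100, building each chunk in ascending order and reversing it before yielding. The same pattern over plain revision numbers, with a small count and step chosen purely so the output is easy to follow:

# The chunked newest-first walk used by revgen() above, over plain integers.
def revgen_sketch(count, step=3):
    for i in range(count - 1, 0, -step):
        batch = list(range(max(0, i - step), i))   # build chunk oldest-first
        batch.reverse()                            # ...then emit newest-first
        for rev in batch:
            yield rev

print(list(revgen_sketch(7)))   # -> [5, 4, 3, 2, 1, 0]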
296 def changeset(self, nodeid):
296 def changeset(self, nodeid):
297 cl = self.repo.changelog
297 cl = self.repo.changelog
298 n = self.repo.lookup(nodeid)
298 n = self.repo.lookup(nodeid)
299 nodeid = hex(n)
299 nodeid = hex(n)
300 changes = cl.read(n)
300 changes = cl.read(n)
301 p1 = cl.parents(n)[0]
301 p1 = cl.parents(n)[0]
302
302
303 files = []
303 files = []
304 mf = self.repo.manifest.read(changes[0])
304 mf = self.repo.manifest.read(changes[0])
305 for f in changes[3]:
305 for f in changes[3]:
306 files.append(self.t("filenodelink",
306 files.append(self.t("filenodelink",
307 filenode=hex(mf.get(f, nullid)), file=f))
307 filenode=hex(mf.get(f, nullid)), file=f))
308
308
309 def diff(**map):
309 def diff(**map):
310 yield self.diff(p1, n, None)
310 yield self.diff(p1, n, None)
311
311
312 yield self.t('changeset',
312 yield self.t('changeset',
313 diff=diff,
313 diff=diff,
314 rev=cl.rev(n),
314 rev=cl.rev(n),
315 node=nodeid,
315 node=nodeid,
316 parent=self.siblings(cl.parents(n), cl.rev),
316 parent=self.siblings(cl.parents(n), cl.rev),
317 child=self.siblings(cl.children(n), cl.rev),
317 child=self.siblings(cl.children(n), cl.rev),
318 changesettag=self.showtag("changesettag",n),
318 changesettag=self.showtag("changesettag",n),
319 manifest=hex(changes[0]),
319 manifest=hex(changes[0]),
320 author=changes[1],
320 author=changes[1],
321 desc=changes[4],
321 desc=changes[4],
322 date=changes[2],
322 date=changes[2],
323 files=files,
323 files=files,
324 archives=self.archivelist(nodeid))
324 archives=self.archivelist(nodeid))
325
325
326 def filelog(self, f, filenode):
326 def filelog(self, f, filenode):
327 cl = self.repo.changelog
327 cl = self.repo.changelog
328 fl = self.repo.file(f)
328 fl = self.repo.file(f)
329 filenode = hex(fl.lookup(filenode))
329 filenode = hex(fl.lookup(filenode))
330 count = fl.count()
330 count = fl.count()
331
331
332 def entries(**map):
332 def entries(**map):
333 l = []
333 l = []
334 parity = (count - 1) & 1
334 parity = (count - 1) & 1
335
335
336 for i in range(count):
336 for i in range(count):
337 n = fl.node(i)
337 n = fl.node(i)
338 lr = fl.linkrev(n)
338 lr = fl.linkrev(n)
339 cn = cl.node(lr)
339 cn = cl.node(lr)
340 cs = cl.read(cl.node(lr))
340 cs = cl.read(cl.node(lr))
341
341
342 l.insert(0, {"parity": parity,
342 l.insert(0, {"parity": parity,
343 "filenode": hex(n),
343 "filenode": hex(n),
344 "filerev": i,
344 "filerev": i,
345 "file": f,
345 "file": f,
346 "node": hex(cn),
346 "node": hex(cn),
347 "author": cs[1],
347 "author": cs[1],
348 "date": cs[2],
348 "date": cs[2],
349 "rename": self.renamelink(fl, n),
349 "rename": self.renamelink(fl, n),
350 "parent": self.siblings(fl.parents(n),
350 "parent": self.siblings(fl.parents(n),
351 fl.rev, file=f),
351 fl.rev, file=f),
352 "child": self.siblings(fl.children(n),
352 "child": self.siblings(fl.children(n),
353 fl.rev, file=f),
353 fl.rev, file=f),
354 "desc": cs[4]})
354 "desc": cs[4]})
355 parity = 1 - parity
355 parity = 1 - parity
356
356
357 for e in l:
357 for e in l:
358 yield e
358 yield e
359
359
360 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
360 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
361
361
362 def filerevision(self, f, node):
362 def filerevision(self, f, node):
363 fl = self.repo.file(f)
363 fl = self.repo.file(f)
364 n = fl.lookup(node)
364 n = fl.lookup(node)
365 node = hex(n)
365 node = hex(n)
366 text = fl.read(n)
366 text = fl.read(n)
367 changerev = fl.linkrev(n)
367 changerev = fl.linkrev(n)
368 cl = self.repo.changelog
368 cl = self.repo.changelog
369 cn = cl.node(changerev)
369 cn = cl.node(changerev)
370 cs = cl.read(cn)
370 cs = cl.read(cn)
371 mfn = cs[0]
371 mfn = cs[0]
372
372
373 mt = mimetypes.guess_type(f)[0]
373 mt = mimetypes.guess_type(f)[0]
374 rawtext = text
374 rawtext = text
375 if util.binary(text):
375 if util.binary(text):
376 mt = mt or 'application/octet-stream'
376 mt = mt or 'application/octet-stream'
377 text = "(binary:%s)" % mt
377 text = "(binary:%s)" % mt
378 mt = mt or 'text/plain'
378 mt = mt or 'text/plain'
379
379
380 def lines():
380 def lines():
381 for l, t in enumerate(text.splitlines(1)):
381 for l, t in enumerate(text.splitlines(1)):
382 yield {"line": t,
382 yield {"line": t,
383 "linenumber": "% 6d" % (l + 1),
383 "linenumber": "% 6d" % (l + 1),
384 "parity": self.stripes(l)}
384 "parity": self.stripes(l)}
385
385
386 yield self.t("filerevision",
386 yield self.t("filerevision",
387 file=f,
387 file=f,
388 filenode=node,
388 filenode=node,
389 path=_up(f),
389 path=_up(f),
390 text=lines(),
390 text=lines(),
391 raw=rawtext,
391 raw=rawtext,
392 mimetype=mt,
392 mimetype=mt,
393 rev=changerev,
393 rev=changerev,
394 node=hex(cn),
394 node=hex(cn),
395 manifest=hex(mfn),
395 manifest=hex(mfn),
396 author=cs[1],
396 author=cs[1],
397 date=cs[2],
397 date=cs[2],
398 parent=self.siblings(fl.parents(n), fl.rev, file=f),
398 parent=self.siblings(fl.parents(n), fl.rev, file=f),
399 child=self.siblings(fl.children(n), fl.rev, file=f),
399 child=self.siblings(fl.children(n), fl.rev, file=f),
400 rename=self.renamelink(fl, n),
400 rename=self.renamelink(fl, n),
401 permissions=self.repo.manifest.readflags(mfn)[f])
401 permissions=self.repo.manifest.read(mfn).execf[f])
402
402
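The change running through this file (also in fileannotate and the manifest file list below) replaces the retired manifest.readflags() call with the manifest object returned by read(), which now carries the exec flags directly. A minimal before/after sketch, assuming repo, mfn and f are in scope as in the method above:

# before this changeset: flags lived in a separate readflags() mapping
# is_exec = repo.manifest.readflags(mfn)[f]

# after: read() returns a manifest that exposes execf keyed by filename
mf = repo.manifest.read(mfn)
is_exec = mf.execf[f]          # true if f is executable in this revision
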
403 def fileannotate(self, f, node):
403 def fileannotate(self, f, node):
404 bcache = {}
404 bcache = {}
405 ncache = {}
405 ncache = {}
406 fl = self.repo.file(f)
406 fl = self.repo.file(f)
407 n = fl.lookup(node)
407 n = fl.lookup(node)
408 node = hex(n)
408 node = hex(n)
409 changerev = fl.linkrev(n)
409 changerev = fl.linkrev(n)
410
410
411 cl = self.repo.changelog
411 cl = self.repo.changelog
412 cn = cl.node(changerev)
412 cn = cl.node(changerev)
413 cs = cl.read(cn)
413 cs = cl.read(cn)
414 mfn = cs[0]
414 mfn = cs[0]
415
415
416 def annotate(**map):
416 def annotate(**map):
417 parity = 0
417 parity = 0
418 last = None
418 last = None
419 for r, l in fl.annotate(n):
419 for r, l in fl.annotate(n):
420 try:
420 try:
421 cnode = ncache[r]
421 cnode = ncache[r]
422 except KeyError:
422 except KeyError:
423 cnode = ncache[r] = self.repo.changelog.node(r)
423 cnode = ncache[r] = self.repo.changelog.node(r)
424
424
425 try:
425 try:
426 name = bcache[r]
426 name = bcache[r]
427 except KeyError:
427 except KeyError:
428 cl = self.repo.changelog.read(cnode)
428 cl = self.repo.changelog.read(cnode)
429 bcache[r] = name = self.repo.ui.shortuser(cl[1])
429 bcache[r] = name = self.repo.ui.shortuser(cl[1])
430
430
431 if last != cnode:
431 if last != cnode:
432 parity = 1 - parity
432 parity = 1 - parity
433 last = cnode
433 last = cnode
434
434
435 yield {"parity": parity,
435 yield {"parity": parity,
436 "node": hex(cnode),
436 "node": hex(cnode),
437 "rev": r,
437 "rev": r,
438 "author": name,
438 "author": name,
439 "file": f,
439 "file": f,
440 "line": l}
440 "line": l}
441
441
442 yield self.t("fileannotate",
442 yield self.t("fileannotate",
443 file=f,
443 file=f,
444 filenode=node,
444 filenode=node,
445 annotate=annotate,
445 annotate=annotate,
446 path=_up(f),
446 path=_up(f),
447 rev=changerev,
447 rev=changerev,
448 node=hex(cn),
448 node=hex(cn),
449 manifest=hex(mfn),
449 manifest=hex(mfn),
450 author=cs[1],
450 author=cs[1],
451 date=cs[2],
451 date=cs[2],
452 rename=self.renamelink(fl, n),
452 rename=self.renamelink(fl, n),
453 parent=self.siblings(fl.parents(n), fl.rev, file=f),
453 parent=self.siblings(fl.parents(n), fl.rev, file=f),
454 child=self.siblings(fl.children(n), fl.rev, file=f),
454 child=self.siblings(fl.children(n), fl.rev, file=f),
455 permissions=self.repo.manifest.readflags(mfn)[f])
455 permissions=self.repo.manifest.read(mfn).execf[f])
456
456
457 def manifest(self, mnode, path):
457 def manifest(self, mnode, path):
458 man = self.repo.manifest
458 man = self.repo.manifest
459 mn = man.lookup(mnode)
459 mn = man.lookup(mnode)
460 mnode = hex(mn)
460 mnode = hex(mn)
461 mf = man.read(mn)
461 mf = man.read(mn)
462 rev = man.rev(mn)
462 rev = man.rev(mn)
463 changerev = man.linkrev(mn)
463 changerev = man.linkrev(mn)
464 node = self.repo.changelog.node(changerev)
464 node = self.repo.changelog.node(changerev)
465 mff = man.readflags(mn)
466
465
467 files = {}
466 files = {}
468
467
469 p = path[1:]
468 p = path[1:]
470 if p and p[-1] != "/":
469 if p and p[-1] != "/":
471 p += "/"
470 p += "/"
472 l = len(p)
471 l = len(p)
473
472
474 for f,n in mf.items():
473 for f,n in mf.items():
475 if f[:l] != p:
474 if f[:l] != p:
476 continue
475 continue
477 remain = f[l:]
476 remain = f[l:]
478 if "/" in remain:
477 if "/" in remain:
479 short = remain[:remain.index("/") + 1] # bleah
478 short = remain[:remain.index("/") + 1] # bleah
480 files[short] = (f, None)
479 files[short] = (f, None)
481 else:
480 else:
482 short = os.path.basename(remain)
481 short = os.path.basename(remain)
483 files[short] = (f, n)
482 files[short] = (f, n)
484
483
485 def filelist(**map):
484 def filelist(**map):
486 parity = 0
485 parity = 0
487 fl = files.keys()
486 fl = files.keys()
488 fl.sort()
487 fl.sort()
489 for f in fl:
488 for f in fl:
490 full, fnode = files[f]
489 full, fnode = files[f]
491 if not fnode:
490 if not fnode:
492 continue
491 continue
493
492
494 yield {"file": full,
493 yield {"file": full,
495 "manifest": mnode,
494 "manifest": mnode,
496 "filenode": hex(fnode),
495 "filenode": hex(fnode),
497 "parity": self.stripes(parity),
496 "parity": self.stripes(parity),
498 "basename": f,
497 "basename": f,
499 "permissions": mff[full]}
498 "permissions": mf.execf[full]}
500 parity += 1
499 parity += 1
501
500
502 def dirlist(**map):
501 def dirlist(**map):
503 parity = 0
502 parity = 0
504 fl = files.keys()
503 fl = files.keys()
505 fl.sort()
504 fl.sort()
506 for f in fl:
505 for f in fl:
507 full, fnode = files[f]
506 full, fnode = files[f]
508 if fnode:
507 if fnode:
509 continue
508 continue
510
509
511 yield {"parity": self.stripes(parity),
510 yield {"parity": self.stripes(parity),
512 "path": os.path.join(path, f),
511 "path": os.path.join(path, f),
513 "manifest": mnode,
512 "manifest": mnode,
514 "basename": f[:-1]}
513 "basename": f[:-1]}
515 parity += 1
514 parity += 1
516
515
517 yield self.t("manifest",
516 yield self.t("manifest",
518 manifest=mnode,
517 manifest=mnode,
519 rev=rev,
518 rev=rev,
520 node=hex(node),
519 node=hex(node),
521 path=path,
520 path=path,
522 up=_up(path),
521 up=_up(path),
523 fentries=filelist,
522 fentries=filelist,
524 dentries=dirlist,
523 dentries=dirlist,
525 archives=self.archivelist(hex(node)))
524 archives=self.archivelist(hex(node)))
526
525
527 def tags(self):
526 def tags(self):
528 cl = self.repo.changelog
527 cl = self.repo.changelog
529 mf = cl.read(cl.tip())[0]
528 mf = cl.read(cl.tip())[0]
530
529
531 i = self.repo.tagslist()
530 i = self.repo.tagslist()
532 i.reverse()
531 i.reverse()
533
532
534 def entries(notip=False, **map):
533 def entries(notip=False, **map):
535 parity = 0
534 parity = 0
536 for k,n in i:
535 for k,n in i:
537 if notip and k == "tip": continue
536 if notip and k == "tip": continue
538 yield {"parity": self.stripes(parity),
537 yield {"parity": self.stripes(parity),
539 "tag": k,
538 "tag": k,
540 "tagmanifest": hex(cl.read(n)[0]),
539 "tagmanifest": hex(cl.read(n)[0]),
541 "date": cl.read(n)[2],
540 "date": cl.read(n)[2],
542 "node": hex(n)}
541 "node": hex(n)}
543 parity += 1
542 parity += 1
544
543
545 yield self.t("tags",
544 yield self.t("tags",
546 manifest=hex(mf),
545 manifest=hex(mf),
547 entries=lambda **x: entries(False, **x),
546 entries=lambda **x: entries(False, **x),
548 entriesnotip=lambda **x: entries(True, **x))
547 entriesnotip=lambda **x: entries(True, **x))
549
548
550 def summary(self):
549 def summary(self):
551 cl = self.repo.changelog
550 cl = self.repo.changelog
552 mf = cl.read(cl.tip())[0]
551 mf = cl.read(cl.tip())[0]
553
552
554 i = self.repo.tagslist()
553 i = self.repo.tagslist()
555 i.reverse()
554 i.reverse()
556
555
557 def tagentries(**map):
556 def tagentries(**map):
558 parity = 0
557 parity = 0
559 count = 0
558 count = 0
560 for k,n in i:
559 for k,n in i:
561 if k == "tip": # skip tip
560 if k == "tip": # skip tip
562 continue
561 continue
563
562
564 count += 1
563 count += 1
565 if count > 10: # limit to 10 tags
564 if count > 10: # limit to 10 tags
566 break
565 break
567
566
568 c = cl.read(n)
567 c = cl.read(n)
569 m = c[0]
568 m = c[0]
570 t = c[2]
569 t = c[2]
571
570
572 yield self.t("tagentry",
571 yield self.t("tagentry",
573 parity = self.stripes(parity),
572 parity = self.stripes(parity),
574 tag = k,
573 tag = k,
575 node = hex(n),
574 node = hex(n),
576 date = t,
575 date = t,
577 tagmanifest = hex(m))
576 tagmanifest = hex(m))
578 parity += 1
577 parity += 1
579
578
580 def changelist(**map):
579 def changelist(**map):
581 parity = 0
580 parity = 0
582 cl = self.repo.changelog
581 cl = self.repo.changelog
583 l = [] # build a list in forward order for efficiency
582 l = [] # build a list in forward order for efficiency
584 for i in range(start, end):
583 for i in range(start, end):
585 n = cl.node(i)
584 n = cl.node(i)
586 changes = cl.read(n)
585 changes = cl.read(n)
587 hn = hex(n)
586 hn = hex(n)
588 t = changes[2]
587 t = changes[2]
589
588
590 l.insert(0, self.t(
589 l.insert(0, self.t(
591 'shortlogentry',
590 'shortlogentry',
592 parity = parity,
591 parity = parity,
593 author = changes[1],
592 author = changes[1],
594 manifest = hex(changes[0]),
593 manifest = hex(changes[0]),
595 desc = changes[4],
594 desc = changes[4],
596 date = t,
595 date = t,
597 rev = i,
596 rev = i,
598 node = hn))
597 node = hn))
599 parity = 1 - parity
598 parity = 1 - parity
600
599
601 yield l
600 yield l
602
601
603 cl = self.repo.changelog
602 cl = self.repo.changelog
604 mf = cl.read(cl.tip())[0]
603 mf = cl.read(cl.tip())[0]
605 count = cl.count()
604 count = cl.count()
606 start = max(0, count - self.maxchanges)
605 start = max(0, count - self.maxchanges)
607 end = min(count, start + self.maxchanges)
606 end = min(count, start + self.maxchanges)
608
607
609 yield self.t("summary",
608 yield self.t("summary",
610 desc = self.repo.ui.config("web", "description", "unknown"),
609 desc = self.repo.ui.config("web", "description", "unknown"),
611 owner = (self.repo.ui.config("ui", "username") or # preferred
610 owner = (self.repo.ui.config("ui", "username") or # preferred
612 self.repo.ui.config("web", "contact") or # deprecated
611 self.repo.ui.config("web", "contact") or # deprecated
613 self.repo.ui.config("web", "author", "unknown")), # also
612 self.repo.ui.config("web", "author", "unknown")), # also
614 lastchange = (0, 0), # FIXME
613 lastchange = (0, 0), # FIXME
615 manifest = hex(mf),
614 manifest = hex(mf),
616 tags = tagentries,
615 tags = tagentries,
617 shortlog = changelist,
616 shortlog = changelist,
618 archives=self.archivelist("tip"))
617 archives=self.archivelist("tip"))
619
618
620 def filediff(self, file, changeset):
619 def filediff(self, file, changeset):
621 cl = self.repo.changelog
620 cl = self.repo.changelog
622 n = self.repo.lookup(changeset)
621 n = self.repo.lookup(changeset)
623 changeset = hex(n)
622 changeset = hex(n)
624 p1 = cl.parents(n)[0]
623 p1 = cl.parents(n)[0]
625 cs = cl.read(n)
624 cs = cl.read(n)
626 mf = self.repo.manifest.read(cs[0])
625 mf = self.repo.manifest.read(cs[0])
627
626
628 def diff(**map):
627 def diff(**map):
629 yield self.diff(p1, n, [file])
628 yield self.diff(p1, n, [file])
630
629
631 yield self.t("filediff",
630 yield self.t("filediff",
632 file=file,
631 file=file,
633 filenode=hex(mf.get(file, nullid)),
632 filenode=hex(mf.get(file, nullid)),
634 node=changeset,
633 node=changeset,
635 rev=self.repo.changelog.rev(n),
634 rev=self.repo.changelog.rev(n),
636 parent=self.siblings(cl.parents(n), cl.rev),
635 parent=self.siblings(cl.parents(n), cl.rev),
637 child=self.siblings(cl.children(n), cl.rev),
636 child=self.siblings(cl.children(n), cl.rev),
638 diff=diff)
637 diff=diff)
639
638
640 archive_specs = {
639 archive_specs = {
641 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
640 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
642 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
641 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
643 'zip': ('application/zip', 'zip', '.zip', None),
642 'zip': ('application/zip', 'zip', '.zip', None),
644 }
643 }
645
644
646 def archive(self, req, cnode, type_):
645 def archive(self, req, cnode, type_):
647 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
646 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
648 name = "%s-%s" % (reponame, short(cnode))
647 name = "%s-%s" % (reponame, short(cnode))
649 mimetype, artype, extension, encoding = self.archive_specs[type_]
648 mimetype, artype, extension, encoding = self.archive_specs[type_]
650 headers = [('Content-type', mimetype),
649 headers = [('Content-type', mimetype),
651 ('Content-disposition', 'attachment; filename=%s%s' %
650 ('Content-disposition', 'attachment; filename=%s%s' %
652 (name, extension))]
651 (name, extension))]
653 if encoding:
652 if encoding:
654 headers.append(('Content-encoding', encoding))
653 headers.append(('Content-encoding', encoding))
655 req.header(headers)
654 req.header(headers)
656 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
655 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
657
656
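archive_specs maps each archive type to a (mimetype, archiver, extension, encoding) tuple that archive() turns into response headers before handing off to archival.archive. A rough sketch of the headers produced for a 'gz' request; the repository and node names are invented:

mimetype, artype, extension, encoding = ('application/x-tar', 'tgz', '.tar.gz', None)
name = 'myrepo-0123456789ab'            # "<reponame>-<short node>", hypothetical
headers = [('Content-type', mimetype),
           ('Content-disposition', 'attachment; filename=%s%s' % (name, extension))]
# encoding is None for all three entries here, so no Content-encoding header is sent
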
658 # add tags to things
657 # add tags to things
659 # tags -> list of changesets corresponding to tags
658 # tags -> list of changesets corresponding to tags
660 # find tag, changeset, file
659 # find tag, changeset, file
661
660
662 def cleanpath(self, path):
661 def cleanpath(self, path):
663 p = util.normpath(path)
662 p = util.normpath(path)
664 if p[:2] == "..":
663 if p[:2] == "..":
665 raise Exception("suspicious path")
664 raise Exception("suspicious path")
666 return p
665 return p
667
666
668 def run(self):
667 def run(self):
669 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
668 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
670 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
669 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
671 import mercurial.hgweb.wsgicgi as wsgicgi
670 import mercurial.hgweb.wsgicgi as wsgicgi
672 from request import wsgiapplication
671 from request import wsgiapplication
673 def make_web_app():
672 def make_web_app():
674 return self
673 return self
675 wsgicgi.launch(wsgiapplication(make_web_app))
674 wsgicgi.launch(wsgiapplication(make_web_app))
676
675
677 def run_wsgi(self, req):
676 def run_wsgi(self, req):
678 def header(**map):
677 def header(**map):
679 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
678 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
680 msg = mimetools.Message(header_file, 0)
679 msg = mimetools.Message(header_file, 0)
681 req.header(msg.items())
680 req.header(msg.items())
682 yield header_file.read()
681 yield header_file.read()
683
682
684 def rawfileheader(**map):
683 def rawfileheader(**map):
685 req.header([('Content-type', map['mimetype']),
684 req.header([('Content-type', map['mimetype']),
686 ('Content-disposition', 'filename=%s' % map['file']),
685 ('Content-disposition', 'filename=%s' % map['file']),
687 ('Content-length', str(len(map['raw'])))])
686 ('Content-length', str(len(map['raw'])))])
688 yield ''
687 yield ''
689
688
690 def footer(**map):
689 def footer(**map):
691 yield self.t("footer",
690 yield self.t("footer",
692 motd=self.repo.ui.config("web", "motd", ""),
691 motd=self.repo.ui.config("web", "motd", ""),
693 **map)
692 **map)
694
693
695 def expand_form(form):
694 def expand_form(form):
696 shortcuts = {
695 shortcuts = {
697 'cl': [('cmd', ['changelog']), ('rev', None)],
696 'cl': [('cmd', ['changelog']), ('rev', None)],
698 'sl': [('cmd', ['shortlog']), ('rev', None)],
697 'sl': [('cmd', ['shortlog']), ('rev', None)],
699 'cs': [('cmd', ['changeset']), ('node', None)],
698 'cs': [('cmd', ['changeset']), ('node', None)],
700 'f': [('cmd', ['file']), ('filenode', None)],
699 'f': [('cmd', ['file']), ('filenode', None)],
701 'fl': [('cmd', ['filelog']), ('filenode', None)],
700 'fl': [('cmd', ['filelog']), ('filenode', None)],
702 'fd': [('cmd', ['filediff']), ('node', None)],
701 'fd': [('cmd', ['filediff']), ('node', None)],
703 'fa': [('cmd', ['annotate']), ('filenode', None)],
702 'fa': [('cmd', ['annotate']), ('filenode', None)],
704 'mf': [('cmd', ['manifest']), ('manifest', None)],
703 'mf': [('cmd', ['manifest']), ('manifest', None)],
705 'ca': [('cmd', ['archive']), ('node', None)],
704 'ca': [('cmd', ['archive']), ('node', None)],
706 'tags': [('cmd', ['tags'])],
705 'tags': [('cmd', ['tags'])],
707 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
706 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
708 'static': [('cmd', ['static']), ('file', None)]
707 'static': [('cmd', ['static']), ('file', None)]
709 }
708 }
710
709
711 for k in shortcuts.iterkeys():
710 for k in shortcuts.iterkeys():
712 if form.has_key(k):
711 if form.has_key(k):
713 for name, value in shortcuts[k]:
712 for name, value in shortcuts[k]:
714 if value is None:
713 if value is None:
715 value = form[k]
714 value = form[k]
716 form[name] = value
715 form[name] = value
717 del form[k]
716 del form[k]
718
717
719 self.refresh()
718 self.refresh()
720
719
721 expand_form(req.form)
720 expand_form(req.form)
722
721
723 m = os.path.join(self.templatepath, "map")
722 m = os.path.join(self.templatepath, "map")
724 style = self.repo.ui.config("web", "style", "")
723 style = self.repo.ui.config("web", "style", "")
725 if req.form.has_key('style'):
724 if req.form.has_key('style'):
726 style = req.form['style'][0]
725 style = req.form['style'][0]
727 if style:
726 if style:
728 b = os.path.basename("map-" + style)
727 b = os.path.basename("map-" + style)
729 p = os.path.join(self.templatepath, b)
728 p = os.path.join(self.templatepath, b)
730 if os.path.isfile(p):
729 if os.path.isfile(p):
731 m = p
730 m = p
732
731
733 port = req.env["SERVER_PORT"]
732 port = req.env["SERVER_PORT"]
734 port = port != "80" and (":" + port) or ""
733 port = port != "80" and (":" + port) or ""
735 uri = req.env["REQUEST_URI"]
734 uri = req.env["REQUEST_URI"]
736 if "?" in uri:
735 if "?" in uri:
737 uri = uri.split("?")[0]
736 uri = uri.split("?")[0]
738 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
737 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
739 if not self.reponame:
738 if not self.reponame:
740 self.reponame = (self.repo.ui.config("web", "name")
739 self.reponame = (self.repo.ui.config("web", "name")
741 or uri.strip('/') or self.repo.root)
740 or uri.strip('/') or self.repo.root)
742
741
743 self.t = templater.templater(m, templater.common_filters,
742 self.t = templater.templater(m, templater.common_filters,
744 defaults={"url": url,
743 defaults={"url": url,
745 "repo": self.reponame,
744 "repo": self.reponame,
746 "header": header,
745 "header": header,
747 "footer": footer,
746 "footer": footer,
748 "rawfileheader": rawfileheader,
747 "rawfileheader": rawfileheader,
749 })
748 })
750
749
751 if not req.form.has_key('cmd'):
750 if not req.form.has_key('cmd'):
752 req.form['cmd'] = [self.t.cache['default'],]
751 req.form['cmd'] = [self.t.cache['default'],]
753
752
754 cmd = req.form['cmd'][0]
753 cmd = req.form['cmd'][0]
755
754
756 method = getattr(self, 'do_' + cmd, None)
755 method = getattr(self, 'do_' + cmd, None)
757 if method:
756 if method:
758 method(req)
757 method(req)
759 else:
758 else:
760 req.write(self.t("error"))
759 req.write(self.t("error"))
761
760
762 def stripes(self, parity):
761 def stripes(self, parity):
763 "make horizontal stripes for easier reading"
762 "make horizontal stripes for easier reading"
764 if self.stripecount:
763 if self.stripecount:
765 return (1 + parity / self.stripecount) & 1
764 return (1 + parity / self.stripecount) & 1
766 else:
765 else:
767 return 0
766 return 0
768
767
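stripes() folds a running row counter into a 0/1 value so the templates can alternate row backgrounds every stripecount rows. A small illustration of the arithmetic, assuming stripecount is 3 (Python 2 integer division, as in the code above):

stripecount = 3
for row in range(9):
    # rows 0-2 give 1, rows 3-5 give 0, rows 6-8 give 1, ...
    print row, (1 + row / stripecount) & 1
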
769 def do_changelog(self, req):
768 def do_changelog(self, req):
770 hi = self.repo.changelog.count() - 1
769 hi = self.repo.changelog.count() - 1
771 if req.form.has_key('rev'):
770 if req.form.has_key('rev'):
772 hi = req.form['rev'][0]
771 hi = req.form['rev'][0]
773 try:
772 try:
774 hi = self.repo.changelog.rev(self.repo.lookup(hi))
773 hi = self.repo.changelog.rev(self.repo.lookup(hi))
775 except hg.RepoError:
774 except hg.RepoError:
776 req.write(self.search(hi)) # XXX redirect to 404 page?
775 req.write(self.search(hi)) # XXX redirect to 404 page?
777 return
776 return
778
777
779 req.write(self.changelog(hi))
778 req.write(self.changelog(hi))
780
779
781 def do_shortlog(self, req):
780 def do_shortlog(self, req):
782 hi = self.repo.changelog.count() - 1
781 hi = self.repo.changelog.count() - 1
783 if req.form.has_key('rev'):
782 if req.form.has_key('rev'):
784 hi = req.form['rev'][0]
783 hi = req.form['rev'][0]
785 try:
784 try:
786 hi = self.repo.changelog.rev(self.repo.lookup(hi))
785 hi = self.repo.changelog.rev(self.repo.lookup(hi))
787 except hg.RepoError:
786 except hg.RepoError:
788 req.write(self.search(hi)) # XXX redirect to 404 page?
787 req.write(self.search(hi)) # XXX redirect to 404 page?
789 return
788 return
790
789
791 req.write(self.changelog(hi, shortlog = True))
790 req.write(self.changelog(hi, shortlog = True))
792
791
793 def do_changeset(self, req):
792 def do_changeset(self, req):
794 req.write(self.changeset(req.form['node'][0]))
793 req.write(self.changeset(req.form['node'][0]))
795
794
796 def do_manifest(self, req):
795 def do_manifest(self, req):
797 req.write(self.manifest(req.form['manifest'][0],
796 req.write(self.manifest(req.form['manifest'][0],
798 self.cleanpath(req.form['path'][0])))
797 self.cleanpath(req.form['path'][0])))
799
798
800 def do_tags(self, req):
799 def do_tags(self, req):
801 req.write(self.tags())
800 req.write(self.tags())
802
801
803 def do_summary(self, req):
802 def do_summary(self, req):
804 req.write(self.summary())
803 req.write(self.summary())
805
804
806 def do_filediff(self, req):
805 def do_filediff(self, req):
807 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
806 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
808 req.form['node'][0]))
807 req.form['node'][0]))
809
808
810 def do_file(self, req):
809 def do_file(self, req):
811 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
810 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
812 req.form['filenode'][0]))
811 req.form['filenode'][0]))
813
812
814 def do_annotate(self, req):
813 def do_annotate(self, req):
815 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
814 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
816 req.form['filenode'][0]))
815 req.form['filenode'][0]))
817
816
818 def do_filelog(self, req):
817 def do_filelog(self, req):
819 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
818 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
820 req.form['filenode'][0]))
819 req.form['filenode'][0]))
821
820
822 def do_heads(self, req):
821 def do_heads(self, req):
823 resp = " ".join(map(hex, self.repo.heads())) + "\n"
822 resp = " ".join(map(hex, self.repo.heads())) + "\n"
824 req.httphdr("application/mercurial-0.1", length=len(resp))
823 req.httphdr("application/mercurial-0.1", length=len(resp))
825 req.write(resp)
824 req.write(resp)
826
825
827 def do_branches(self, req):
826 def do_branches(self, req):
828 nodes = []
827 nodes = []
829 if req.form.has_key('nodes'):
828 if req.form.has_key('nodes'):
830 nodes = map(bin, req.form['nodes'][0].split(" "))
829 nodes = map(bin, req.form['nodes'][0].split(" "))
831 resp = cStringIO.StringIO()
830 resp = cStringIO.StringIO()
832 for b in self.repo.branches(nodes):
831 for b in self.repo.branches(nodes):
833 resp.write(" ".join(map(hex, b)) + "\n")
832 resp.write(" ".join(map(hex, b)) + "\n")
834 resp = resp.getvalue()
833 resp = resp.getvalue()
835 req.httphdr("application/mercurial-0.1", length=len(resp))
834 req.httphdr("application/mercurial-0.1", length=len(resp))
836 req.write(resp)
835 req.write(resp)
837
836
838 def do_between(self, req):
837 def do_between(self, req):
839 nodes = []
838 nodes = []
840 if req.form.has_key('pairs'):
839 if req.form.has_key('pairs'):
841 pairs = [map(bin, p.split("-"))
840 pairs = [map(bin, p.split("-"))
842 for p in req.form['pairs'][0].split(" ")]
841 for p in req.form['pairs'][0].split(" ")]
843 resp = cStringIO.StringIO()
842 resp = cStringIO.StringIO()
844 for b in self.repo.between(pairs):
843 for b in self.repo.between(pairs):
845 resp.write(" ".join(map(hex, b)) + "\n")
844 resp.write(" ".join(map(hex, b)) + "\n")
846 resp = resp.getvalue()
845 resp = resp.getvalue()
847 req.httphdr("application/mercurial-0.1", length=len(resp))
846 req.httphdr("application/mercurial-0.1", length=len(resp))
848 req.write(resp)
847 req.write(resp)
849
848
850 def do_changegroup(self, req):
849 def do_changegroup(self, req):
851 req.httphdr("application/mercurial-0.1")
850 req.httphdr("application/mercurial-0.1")
852 nodes = []
851 nodes = []
853 if not self.allowpull:
852 if not self.allowpull:
854 return
853 return
855
854
856 if req.form.has_key('roots'):
855 if req.form.has_key('roots'):
857 nodes = map(bin, req.form['roots'][0].split(" "))
856 nodes = map(bin, req.form['roots'][0].split(" "))
858
857
859 z = zlib.compressobj()
858 z = zlib.compressobj()
860 f = self.repo.changegroup(nodes, 'serve')
859 f = self.repo.changegroup(nodes, 'serve')
861 while 1:
860 while 1:
862 chunk = f.read(4096)
861 chunk = f.read(4096)
863 if not chunk:
862 if not chunk:
864 break
863 break
865 req.write(z.compress(chunk))
864 req.write(z.compress(chunk))
866
865
867 req.write(z.flush())
866 req.write(z.flush())
868
867
869 def do_archive(self, req):
868 def do_archive(self, req):
870 changeset = self.repo.lookup(req.form['node'][0])
869 changeset = self.repo.lookup(req.form['node'][0])
871 type_ = req.form['type'][0]
870 type_ = req.form['type'][0]
872 allowed = self.repo.ui.configlist("web", "allow_archive")
871 allowed = self.repo.ui.configlist("web", "allow_archive")
873 if (type_ in self.archives and (type_ in allowed or
872 if (type_ in self.archives and (type_ in allowed or
874 self.repo.ui.configbool("web", "allow" + type_, False))):
873 self.repo.ui.configbool("web", "allow" + type_, False))):
875 self.archive(req, changeset, type_)
874 self.archive(req, changeset, type_)
876 return
875 return
877
876
878 req.write(self.t("error"))
877 req.write(self.t("error"))
879
878
880 def do_static(self, req):
879 def do_static(self, req):
881 fname = req.form['file'][0]
880 fname = req.form['file'][0]
882 static = self.repo.ui.config("web", "static",
881 static = self.repo.ui.config("web", "static",
883 os.path.join(self.templatepath,
882 os.path.join(self.templatepath,
884 "static"))
883 "static"))
885 req.write(staticfile(static, fname, req)
884 req.write(staticfile(static, fname, req)
886 or self.t("error", error="%r not found" % fname))
885 or self.t("error", error="%r not found" % fname))
887
886
888 def do_capabilities(self, req):
887 def do_capabilities(self, req):
889 caps = ['unbundle']
888 caps = ['unbundle']
890 if self.repo.ui.configbool('server', 'uncompressed'):
889 if self.repo.ui.configbool('server', 'uncompressed'):
891 caps.append('stream=%d' % self.repo.revlogversion)
890 caps.append('stream=%d' % self.repo.revlogversion)
892 resp = ' '.join(caps)
891 resp = ' '.join(caps)
893 req.httphdr("application/mercurial-0.1", length=len(resp))
892 req.httphdr("application/mercurial-0.1", length=len(resp))
894 req.write(resp)
893 req.write(resp)
895
894
896 def check_perm(self, req, op, default):
895 def check_perm(self, req, op, default):
897 '''check permission for operation based on user auth.
896 '''check permission for operation based on user auth.
898 return true if op allowed, else false.
897 return true if op allowed, else false.
899 default is policy to use if no config given.'''
898 default is policy to use if no config given.'''
900
899
901 user = req.env.get('REMOTE_USER')
900 user = req.env.get('REMOTE_USER')
902
901
903 deny = self.repo.ui.configlist('web', 'deny_' + op)
902 deny = self.repo.ui.configlist('web', 'deny_' + op)
904 if deny and (not user or deny == ['*'] or user in deny):
903 if deny and (not user or deny == ['*'] or user in deny):
905 return False
904 return False
906
905
907 allow = self.repo.ui.configlist('web', 'allow_' + op)
906 allow = self.repo.ui.configlist('web', 'allow_' + op)
908 return (allow and (allow == ['*'] or user in allow)) or default
907 return (allow and (allow == ['*'] or user in allow)) or default
909
908
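check_perm reads web.deny_<op> and web.allow_<op>: any match in the deny list (or a non-empty deny list with no authenticated user) refuses the request, a match in the allow list (or '*') grants it, and otherwise the caller's default decides. A hypothetical hgrc for the push case; the user names are made up:

[web]
# refused outright, even if also present in allow_push
deny_push = mallory
# only these authenticated users may push; '*' would allow everyone
allow_push = alice bob
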
910 def do_unbundle(self, req):
909 def do_unbundle(self, req):
911 def bail(response, headers={}):
910 def bail(response, headers={}):
912 length = int(req.env['CONTENT_LENGTH'])
911 length = int(req.env['CONTENT_LENGTH'])
913 for s in util.filechunkiter(req, limit=length):
912 for s in util.filechunkiter(req, limit=length):
914 # drain incoming bundle, else client will not see
913 # drain incoming bundle, else client will not see
915 # response when run outside cgi script
914 # response when run outside cgi script
916 pass
915 pass
917 req.httphdr("application/mercurial-0.1", headers=headers)
916 req.httphdr("application/mercurial-0.1", headers=headers)
918 req.write('0\n')
917 req.write('0\n')
919 req.write(response)
918 req.write(response)
920
919
921 # require ssl by default, auth info cannot be sniffed and
920 # require ssl by default, auth info cannot be sniffed and
922 # replayed
921 # replayed
923 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
922 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
924 if ssl_req:
923 if ssl_req:
925 if not req.env.get('HTTPS'):
924 if not req.env.get('HTTPS'):
926 bail(_('ssl required\n'))
925 bail(_('ssl required\n'))
927 return
926 return
928 proto = 'https'
927 proto = 'https'
929 else:
928 else:
930 proto = 'http'
929 proto = 'http'
931
930
932 # do not allow push unless explicitly allowed
931 # do not allow push unless explicitly allowed
933 if not self.check_perm(req, 'push', False):
932 if not self.check_perm(req, 'push', False):
934 bail(_('push not authorized\n'),
933 bail(_('push not authorized\n'),
935 headers={'status': '401 Unauthorized'})
934 headers={'status': '401 Unauthorized'})
936 return
935 return
937
936
938 req.httphdr("application/mercurial-0.1")
937 req.httphdr("application/mercurial-0.1")
939
938
940 their_heads = req.form['heads'][0].split(' ')
939 their_heads = req.form['heads'][0].split(' ')
941
940
942 def check_heads():
941 def check_heads():
943 heads = map(hex, self.repo.heads())
942 heads = map(hex, self.repo.heads())
944 return their_heads == [hex('force')] or their_heads == heads
943 return their_heads == [hex('force')] or their_heads == heads
945
944
946 # fail early if possible
945 # fail early if possible
947 if not check_heads():
946 if not check_heads():
948 bail(_('unsynced changes\n'))
947 bail(_('unsynced changes\n'))
949 return
948 return
950
949
951 # do not lock repo until all changegroup data is
950 # do not lock repo until all changegroup data is
952 # streamed. save to temporary file.
951 # streamed. save to temporary file.
953
952
954 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
953 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
955 fp = os.fdopen(fd, 'wb+')
954 fp = os.fdopen(fd, 'wb+')
956 try:
955 try:
957 length = int(req.env['CONTENT_LENGTH'])
956 length = int(req.env['CONTENT_LENGTH'])
958 for s in util.filechunkiter(req, limit=length):
957 for s in util.filechunkiter(req, limit=length):
959 fp.write(s)
958 fp.write(s)
960
959
961 lock = self.repo.lock()
960 lock = self.repo.lock()
962 try:
961 try:
963 if not check_heads():
962 if not check_heads():
964 req.write('0\n')
963 req.write('0\n')
965 req.write(_('unsynced changes\n'))
964 req.write(_('unsynced changes\n'))
966 return
965 return
967
966
968 fp.seek(0)
967 fp.seek(0)
969
968
970 # send addchangegroup output to client
969 # send addchangegroup output to client
971
970
972 old_stdout = sys.stdout
971 old_stdout = sys.stdout
973 sys.stdout = cStringIO.StringIO()
972 sys.stdout = cStringIO.StringIO()
974
973
975 try:
974 try:
976 url = 'remote:%s:%s' % (proto,
975 url = 'remote:%s:%s' % (proto,
977 req.env.get('REMOTE_HOST', ''))
976 req.env.get('REMOTE_HOST', ''))
978 ret = self.repo.addchangegroup(fp, 'serve', url)
977 ret = self.repo.addchangegroup(fp, 'serve', url)
979 finally:
978 finally:
980 val = sys.stdout.getvalue()
979 val = sys.stdout.getvalue()
981 sys.stdout = old_stdout
980 sys.stdout = old_stdout
982 req.write('%d\n' % ret)
981 req.write('%d\n' % ret)
983 req.write(val)
982 req.write(val)
984 finally:
983 finally:
985 lock.release()
984 lock.release()
986 finally:
985 finally:
987 fp.close()
986 fp.close()
988 os.unlink(tempname)
987 os.unlink(tempname)
989
988
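do_unbundle spools the whole incoming changegroup to a temporary file before taking the repository lock, so a slow upload cannot hold the lock, and rechecks the advertised heads once the lock is held. A stripped-down sketch of that spool-then-lock pattern, assuming repo, req, length and url are in scope as in the method above and omitting the head checks and response writing:

import os, tempfile

fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
fp = os.fdopen(fd, 'wb+')
try:
    # 1. drain the request body without holding any repository lock
    for chunk in util.filechunkiter(req, limit=length):
        fp.write(chunk)
    # 2. only now lock the repo and replay the spooled data
    lock = repo.lock()
    try:
        fp.seek(0)
        repo.addchangegroup(fp, 'serve', url)
    finally:
        lock.release()
finally:
    fp.close()
    os.unlink(tempname)
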
990 def do_stream_out(self, req):
989 def do_stream_out(self, req):
991 req.httphdr("application/mercurial-0.1")
990 req.httphdr("application/mercurial-0.1")
992 streamclone.stream_out(self.repo, req)
991 streamclone.stream_out(self.repo, req)
@@ -1,1764 +1,1758 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not create and not os.path.isdir(self.path):
34 if not create and not os.path.isdir(self.path):
35 raise repo.RepoError(_("repository %s not found") % path)
35 raise repo.RepoError(_("repository %s not found") % path)
36
36
37 self.root = os.path.abspath(path)
37 self.root = os.path.abspath(path)
38 self.origroot = path
38 self.origroot = path
39 self.ui = ui.ui(parentui=parentui)
39 self.ui = ui.ui(parentui=parentui)
40 self.opener = util.opener(self.path)
40 self.opener = util.opener(self.path)
41 self.wopener = util.opener(self.root)
41 self.wopener = util.opener(self.root)
42
42
43 try:
43 try:
44 self.ui.readconfig(self.join("hgrc"), self.root)
44 self.ui.readconfig(self.join("hgrc"), self.root)
45 except IOError:
45 except IOError:
46 pass
46 pass
47
47
48 v = self.ui.revlogopts
48 v = self.ui.revlogopts
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
49 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
50 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
51 fl = v.get('flags', None)
51 fl = v.get('flags', None)
52 flags = 0
52 flags = 0
53 if fl != None:
53 if fl != None:
54 for x in fl.split():
54 for x in fl.split():
55 flags |= revlog.flagstr(x)
55 flags |= revlog.flagstr(x)
56 elif self.revlogv1:
56 elif self.revlogv1:
57 flags = revlog.REVLOG_DEFAULT_FLAGS
57 flags = revlog.REVLOG_DEFAULT_FLAGS
58
58
59 v = self.revlogversion | flags
59 v = self.revlogversion | flags
60 self.manifest = manifest.manifest(self.opener, v)
60 self.manifest = manifest.manifest(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
61 self.changelog = changelog.changelog(self.opener, v)
62
62
63 # the changelog might not have the inline index flag
63 # the changelog might not have the inline index flag
64 # on. If the format of the changelog is the same as found in
64 # on. If the format of the changelog is the same as found in
65 # .hgrc, apply any flags found in the .hgrc as well.
65 # .hgrc, apply any flags found in the .hgrc as well.
66 # Otherwise, just version from the changelog
66 # Otherwise, just version from the changelog
67 v = self.changelog.version
67 v = self.changelog.version
68 if v == self.revlogversion:
68 if v == self.revlogversion:
69 v |= flags
69 v |= flags
70 self.revlogversion = v
70 self.revlogversion = v
71
71
72 self.tagscache = None
72 self.tagscache = None
73 self.nodetagscache = None
73 self.nodetagscache = None
74 self.encodepats = None
74 self.encodepats = None
75 self.decodepats = None
75 self.decodepats = None
76 self.transhandle = None
76 self.transhandle = None
77
77
78 if create:
78 if create:
79 if not os.path.exists(path):
79 if not os.path.exists(path):
80 os.mkdir(path)
80 os.mkdir(path)
81 os.mkdir(self.path)
81 os.mkdir(self.path)
82 os.mkdir(self.join("data"))
82 os.mkdir(self.join("data"))
83
83
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
84 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
85
85
86 def url(self):
86 def url(self):
87 return 'file:' + self.root
87 return 'file:' + self.root
88
88
89 def hook(self, name, throw=False, **args):
89 def hook(self, name, throw=False, **args):
90 def callhook(hname, funcname):
90 def callhook(hname, funcname):
91 '''call python hook. hook is callable object, looked up as
91 '''call python hook. hook is callable object, looked up as
92 name in python module. if callable returns "true", hook
92 name in python module. if callable returns "true", hook
93 fails, else passes. if hook raises exception, treated as
93 fails, else passes. if hook raises exception, treated as
94 hook failure. exception propagates if throw is "true".
94 hook failure. exception propagates if throw is "true".
95
95
96 reason for "true" meaning "hook failed" is so that
96 reason for "true" meaning "hook failed" is so that
97 unmodified commands (e.g. mercurial.commands.update) can
97 unmodified commands (e.g. mercurial.commands.update) can
98 be run as hooks without wrappers to convert return values.'''
98 be run as hooks without wrappers to convert return values.'''
99
99
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
100 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
101 d = funcname.rfind('.')
101 d = funcname.rfind('.')
102 if d == -1:
102 if d == -1:
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
103 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
104 % (hname, funcname))
104 % (hname, funcname))
105 modname = funcname[:d]
105 modname = funcname[:d]
106 try:
106 try:
107 obj = __import__(modname)
107 obj = __import__(modname)
108 except ImportError:
108 except ImportError:
109 try:
109 try:
110 # extensions are loaded with hgext_ prefix
110 # extensions are loaded with hgext_ prefix
111 obj = __import__("hgext_%s" % modname)
111 obj = __import__("hgext_%s" % modname)
112 except ImportError:
112 except ImportError:
113 raise util.Abort(_('%s hook is invalid '
113 raise util.Abort(_('%s hook is invalid '
114 '(import of "%s" failed)') %
114 '(import of "%s" failed)') %
115 (hname, modname))
115 (hname, modname))
116 try:
116 try:
117 for p in funcname.split('.')[1:]:
117 for p in funcname.split('.')[1:]:
118 obj = getattr(obj, p)
118 obj = getattr(obj, p)
119 except AttributeError, err:
119 except AttributeError, err:
120 raise util.Abort(_('%s hook is invalid '
120 raise util.Abort(_('%s hook is invalid '
121 '("%s" is not defined)') %
121 '("%s" is not defined)') %
122 (hname, funcname))
122 (hname, funcname))
123 if not callable(obj):
123 if not callable(obj):
124 raise util.Abort(_('%s hook is invalid '
124 raise util.Abort(_('%s hook is invalid '
125 '("%s" is not callable)') %
125 '("%s" is not callable)') %
126 (hname, funcname))
126 (hname, funcname))
127 try:
127 try:
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
128 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
129 except (KeyboardInterrupt, util.SignalInterrupt):
129 except (KeyboardInterrupt, util.SignalInterrupt):
130 raise
130 raise
131 except Exception, exc:
131 except Exception, exc:
132 if isinstance(exc, util.Abort):
132 if isinstance(exc, util.Abort):
133 self.ui.warn(_('error: %s hook failed: %s\n') %
133 self.ui.warn(_('error: %s hook failed: %s\n') %
134 (hname, exc.args[0] % exc.args[1:]))
134 (hname, exc.args[0] % exc.args[1:]))
135 else:
135 else:
136 self.ui.warn(_('error: %s hook raised an exception: '
136 self.ui.warn(_('error: %s hook raised an exception: '
137 '%s\n') % (hname, exc))
137 '%s\n') % (hname, exc))
138 if throw:
138 if throw:
139 raise
139 raise
140 self.ui.print_exc()
140 self.ui.print_exc()
141 return True
141 return True
142 if r:
142 if r:
143 if throw:
143 if throw:
144 raise util.Abort(_('%s hook failed') % hname)
144 raise util.Abort(_('%s hook failed') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
145 self.ui.warn(_('warning: %s hook failed\n') % hname)
146 return r
146 return r
147
147
148 def runhook(name, cmd):
148 def runhook(name, cmd):
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
149 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
150 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
151 r = util.system(cmd, environ=env, cwd=self.root)
151 r = util.system(cmd, environ=env, cwd=self.root)
152 if r:
152 if r:
153 desc, r = util.explain_exit(r)
153 desc, r = util.explain_exit(r)
154 if throw:
154 if throw:
155 raise util.Abort(_('%s hook %s') % (name, desc))
155 raise util.Abort(_('%s hook %s') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
156 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
157 return r
157 return r
158
158
159 r = False
159 r = False
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
160 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
161 if hname.split(".", 1)[0] == name and cmd]
161 if hname.split(".", 1)[0] == name and cmd]
162 hooks.sort()
162 hooks.sort()
163 for hname, cmd in hooks:
163 for hname, cmd in hooks:
164 if cmd.startswith('python:'):
164 if cmd.startswith('python:'):
165 r = callhook(hname, cmd[7:].strip()) or r
165 r = callhook(hname, cmd[7:].strip()) or r
166 else:
166 else:
167 r = runhook(hname, cmd) or r
167 r = runhook(hname, cmd) or r
168 return r
168 return r
169
169
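hook() runs every hooks.<name> entry in sorted order: values starting with 'python:' name an in-process callable (module.function, with extensions importable under the hgext_ prefix), anything else is run through the shell with the keyword arguments exported as HG_* environment variables. A hypothetical configuration and matching in-process hook, following the conventions in the docstring above (module, address and command are invented):

# hgrc:
# [hooks]
# pretag.check = python:myhooks.forbid_spaces
# tag.notify = echo "tagged $HG_TAG ($HG_NODE)" | mail -s tagged admin@example.com

# myhooks.py -- returning a true value makes the hook, and hence the operation, fail
def forbid_spaces(ui, repo, hooktype, tag=None, **kwargs):
    if tag and ' ' in tag:
        ui.warn('tag names may not contain spaces\n')
        return True
    return False
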
170 tag_disallowed = ':\r\n'
170 tag_disallowed = ':\r\n'
171
171
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
172 def tag(self, name, node, local=False, message=None, user=None, date=None):
173 '''tag a revision with a symbolic name.
173 '''tag a revision with a symbolic name.
174
174
175 if local is True, the tag is stored in a per-repository file.
175 if local is True, the tag is stored in a per-repository file.
176 otherwise, it is stored in the .hgtags file, and a new
176 otherwise, it is stored in the .hgtags file, and a new
177 changeset is committed with the change.
177 changeset is committed with the change.
178
178
179 keyword arguments:
179 keyword arguments:
180
180
181 local: whether to store tag in non-version-controlled file
181 local: whether to store tag in non-version-controlled file
182 (default False)
182 (default False)
183
183
184 message: commit message to use if committing
184 message: commit message to use if committing
185
185
186 user: name of user to use if committing
186 user: name of user to use if committing
187
187
188 date: date tuple to use if committing'''
188 date: date tuple to use if committing'''
189
189
190 for c in self.tag_disallowed:
190 for c in self.tag_disallowed:
191 if c in name:
191 if c in name:
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
192 raise util.Abort(_('%r cannot be used in a tag name') % c)
193
193
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
194 self.hook('pretag', throw=True, node=node, tag=name, local=local)
195
195
196 if local:
196 if local:
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
197 self.opener('localtags', 'a').write('%s %s\n' % (node, name))
198 self.hook('tag', node=node, tag=name, local=local)
198 self.hook('tag', node=node, tag=name, local=local)
199 return
199 return
200
200
201 for x in self.changes():
201 for x in self.changes():
202 if '.hgtags' in x:
202 if '.hgtags' in x:
203 raise util.Abort(_('working copy of .hgtags is changed '
203 raise util.Abort(_('working copy of .hgtags is changed '
204 '(please commit .hgtags manually)'))
204 '(please commit .hgtags manually)'))
205
205
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
206 self.wfile('.hgtags', 'ab').write('%s %s\n' % (node, name))
207 if self.dirstate.state('.hgtags') == '?':
207 if self.dirstate.state('.hgtags') == '?':
208 self.add(['.hgtags'])
208 self.add(['.hgtags'])
209
209
210 if not message:
210 if not message:
211 message = _('Added tag %s for changeset %s') % (name, node)
211 message = _('Added tag %s for changeset %s') % (name, node)
212
212
213 self.commit(['.hgtags'], message, user, date)
213 self.commit(['.hgtags'], message, user, date)
214 self.hook('tag', node=node, tag=name, local=local)
214 self.hook('tag', node=node, tag=name, local=local)
215
215
216 def tags(self):
216 def tags(self):
217 '''return a mapping of tag to node'''
217 '''return a mapping of tag to node'''
218 if not self.tagscache:
218 if not self.tagscache:
219 self.tagscache = {}
219 self.tagscache = {}
220
220
221 def parsetag(line, context):
221 def parsetag(line, context):
222 if not line:
222 if not line:
223 return
223 return
224 s = line.split(" ", 1)
224 s = line.split(" ", 1)
225 if len(s) != 2:
225 if len(s) != 2:
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 self.ui.warn(_("%s: cannot parse entry\n") % context)
227 return
227 return
228 node, key = s
228 node, key = s
229 key = key.strip()
229 key = key.strip()
230 try:
230 try:
231 bin_n = bin(node)
231 bin_n = bin(node)
232 except TypeError:
232 except TypeError:
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 self.ui.warn(_("%s: node '%s' is not well formed\n") %
234 (context, node))
234 (context, node))
235 return
235 return
236 if bin_n not in self.changelog.nodemap:
236 if bin_n not in self.changelog.nodemap:
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
238 (context, key))
238 (context, key))
239 return
239 return
240 self.tagscache[key] = bin_n
240 self.tagscache[key] = bin_n
241
241
242 # read the tags file from each head, ending with the tip,
242 # read the tags file from each head, ending with the tip,
243 # and add each tag found to the map, with "newer" ones
243 # and add each tag found to the map, with "newer" ones
244 # taking precedence
244 # taking precedence
245 heads = self.heads()
245 heads = self.heads()
246 heads.reverse()
246 heads.reverse()
247 fl = self.file(".hgtags")
247 fl = self.file(".hgtags")
248 for node in heads:
248 for node in heads:
249 change = self.changelog.read(node)
249 change = self.changelog.read(node)
250 rev = self.changelog.rev(node)
250 rev = self.changelog.rev(node)
251 fn, ff = self.manifest.find(change[0], '.hgtags')
251 fn, ff = self.manifest.find(change[0], '.hgtags')
252 if fn is None: continue
252 if fn is None: continue
253 count = 0
253 count = 0
254 for l in fl.read(fn).splitlines():
254 for l in fl.read(fn).splitlines():
255 count += 1
255 count += 1
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
257 (rev, short(node), count))
257 (rev, short(node), count))
258 try:
258 try:
259 f = self.opener("localtags")
259 f = self.opener("localtags")
260 count = 0
260 count = 0
261 for l in f:
261 for l in f:
262 count += 1
262 count += 1
263 parsetag(l, _("localtags, line %d") % count)
263 parsetag(l, _("localtags, line %d") % count)
264 except IOError:
264 except IOError:
265 pass
265 pass
266
266
267 self.tagscache['tip'] = self.changelog.tip()
267 self.tagscache['tip'] = self.changelog.tip()
268
268
269 return self.tagscache
269 return self.tagscache
270
270
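tags() builds its cache by parsing one '<hex node> <name>' pair per line, reading .hgtags from every head with the tip last so newer heads take precedence, then the uncommitted localtags file, and finally pinning 'tip' to the changelog tip. A toy illustration of the line format parsetag expects; the node and name are invented:

line = '0123456789abcdef0123456789abcdef01234567 release-1.0\n'
node, key = line.split(" ", 1)
key = key.strip()                      # -> 'release-1.0'
# bin(node) must be 40 hex digits and the node must exist in the
# changelog, otherwise parsetag warns and skips the entry
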
271 def tagslist(self):
271 def tagslist(self):
272 '''return a list of tags ordered by revision'''
272 '''return a list of tags ordered by revision'''
273 l = []
273 l = []
274 for t, n in self.tags().items():
274 for t, n in self.tags().items():
275 try:
275 try:
276 r = self.changelog.rev(n)
276 r = self.changelog.rev(n)
277 except:
277 except:
278 r = -2 # sort to the beginning of the list if unknown
278 r = -2 # sort to the beginning of the list if unknown
279 l.append((r, t, n))
279 l.append((r, t, n))
280 l.sort()
280 l.sort()
281 return [(t, n) for r, t, n in l]
281 return [(t, n) for r, t, n in l]
282
282
283 def nodetags(self, node):
283 def nodetags(self, node):
284 '''return the tags associated with a node'''
284 '''return the tags associated with a node'''
285 if not self.nodetagscache:
285 if not self.nodetagscache:
286 self.nodetagscache = {}
286 self.nodetagscache = {}
287 for t, n in self.tags().items():
287 for t, n in self.tags().items():
288 self.nodetagscache.setdefault(n, []).append(t)
288 self.nodetagscache.setdefault(n, []).append(t)
289 return self.nodetagscache.get(node, [])
289 return self.nodetagscache.get(node, [])
290
290
291 def lookup(self, key):
291 def lookup(self, key):
292 try:
292 try:
293 return self.tags()[key]
293 return self.tags()[key]
294 except KeyError:
294 except KeyError:
295 if key == '.':
295 if key == '.':
296 key = self.dirstate.parents()[0]
296 key = self.dirstate.parents()[0]
297 if key == nullid:
297 if key == nullid:
298 raise repo.RepoError(_("no revision checked out"))
298 raise repo.RepoError(_("no revision checked out"))
299 try:
299 try:
300 return self.changelog.lookup(key)
300 return self.changelog.lookup(key)
301 except:
301 except:
302 raise repo.RepoError(_("unknown revision '%s'") % key)
302 raise repo.RepoError(_("unknown revision '%s'") % key)
303
303
304 def dev(self):
304 def dev(self):
305 return os.lstat(self.path).st_dev
305 return os.lstat(self.path).st_dev
306
306
307 def local(self):
307 def local(self):
308 return True
308 return True
309
309
310 def join(self, f):
310 def join(self, f):
311 return os.path.join(self.path, f)
311 return os.path.join(self.path, f)
312
312
313 def wjoin(self, f):
313 def wjoin(self, f):
314 return os.path.join(self.root, f)
314 return os.path.join(self.root, f)
315
315
316 def file(self, f):
316 def file(self, f):
317 if f[0] == '/':
317 if f[0] == '/':
318 f = f[1:]
318 f = f[1:]
319 return filelog.filelog(self.opener, f, self.revlogversion)
319 return filelog.filelog(self.opener, f, self.revlogversion)
320
320
321 def changectx(self, changeid):
321 def changectx(self, changeid):
322 return context.changectx(self, changeid)
322 return context.changectx(self, changeid)
323
323
324 def filectx(self, path, changeid=None, fileid=None):
324 def filectx(self, path, changeid=None, fileid=None):
325 """changeid can be a changeset revision, node, or tag.
325 """changeid can be a changeset revision, node, or tag.
326 fileid can be a file revision or node."""
326 fileid can be a file revision or node."""
327 return context.filectx(self, path, changeid, fileid)
327 return context.filectx(self, path, changeid, fileid)
328
328
329 def getcwd(self):
329 def getcwd(self):
330 return self.dirstate.getcwd()
330 return self.dirstate.getcwd()
331
331
332 def wfile(self, f, mode='r'):
332 def wfile(self, f, mode='r'):
333 return self.wopener(f, mode)
333 return self.wopener(f, mode)
334
334
335 def wread(self, filename):
335 def wread(self, filename):
336 if self.encodepats == None:
336 if self.encodepats == None:
337 l = []
337 l = []
338 for pat, cmd in self.ui.configitems("encode"):
338 for pat, cmd in self.ui.configitems("encode"):
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 mf = util.matcher(self.root, "", [pat], [], [])[1]
340 l.append((mf, cmd))
340 l.append((mf, cmd))
341 self.encodepats = l
341 self.encodepats = l
342
342
343 data = self.wopener(filename, 'r').read()
343 data = self.wopener(filename, 'r').read()
344
344
345 for mf, cmd in self.encodepats:
345 for mf, cmd in self.encodepats:
346 if mf(filename):
346 if mf(filename):
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
348 data = util.filter(data, cmd)
348 data = util.filter(data, cmd)
349 break
349 break
350
350
351 return data
351 return data
352
352
353 def wwrite(self, filename, data, fd=None):
353 def wwrite(self, filename, data, fd=None):
354 if self.decodepats == None:
354 if self.decodepats == None:
355 l = []
355 l = []
356 for pat, cmd in self.ui.configitems("decode"):
356 for pat, cmd in self.ui.configitems("decode"):
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 mf = util.matcher(self.root, "", [pat], [], [])[1]
358 l.append((mf, cmd))
358 l.append((mf, cmd))
359 self.decodepats = l
359 self.decodepats = l
360
360
361 for mf, cmd in self.decodepats:
361 for mf, cmd in self.decodepats:
362 if mf(filename):
362 if mf(filename):
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
364 data = util.filter(data, cmd)
364 data = util.filter(data, cmd)
365 break
365 break
366
366
367 if fd:
367 if fd:
368 return fd.write(data)
368 return fd.write(data)
369 return self.wopener(filename, 'w').write(data)
369 return self.wopener(filename, 'w').write(data)
370
370
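wread and wwrite pipe file data through the first matching [encode] or [decode] command from the configuration when reading from or writing to the working directory. A hypothetical hgrc with simple stdin-to-stdout filters; the patterns and commands are illustrative only:

[encode]
# run matching files through this pipe before storing them
**.txt = tr -d '\r'
[decode]
# and through this one when writing them back to the working directory
**.txt = sed -e 's/$/\r/'
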
371 def transaction(self):
371 def transaction(self):
372 tr = self.transhandle
372 tr = self.transhandle
373 if tr != None and tr.running():
373 if tr != None and tr.running():
374 return tr.nest()
374 return tr.nest()
375
375
376 # save dirstate for rollback
376 # save dirstate for rollback
377 try:
377 try:
378 ds = self.opener("dirstate").read()
378 ds = self.opener("dirstate").read()
379 except IOError:
379 except IOError:
380 ds = ""
380 ds = ""
381 self.opener("journal.dirstate", "w").write(ds)
381 self.opener("journal.dirstate", "w").write(ds)
382
382
383 tr = transaction.transaction(self.ui.warn, self.opener,
383 tr = transaction.transaction(self.ui.warn, self.opener,
384 self.join("journal"),
384 self.join("journal"),
385 aftertrans(self.path))
385 aftertrans(self.path))
386 self.transhandle = tr
386 self.transhandle = tr
387 return tr
387 return tr
388
388
389 def recover(self):
389 def recover(self):
390 l = self.lock()
390 l = self.lock()
391 if os.path.exists(self.join("journal")):
391 if os.path.exists(self.join("journal")):
392 self.ui.status(_("rolling back interrupted transaction\n"))
392 self.ui.status(_("rolling back interrupted transaction\n"))
393 transaction.rollback(self.opener, self.join("journal"))
393 transaction.rollback(self.opener, self.join("journal"))
394 self.reload()
394 self.reload()
395 return True
395 return True
396 else:
396 else:
397 self.ui.warn(_("no interrupted transaction available\n"))
397 self.ui.warn(_("no interrupted transaction available\n"))
398 return False
398 return False
399
399
400 def rollback(self, wlock=None):
400 def rollback(self, wlock=None):
401 if not wlock:
401 if not wlock:
402 wlock = self.wlock()
402 wlock = self.wlock()
403 l = self.lock()
403 l = self.lock()
404 if os.path.exists(self.join("undo")):
404 if os.path.exists(self.join("undo")):
405 self.ui.status(_("rolling back last transaction\n"))
405 self.ui.status(_("rolling back last transaction\n"))
406 transaction.rollback(self.opener, self.join("undo"))
406 transaction.rollback(self.opener, self.join("undo"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
408 self.reload()
408 self.reload()
409 self.wreload()
409 self.wreload()
410 else:
410 else:
411 self.ui.warn(_("no rollback information available\n"))
411 self.ui.warn(_("no rollback information available\n"))
412
412
413 def wreload(self):
413 def wreload(self):
414 self.dirstate.read()
414 self.dirstate.read()
415
415
416 def reload(self):
416 def reload(self):
417 self.changelog.load()
417 self.changelog.load()
418 self.manifest.load()
418 self.manifest.load()
419 self.tagscache = None
419 self.tagscache = None
420 self.nodetagscache = None
420 self.nodetagscache = None
421
421
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
423 desc=None):
423 desc=None):
424 try:
424 try:
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
426 except lock.LockHeld, inst:
426 except lock.LockHeld, inst:
427 if not wait:
427 if not wait:
428 raise
428 raise
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 self.ui.warn(_("waiting for lock on %s held by %s\n") %
430 (desc, inst.args[0]))
430 (desc, inst.args[0]))
431 # default to 600 seconds timeout
431 # default to 600 seconds timeout
432 l = lock.lock(self.join(lockname),
432 l = lock.lock(self.join(lockname),
433 int(self.ui.config("ui", "timeout") or 600),
433 int(self.ui.config("ui", "timeout") or 600),
434 releasefn, desc=desc)
434 releasefn, desc=desc)
435 if acquirefn:
435 if acquirefn:
436 acquirefn()
436 acquirefn()
437 return l
437 return l
438
438
439 def lock(self, wait=1):
439 def lock(self, wait=1):
440 return self.do_lock("lock", wait, acquirefn=self.reload,
440 return self.do_lock("lock", wait, acquirefn=self.reload,
441 desc=_('repository %s') % self.origroot)
441 desc=_('repository %s') % self.origroot)
442
442
443 def wlock(self, wait=1):
443 def wlock(self, wait=1):
444 return self.do_lock("wlock", wait, self.dirstate.write,
444 return self.do_lock("wlock", wait, self.dirstate.write,
445 self.wreload,
445 self.wreload,
446 desc=_('working directory of %s') % self.origroot)
446 desc=_('working directory of %s') % self.origroot)
447
447
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
449 "determine whether a new filenode is needed"
449 "determine whether a new filenode is needed"
450 fp1 = manifest1.get(filename, nullid)
450 fp1 = manifest1.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
451 fp2 = manifest2.get(filename, nullid)
452
452
453 if fp2 != nullid:
453 if fp2 != nullid:
454 # is one parent an ancestor of the other?
454 # is one parent an ancestor of the other?
455 fpa = filelog.ancestor(fp1, fp2)
455 fpa = filelog.ancestor(fp1, fp2)
456 if fpa == fp1:
456 if fpa == fp1:
457 fp1, fp2 = fp2, nullid
457 fp1, fp2 = fp2, nullid
458 elif fpa == fp2:
458 elif fpa == fp2:
459 fp2 = nullid
459 fp2 = nullid
460
460
461 # is the file unmodified from the parent? report existing entry
461 # is the file unmodified from the parent? report existing entry
462 if fp2 == nullid and text == filelog.read(fp1):
462 if fp2 == nullid and text == filelog.read(fp1):
463 return (fp1, None, None)
463 return (fp1, None, None)
464
464
465 return (None, fp1, fp2)
465 return (None, fp1, fp2)
466
466
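# A standalone sketch of the parent-collapsing step in checkfilemerge() above,
# with invented names: "null" stands in for nullid and ancestor() for
# filelog.ancestor().  If one parent is an ancestor of the other, only the
# newer parent needs to be recorded.
null = None

def collapse_parents(fp1, fp2, ancestor):
    if fp2 is not null:
        fpa = ancestor(fp1, fp2)
        if fpa == fp1:
            fp1, fp2 = fp2, null      # fp1 is an ancestor of fp2: keep fp2 only
        elif fpa == fp2:
            fp2 = null                # fp2 is an ancestor of fp1: keep fp1 only
    return fp1, fp2

# toy history: revision "a" is the ancestor of revision "b"
assert collapse_parents("a", "b", lambda x, y: "a") == ("b", null)
assert collapse_parents("a", null, lambda x, y: None) == ("a", null)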
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
468 orig_parent = self.dirstate.parents()[0] or nullid
468 orig_parent = self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p1 = p1 or self.dirstate.parents()[0] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
470 p2 = p2 or self.dirstate.parents()[1] or nullid
471 c1 = self.changelog.read(p1)
471 c1 = self.changelog.read(p1)
472 c2 = self.changelog.read(p2)
472 c2 = self.changelog.read(p2)
473 m1 = self.manifest.read(c1[0])
473 m1 = self.manifest.read(c1[0]).copy()
474 mf1 = self.manifest.readflags(c1[0])
475 m2 = self.manifest.read(c2[0])
474 m2 = self.manifest.read(c2[0])
476 changed = []
475 changed = []
477
476
478 if orig_parent == p1:
477 if orig_parent == p1:
479 update_dirstate = 1
478 update_dirstate = 1
480 else:
479 else:
481 update_dirstate = 0
480 update_dirstate = 0
482
481
483 if not wlock:
482 if not wlock:
484 wlock = self.wlock()
483 wlock = self.wlock()
485 l = self.lock()
484 l = self.lock()
486 tr = self.transaction()
485 tr = self.transaction()
487 mm = m1.copy()
488 mfm = mf1.copy()
489 linkrev = self.changelog.count()
486 linkrev = self.changelog.count()
490 for f in files:
487 for f in files:
491 try:
488 try:
492 t = self.wread(f)
489 t = self.wread(f)
493 mfm.set(f, util.is_exec(self.wjoin(f), mfm.execf(f)))
490 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
494 r = self.file(f)
491 r = self.file(f)
495
492
496 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
493 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
497 if entry:
494 if entry:
498 mm[f] = entry
495 m1[f] = entry
499 continue
496 continue
500
497
501 mm[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
498 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
502 changed.append(f)
499 changed.append(f)
503 if update_dirstate:
500 if update_dirstate:
504 self.dirstate.update([f], "n")
501 self.dirstate.update([f], "n")
505 except IOError:
502 except IOError:
506 try:
503 try:
507 del mm[f]
504 del m1[f]
508 del mfm[f]
505 del m1[f]
509 if update_dirstate:
506 if update_dirstate:
510 self.dirstate.forget([f])
507 self.dirstate.forget([f])
511 except:
508 except:
512 # deleted from p2?
509 # deleted from p2?
513 pass
510 pass
514
511
515 mnode = self.manifest.add(mm, mfm, tr, linkrev, c1[0], c2[0])
512 mnode = self.manifest.add(m1, m1, tr, linkrev, c1[0], c2[0])
516 user = user or self.ui.username()
513 user = user or self.ui.username()
517 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
514 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
518 tr.close()
515 tr.close()
519 if update_dirstate:
516 if update_dirstate:
520 self.dirstate.setparents(n, nullid)
517 self.dirstate.setparents(n, nullid)
521
518
522 def commit(self, files=None, text="", user=None, date=None,
519 def commit(self, files=None, text="", user=None, date=None,
523 match=util.always, force=False, lock=None, wlock=None,
520 match=util.always, force=False, lock=None, wlock=None,
524 force_editor=False):
521 force_editor=False):
525 commit = []
522 commit = []
526 remove = []
523 remove = []
527 changed = []
524 changed = []
528
525
529 if files:
526 if files:
530 for f in files:
527 for f in files:
531 s = self.dirstate.state(f)
528 s = self.dirstate.state(f)
532 if s in 'nmai':
529 if s in 'nmai':
533 commit.append(f)
530 commit.append(f)
534 elif s == 'r':
531 elif s == 'r':
535 remove.append(f)
532 remove.append(f)
536 else:
533 else:
537 self.ui.warn(_("%s not tracked!\n") % f)
534 self.ui.warn(_("%s not tracked!\n") % f)
538 else:
535 else:
539 modified, added, removed, deleted, unknown = self.changes(match=match)
536 modified, added, removed, deleted, unknown = self.changes(match=match)
540 commit = modified + added
537 commit = modified + added
541 remove = removed
538 remove = removed
542
539
543 p1, p2 = self.dirstate.parents()
540 p1, p2 = self.dirstate.parents()
544 c1 = self.changelog.read(p1)
541 c1 = self.changelog.read(p1)
545 c2 = self.changelog.read(p2)
542 c2 = self.changelog.read(p2)
546 m1 = self.manifest.read(c1[0])
543 m1 = self.manifest.read(c1[0]).copy()
547 mf1 = self.manifest.readflags(c1[0])
548 m2 = self.manifest.read(c2[0])
544 m2 = self.manifest.read(c2[0])
549
545
550 if not commit and not remove and not force and p2 == nullid:
546 if not commit and not remove and not force and p2 == nullid:
551 self.ui.status(_("nothing changed\n"))
547 self.ui.status(_("nothing changed\n"))
552 return None
548 return None
553
549
554 xp1 = hex(p1)
550 xp1 = hex(p1)
555 if p2 == nullid: xp2 = ''
551 if p2 == nullid: xp2 = ''
556 else: xp2 = hex(p2)
552 else: xp2 = hex(p2)
557
553
558 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
554 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
559
555
560 if not wlock:
556 if not wlock:
561 wlock = self.wlock()
557 wlock = self.wlock()
562 if not lock:
558 if not lock:
563 lock = self.lock()
559 lock = self.lock()
564 tr = self.transaction()
560 tr = self.transaction()
565
561
566 # check in files
562 # check in files
567 new = {}
563 new = {}
568 linkrev = self.changelog.count()
564 linkrev = self.changelog.count()
569 commit.sort()
565 commit.sort()
570 for f in commit:
566 for f in commit:
571 self.ui.note(f + "\n")
567 self.ui.note(f + "\n")
572 try:
568 try:
573 mf1.set(f, util.is_exec(self.wjoin(f), mf1.execf(f)))
569 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
574 t = self.wread(f)
570 t = self.wread(f)
575 except IOError:
571 except IOError:
576 self.ui.warn(_("trouble committing %s!\n") % f)
572 self.ui.warn(_("trouble committing %s!\n") % f)
577 raise
573 raise
578
574
579 r = self.file(f)
575 r = self.file(f)
580
576
581 meta = {}
577 meta = {}
582 cp = self.dirstate.copied(f)
578 cp = self.dirstate.copied(f)
583 if cp:
579 if cp:
584 meta["copy"] = cp
580 meta["copy"] = cp
585 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
581 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
586 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
582 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
587 fp1, fp2 = nullid, nullid
583 fp1, fp2 = nullid, nullid
588 else:
584 else:
589 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
585 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
590 if entry:
586 if entry:
591 new[f] = entry
587 new[f] = entry
592 continue
588 continue
593
589
594 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
590 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
595 # remember what we've added so that we can later calculate
591 # remember what we've added so that we can later calculate
596 # the files to pull from a set of changesets
592 # the files to pull from a set of changesets
597 changed.append(f)
593 changed.append(f)
598
594
599 # update manifest
595 # update manifest
600 m1 = m1.copy()
601 m1.update(new)
596 m1.update(new)
602 for f in remove:
597 for f in remove:
603 if f in m1:
598 if f in m1:
604 del m1[f]
599 del m1[f]
605 mn = self.manifest.add(m1, mf1, tr, linkrev, c1[0], c2[0],
600 mn = self.manifest.add(m1, m1, tr, linkrev, c1[0], c2[0],
606 (new, remove))
601 (new, remove))
607
602
608 # add changeset
603 # add changeset
609 new = new.keys()
604 new = new.keys()
610 new.sort()
605 new.sort()
611
606
612 user = user or self.ui.username()
607 user = user or self.ui.username()
613 if not text or force_editor:
608 if not text or force_editor:
614 edittext = []
609 edittext = []
615 if text:
610 if text:
616 edittext.append(text)
611 edittext.append(text)
617 edittext.append("")
612 edittext.append("")
618 if p2 != nullid:
613 if p2 != nullid:
619 edittext.append("HG: branch merge")
614 edittext.append("HG: branch merge")
620 edittext.extend(["HG: changed %s" % f for f in changed])
615 edittext.extend(["HG: changed %s" % f for f in changed])
621 edittext.extend(["HG: removed %s" % f for f in remove])
616 edittext.extend(["HG: removed %s" % f for f in remove])
622 if not changed and not remove:
617 if not changed and not remove:
623 edittext.append("HG: no files changed")
618 edittext.append("HG: no files changed")
624 edittext.append("")
619 edittext.append("")
625 # run editor in the repository root
620 # run editor in the repository root
626 olddir = os.getcwd()
621 olddir = os.getcwd()
627 os.chdir(self.root)
622 os.chdir(self.root)
628 text = self.ui.edit("\n".join(edittext), user)
623 text = self.ui.edit("\n".join(edittext), user)
629 os.chdir(olddir)
624 os.chdir(olddir)
630
625
631 lines = [line.rstrip() for line in text.rstrip().splitlines()]
626 lines = [line.rstrip() for line in text.rstrip().splitlines()]
632 while lines and not lines[0]:
627 while lines and not lines[0]:
633 del lines[0]
628 del lines[0]
634 if not lines:
629 if not lines:
635 return None
630 return None
636 text = '\n'.join(lines)
631 text = '\n'.join(lines)
637 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
632 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
638 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
633 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
639 parent2=xp2)
634 parent2=xp2)
640 tr.close()
635 tr.close()
641
636
642 self.dirstate.setparents(n)
637 self.dirstate.setparents(n)
643 self.dirstate.update(new, "n")
638 self.dirstate.update(new, "n")
644 self.dirstate.forget(remove)
639 self.dirstate.forget(remove)
645
640
646 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
641 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
647 return n
642 return n
648
643
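# A standalone sketch of how commit() above turns the edited text back into a
# commit message: trailing whitespace is stripped per line, leading blank
# lines are dropped, and an empty result means the commit is aborted.  The
# function name is invented for illustration only.
def clean_message(text):
    lines = [line.rstrip() for line in text.rstrip().splitlines()]
    while lines and not lines[0]:
        del lines[0]
    if not lines:
        return None                   # caller treats this as "abort the commit"
    return '\n'.join(lines)

assert clean_message("\n\nfix bug   \n\nHG: changed a.txt\n") == "fix bug\n\nHG: changed a.txt"
assert clean_message("   \n\n") is None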
649 def walk(self, node=None, files=[], match=util.always, badmatch=None):
644 def walk(self, node=None, files=[], match=util.always, badmatch=None):
650 if node:
645 if node:
651 fdict = dict.fromkeys(files)
646 fdict = dict.fromkeys(files)
652 for fn in self.manifest.read(self.changelog.read(node)[0]):
647 for fn in self.manifest.read(self.changelog.read(node)[0]):
653 fdict.pop(fn, None)
648 fdict.pop(fn, None)
654 if match(fn):
649 if match(fn):
655 yield 'm', fn
650 yield 'm', fn
656 for fn in fdict:
651 for fn in fdict:
657 if badmatch and badmatch(fn):
652 if badmatch and badmatch(fn):
658 if match(fn):
653 if match(fn):
659 yield 'b', fn
654 yield 'b', fn
660 else:
655 else:
661 self.ui.warn(_('%s: No such file in rev %s\n') % (
656 self.ui.warn(_('%s: No such file in rev %s\n') % (
662 util.pathto(self.getcwd(), fn), short(node)))
657 util.pathto(self.getcwd(), fn), short(node)))
663 else:
658 else:
664 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
659 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
665 yield src, fn
660 yield src, fn
666
661
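# A standalone sketch of the shape walk() above takes when a node is given:
# files present in that revision's manifest come back tagged 'm', requested
# files missing from the revision come back tagged 'b' only when badmatch
# accepts them.  Names and toy data below are invented for illustration.
def toy_walk(manifest_files, requested, match=lambda fn: True, badmatch=None):
    wanted = dict.fromkeys(requested)
    for fn in manifest_files:
        wanted.pop(fn, None)
        if match(fn):
            yield 'm', fn
    for fn in wanted:                 # requested but not present in this revision
        if badmatch and badmatch(fn) and match(fn):
            yield 'b', fn

out = sorted(toy_walk(["a.txt", "b.txt"], ["a.txt", "gone.txt"],
                      badmatch=lambda fn: True))
assert out == [('b', 'gone.txt'), ('m', 'a.txt'), ('m', 'b.txt')]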
667 def status(self, node1=None, node2=None, files=[], match=util.always,
662 def status(self, node1=None, node2=None, files=[], match=util.always,
668 wlock=None, list_ignored=False, list_clean=False):
663 wlock=None, list_ignored=False, list_clean=False):
669 """return status of files between two nodes, or between a node and the working directory
664 """return status of files between two nodes, or between a node and the working directory
670
665
671 If node1 is None, use the first dirstate parent instead.
666 If node1 is None, use the first dirstate parent instead.
672 If node2 is None, compare node1 with working directory.
667 If node2 is None, compare node1 with working directory.
673 """
668 """
674
669
675 def fcmp(fn, mf):
670 def fcmp(fn, mf):
676 t1 = self.wread(fn)
671 t1 = self.wread(fn)
677 t2 = self.file(fn).read(mf.get(fn, nullid))
672 t2 = self.file(fn).read(mf.get(fn, nullid))
678 return cmp(t1, t2)
673 return cmp(t1, t2)
679
674
680 def mfmatches(node):
675 def mfmatches(node):
681 change = self.changelog.read(node)
676 change = self.changelog.read(node)
682 mf = dict(self.manifest.read(change[0]))
677 mf = dict(self.manifest.read(change[0]))
683 for fn in mf.keys():
678 for fn in mf.keys():
684 if not match(fn):
679 if not match(fn):
685 del mf[fn]
680 del mf[fn]
686 return mf
681 return mf
687
682
688 modified, added, removed, deleted, unknown = [], [], [], [], []
683 modified, added, removed, deleted, unknown = [], [], [], [], []
689 ignored, clean = [], []
684 ignored, clean = [], []
690
685
691 compareworking = False
686 compareworking = False
692 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
687 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
693 compareworking = True
688 compareworking = True
694
689
695 if not compareworking:
690 if not compareworking:
696 # read the manifest from node1 before the manifest from node2,
691 # read the manifest from node1 before the manifest from node2,
697 # so that we'll hit the manifest cache if we're going through
692 # so that we'll hit the manifest cache if we're going through
698 # all the revisions in parent->child order.
693 # all the revisions in parent->child order.
699 mf1 = mfmatches(node1)
694 mf1 = mfmatches(node1)
700
695
701 # are we comparing the working directory?
696 # are we comparing the working directory?
702 if not node2:
697 if not node2:
703 if not wlock:
698 if not wlock:
704 try:
699 try:
705 wlock = self.wlock(wait=0)
700 wlock = self.wlock(wait=0)
706 except lock.LockException:
701 except lock.LockException:
707 wlock = None
702 wlock = None
708 (lookup, modified, added, removed, deleted, unknown,
703 (lookup, modified, added, removed, deleted, unknown,
709 ignored, clean) = self.dirstate.status(files, match,
704 ignored, clean) = self.dirstate.status(files, match,
710 list_ignored, list_clean)
705 list_ignored, list_clean)
711
706
712 # are we comparing working dir against its parent?
707 # are we comparing working dir against its parent?
713 if compareworking:
708 if compareworking:
714 if lookup:
709 if lookup:
715 # do a full compare of any files that might have changed
710 # do a full compare of any files that might have changed
716 mf2 = mfmatches(self.dirstate.parents()[0])
711 mf2 = mfmatches(self.dirstate.parents()[0])
717 for f in lookup:
712 for f in lookup:
718 if fcmp(f, mf2):
713 if fcmp(f, mf2):
719 modified.append(f)
714 modified.append(f)
720 elif wlock is not None:
715 elif wlock is not None:
721 self.dirstate.update([f], "n")
716 self.dirstate.update([f], "n")
722 else:
717 else:
723 # we are comparing working dir against non-parent
718 # we are comparing working dir against non-parent
724 # generate a pseudo-manifest for the working dir
719 # generate a pseudo-manifest for the working dir
725 mf2 = mfmatches(self.dirstate.parents()[0])
720 mf2 = mfmatches(self.dirstate.parents()[0])
726 for f in lookup + modified + added:
721 for f in lookup + modified + added:
727 mf2[f] = ""
722 mf2[f] = ""
728 for f in removed:
723 for f in removed:
729 if f in mf2:
724 if f in mf2:
730 del mf2[f]
725 del mf2[f]
731 else:
726 else:
732 # we are comparing two revisions
727 # we are comparing two revisions
733 mf2 = mfmatches(node2)
728 mf2 = mfmatches(node2)
734
729
735 if not compareworking:
730 if not compareworking:
736 # flush lists from dirstate before comparing manifests
731 # flush lists from dirstate before comparing manifests
737 modified, added, clean = [], [], []
732 modified, added, clean = [], [], []
738
733
739 # make sure to sort the files so we talk to the disk in a
734 # make sure to sort the files so we talk to the disk in a
740 # reasonable order
735 # reasonable order
741 mf2keys = mf2.keys()
736 mf2keys = mf2.keys()
742 mf2keys.sort()
737 mf2keys.sort()
743 for fn in mf2keys:
738 for fn in mf2keys:
744 if mf1.has_key(fn):
739 if mf1.has_key(fn):
745 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
740 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
746 modified.append(fn)
741 modified.append(fn)
747 elif list_clean:
742 elif list_clean:
748 clean.append(fn)
743 clean.append(fn)
749 del mf1[fn]
744 del mf1[fn]
750 else:
745 else:
751 added.append(fn)
746 added.append(fn)
752
747
753 removed = mf1.keys()
748 removed = mf1.keys()
754
749
755 # sort and return results:
750 # sort and return results:
756 for l in modified, added, removed, deleted, unknown, ignored, clean:
751 for l in modified, added, removed, deleted, unknown, ignored, clean:
757 l.sort()
752 l.sort()
758 return (modified, added, removed, deleted, unknown, ignored, clean)
753 return (modified, added, removed, deleted, unknown, ignored, clean)
759
754
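# A standalone sketch of the manifest-vs-manifest comparison at the end of
# status() above, reduced to plain dicts: files in both with different nodes
# are modified, files only in mf2 are added, and whatever is left of mf1 was
# removed.  The content comparison done by fcmp() is replaced here by a direct
# value comparison; all names are invented for illustration.
def diff_manifests(mf1, mf2):
    mf1 = dict(mf1)
    modified, added = [], []
    for fn in sorted(mf2):
        if fn in mf1:
            if mf1[fn] != mf2[fn]:
                modified.append(fn)
            del mf1[fn]
        else:
            added.append(fn)
    removed = sorted(mf1)             # whatever is left only existed in mf1
    return modified, added, removed

assert diff_manifests({"a": 1, "b": 2, "c": 3}, {"a": 1, "b": 9, "d": 4}) == \
       (["b"], ["d"], ["c"])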
760 def changes(self, node1=None, node2=None, files=[], match=util.always,
755 def changes(self, node1=None, node2=None, files=[], match=util.always,
761 wlock=None, list_ignored=False, list_clean=False):
756 wlock=None, list_ignored=False, list_clean=False):
762 '''DEPRECATED - use status instead'''
757 '''DEPRECATED - use status instead'''
763 marduit = self.status(node1, node2, files, match, wlock,
758 marduit = self.status(node1, node2, files, match, wlock,
764 list_ignored, list_clean)
759 list_ignored, list_clean)
765 if list_ignored:
760 if list_ignored:
766 return marduit[:-1]
761 return marduit[:-1]
767 else:
762 else:
768 return marduit[:-2]
763 return marduit[:-2]
769
764
770 def add(self, list, wlock=None):
765 def add(self, list, wlock=None):
771 if not wlock:
766 if not wlock:
772 wlock = self.wlock()
767 wlock = self.wlock()
773 for f in list:
768 for f in list:
774 p = self.wjoin(f)
769 p = self.wjoin(f)
775 if not os.path.exists(p):
770 if not os.path.exists(p):
776 self.ui.warn(_("%s does not exist!\n") % f)
771 self.ui.warn(_("%s does not exist!\n") % f)
777 elif not os.path.isfile(p):
772 elif not os.path.isfile(p):
778 self.ui.warn(_("%s not added: only files supported currently\n")
773 self.ui.warn(_("%s not added: only files supported currently\n")
779 % f)
774 % f)
780 elif self.dirstate.state(f) in 'an':
775 elif self.dirstate.state(f) in 'an':
781 self.ui.warn(_("%s already tracked!\n") % f)
776 self.ui.warn(_("%s already tracked!\n") % f)
782 else:
777 else:
783 self.dirstate.update([f], "a")
778 self.dirstate.update([f], "a")
784
779
785 def forget(self, list, wlock=None):
780 def forget(self, list, wlock=None):
786 if not wlock:
781 if not wlock:
787 wlock = self.wlock()
782 wlock = self.wlock()
788 for f in list:
783 for f in list:
789 if self.dirstate.state(f) not in 'ai':
784 if self.dirstate.state(f) not in 'ai':
790 self.ui.warn(_("%s not added!\n") % f)
785 self.ui.warn(_("%s not added!\n") % f)
791 else:
786 else:
792 self.dirstate.forget([f])
787 self.dirstate.forget([f])
793
788
794 def remove(self, list, unlink=False, wlock=None):
789 def remove(self, list, unlink=False, wlock=None):
795 if unlink:
790 if unlink:
796 for f in list:
791 for f in list:
797 try:
792 try:
798 util.unlink(self.wjoin(f))
793 util.unlink(self.wjoin(f))
799 except OSError, inst:
794 except OSError, inst:
800 if inst.errno != errno.ENOENT:
795 if inst.errno != errno.ENOENT:
801 raise
796 raise
802 if not wlock:
797 if not wlock:
803 wlock = self.wlock()
798 wlock = self.wlock()
804 for f in list:
799 for f in list:
805 p = self.wjoin(f)
800 p = self.wjoin(f)
806 if os.path.exists(p):
801 if os.path.exists(p):
807 self.ui.warn(_("%s still exists!\n") % f)
802 self.ui.warn(_("%s still exists!\n") % f)
808 elif self.dirstate.state(f) == 'a':
803 elif self.dirstate.state(f) == 'a':
809 self.dirstate.forget([f])
804 self.dirstate.forget([f])
810 elif f not in self.dirstate:
805 elif f not in self.dirstate:
811 self.ui.warn(_("%s not tracked!\n") % f)
806 self.ui.warn(_("%s not tracked!\n") % f)
812 else:
807 else:
813 self.dirstate.update([f], "r")
808 self.dirstate.update([f], "r")
814
809
815 def undelete(self, list, wlock=None):
810 def undelete(self, list, wlock=None):
816 p = self.dirstate.parents()[0]
811 p = self.dirstate.parents()[0]
817 mn = self.changelog.read(p)[0]
812 mn = self.changelog.read(p)[0]
818 mf = self.manifest.readflags(mn)
819 m = self.manifest.read(mn)
813 m = self.manifest.read(mn)
820 if not wlock:
814 if not wlock:
821 wlock = self.wlock()
815 wlock = self.wlock()
822 for f in list:
816 for f in list:
823 if self.dirstate.state(f) not in "r":
817 if self.dirstate.state(f) not in "r":
824 self.ui.warn("%s not removed!\n" % f)
818 self.ui.warn("%s not removed!\n" % f)
825 else:
819 else:
826 t = self.file(f).read(m[f])
820 t = self.file(f).read(m[f])
827 self.wwrite(f, t)
821 self.wwrite(f, t)
828 util.set_exec(self.wjoin(f), mf.execf(f))
822 util.set_exec(self.wjoin(f), m.execf(f))
829 self.dirstate.update([f], "n")
823 self.dirstate.update([f], "n")
830
824
831 def copy(self, source, dest, wlock=None):
825 def copy(self, source, dest, wlock=None):
832 p = self.wjoin(dest)
826 p = self.wjoin(dest)
833 if not os.path.exists(p):
827 if not os.path.exists(p):
834 self.ui.warn(_("%s does not exist!\n") % dest)
828 self.ui.warn(_("%s does not exist!\n") % dest)
835 elif not os.path.isfile(p):
829 elif not os.path.isfile(p):
836 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
830 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
837 else:
831 else:
838 if not wlock:
832 if not wlock:
839 wlock = self.wlock()
833 wlock = self.wlock()
840 if self.dirstate.state(dest) == '?':
834 if self.dirstate.state(dest) == '?':
841 self.dirstate.update([dest], "a")
835 self.dirstate.update([dest], "a")
842 self.dirstate.copy(source, dest)
836 self.dirstate.copy(source, dest)
843
837
844 def heads(self, start=None):
838 def heads(self, start=None):
845 heads = self.changelog.heads(start)
839 heads = self.changelog.heads(start)
846 # sort the output in rev descending order
840 # sort the output in rev descending order
847 heads = [(-self.changelog.rev(h), h) for h in heads]
841 heads = [(-self.changelog.rev(h), h) for h in heads]
848 heads.sort()
842 heads.sort()
849 return [n for (r, n) in heads]
843 return [n for (r, n) in heads]
850
844
851 # branchlookup returns a dict giving a list of branches for
845 # branchlookup returns a dict giving a list of branches for
852 # each head. A branch is defined as the tag of a node or
846 # each head. A branch is defined as the tag of a node or
853 # the branch of the node's parents. If a node has multiple
847 # the branch of the node's parents. If a node has multiple
854 # branch tags, tags are eliminated if they are visible from other
848 # branch tags, tags are eliminated if they are visible from other
855 # branch tags.
849 # branch tags.
856 #
850 #
857 # So, for this graph: a->b->c->d->e
851 # So, for this graph: a->b->c->d->e
858 #                        \         /
852 #                        \         /
859 #                         aa -----/
853 #                         aa -----/
860 # a has tag 2.6.12
854 # a has tag 2.6.12
861 # d has tag 2.6.13
855 # d has tag 2.6.13
862 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
856 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
863 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
857 # for 2.6.12 can be reached from the node for 2.6.13, 2.6.12 is eliminated
864 # from the list.
858 # from the list.
865 #
859 #
866 # It is possible that more than one head will have the same branch tag.
860 # It is possible that more than one head will have the same branch tag.
867 # callers need to check the result for multiple heads under the same
861 # callers need to check the result for multiple heads under the same
868 # branch tag if that is a problem for them (i.e. checkout of a specific
862 # branch tag if that is a problem for them (i.e. checkout of a specific
869 # branch).
863 # branch).
870 #
864 #
871 # passing in a specific branch will limit the depth of the search
865 # passing in a specific branch will limit the depth of the search
872 # through the parents. It won't limit the branches returned in the
866 # through the parents. It won't limit the branches returned in the
873 # result though.
867 # result though.
874 def branchlookup(self, heads=None, branch=None):
868 def branchlookup(self, heads=None, branch=None):
875 if not heads:
869 if not heads:
876 heads = self.heads()
870 heads = self.heads()
877 headt = [ h for h in heads ]
871 headt = [ h for h in heads ]
878 chlog = self.changelog
872 chlog = self.changelog
879 branches = {}
873 branches = {}
880 merges = []
874 merges = []
881 seenmerge = {}
875 seenmerge = {}
882
876
883 # traverse the tree once for each head, recording in the branches
877 # traverse the tree once for each head, recording in the branches
884 # dict which tags are visible from this head. The branches
878 # dict which tags are visible from this head. The branches
885 # dict also records which tags are visible from each tag
879 # dict also records which tags are visible from each tag
886 # while we traverse.
880 # while we traverse.
887 while headt or merges:
881 while headt or merges:
888 if merges:
882 if merges:
889 n, found = merges.pop()
883 n, found = merges.pop()
890 visit = [n]
884 visit = [n]
891 else:
885 else:
892 h = headt.pop()
886 h = headt.pop()
893 visit = [h]
887 visit = [h]
894 found = [h]
888 found = [h]
895 seen = {}
889 seen = {}
896 while visit:
890 while visit:
897 n = visit.pop()
891 n = visit.pop()
898 if n in seen:
892 if n in seen:
899 continue
893 continue
900 pp = chlog.parents(n)
894 pp = chlog.parents(n)
901 tags = self.nodetags(n)
895 tags = self.nodetags(n)
902 if tags:
896 if tags:
903 for x in tags:
897 for x in tags:
904 if x == 'tip':
898 if x == 'tip':
905 continue
899 continue
906 for f in found:
900 for f in found:
907 branches.setdefault(f, {})[n] = 1
901 branches.setdefault(f, {})[n] = 1
908 branches.setdefault(n, {})[n] = 1
902 branches.setdefault(n, {})[n] = 1
909 break
903 break
910 if n not in found:
904 if n not in found:
911 found.append(n)
905 found.append(n)
912 if branch in tags:
906 if branch in tags:
913 continue
907 continue
914 seen[n] = 1
908 seen[n] = 1
915 if pp[1] != nullid and n not in seenmerge:
909 if pp[1] != nullid and n not in seenmerge:
916 merges.append((pp[1], [x for x in found]))
910 merges.append((pp[1], [x for x in found]))
917 seenmerge[n] = 1
911 seenmerge[n] = 1
918 if pp[0] != nullid:
912 if pp[0] != nullid:
919 visit.append(pp[0])
913 visit.append(pp[0])
920 # traverse the branches dict, eliminating branch tags from each
914 # traverse the branches dict, eliminating branch tags from each
921 # head that are visible from another branch tag for that head.
915 # head that are visible from another branch tag for that head.
922 out = {}
916 out = {}
923 viscache = {}
917 viscache = {}
924 for h in heads:
918 for h in heads:
925 def visible(node):
919 def visible(node):
926 if node in viscache:
920 if node in viscache:
927 return viscache[node]
921 return viscache[node]
928 ret = {}
922 ret = {}
929 visit = [node]
923 visit = [node]
930 while visit:
924 while visit:
931 x = visit.pop()
925 x = visit.pop()
932 if x in viscache:
926 if x in viscache:
933 ret.update(viscache[x])
927 ret.update(viscache[x])
934 elif x not in ret:
928 elif x not in ret:
935 ret[x] = 1
929 ret[x] = 1
936 if x in branches:
930 if x in branches:
937 visit[len(visit):] = branches[x].keys()
931 visit[len(visit):] = branches[x].keys()
938 viscache[node] = ret
932 viscache[node] = ret
939 return ret
933 return ret
940 if h not in branches:
934 if h not in branches:
941 continue
935 continue
942 # O(n^2), but somewhat limited. This only searches the
936 # O(n^2), but somewhat limited. This only searches the
943 # tags visible from a specific head, not all the tags in the
937 # tags visible from a specific head, not all the tags in the
944 # whole repo.
938 # whole repo.
945 for b in branches[h]:
939 for b in branches[h]:
946 vis = False
940 vis = False
947 for bb in branches[h].keys():
941 for bb in branches[h].keys():
948 if b != bb:
942 if b != bb:
949 if b in visible(bb):
943 if b in visible(bb):
950 vis = True
944 vis = True
951 break
945 break
952 if not vis:
946 if not vis:
953 l = out.setdefault(h, [])
947 l = out.setdefault(h, [])
954 l[len(l):] = self.nodetags(b)
948 l[len(l):] = self.nodetags(b)
955 return out
949 return out
956
950
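# A standalone sketch of the final elimination pass of branchlookup() above on
# a toy "branches" mapping: a tag node is dropped for a head when it is already
# reachable from one of the head's other tag nodes.  The mapping and names are
# invented; they mirror the 2.6.12/2.6.13 example in the comment block above.
def visible(branches, node):
    seen, visit = set(), [node]
    while visit:
        x = visit.pop()
        if x not in seen:
            seen.add(x)
            visit.extend(branches.get(x, ()))
    return seen

def eliminate(branches, head):
    keep = []
    for b in branches[head]:
        if not any(b in visible(branches, bb)
                   for bb in branches[head] if bb != b):
            keep.append(b)
    return sorted(keep)

# head "e" sees tag nodes "a" (2.6.12) and "d" (2.6.13); "a" is reachable from
# "d", so only "d" survives
toy = {"e": ["a", "d"], "d": ["a", "d"], "a": ["a"]}
assert eliminate(toy, "e") == ["d"]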
957 def branches(self, nodes):
951 def branches(self, nodes):
958 if not nodes:
952 if not nodes:
959 nodes = [self.changelog.tip()]
953 nodes = [self.changelog.tip()]
960 b = []
954 b = []
961 for n in nodes:
955 for n in nodes:
962 t = n
956 t = n
963 while 1:
957 while 1:
964 p = self.changelog.parents(n)
958 p = self.changelog.parents(n)
965 if p[1] != nullid or p[0] == nullid:
959 if p[1] != nullid or p[0] == nullid:
966 b.append((t, n, p[0], p[1]))
960 b.append((t, n, p[0], p[1]))
967 break
961 break
968 n = p[0]
962 n = p[0]
969 return b
963 return b
970
964
971 def between(self, pairs):
965 def between(self, pairs):
972 r = []
966 r = []
973
967
974 for top, bottom in pairs:
968 for top, bottom in pairs:
975 n, l, i = top, [], 0
969 n, l, i = top, [], 0
976 f = 1
970 f = 1
977
971
978 while n != bottom:
972 while n != bottom:
979 p = self.changelog.parents(n)[0]
973 p = self.changelog.parents(n)[0]
980 if i == f:
974 if i == f:
981 l.append(n)
975 l.append(n)
982 f = f * 2
976 f = f * 2
983 n = p
977 n = p
984 i += 1
978 i += 1
985
979
986 r.append(l)
980 r.append(l)
987
981
988 return r
982 return r
989
983
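# A standalone sketch of the spacing used by between() above: walking from
# "top" back towards "bottom", it keeps the nodes that sit 1, 2, 4, 8, ...
# steps away, which is what lets findincoming() narrow a branch by bisection
# later.  The chain below is a toy stand-in for following first parents.
def sample(chain):
    # chain[0] is "top", chain[-1] is "bottom"; chain[k + 1] is the first
    # parent of chain[k], so walking parent pointers is walking the list
    l, i, f = [], 0, 1
    for n in chain[:-1]:              # the loop above stops at bottom
        if i == f:                    # record nodes 1, 2, 4, 8, ... steps from top
            l.append(n)
            f *= 2
        i += 1
    return l

assert sample(list(range(10, -1, -1))) == [9, 8, 6, 2]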
990 def findincoming(self, remote, base=None, heads=None, force=False):
984 def findincoming(self, remote, base=None, heads=None, force=False):
991 """Return list of roots of the subsets of missing nodes from remote
985 """Return list of roots of the subsets of missing nodes from remote
992
986
993 If base dict is specified, assume that these nodes and their parents
987 If base dict is specified, assume that these nodes and their parents
994 exist on the remote side and that no child of a node of base exists
988 exist on the remote side and that no child of a node of base exists
995 in both remote and self.
989 in both remote and self.
996 Furthermore, base will be updated to include the nodes that exist
990 Furthermore, base will be updated to include the nodes that exist
997 in both self and remote but none of whose children exist in both.
991 in both self and remote but none of whose children exist in both.
998 If a list of heads is specified, return only nodes which are heads
992 If a list of heads is specified, return only nodes which are heads
999 or ancestors of these heads.
993 or ancestors of these heads.
1000
994
1001 All the ancestors of base are in self and in remote.
995 All the ancestors of base are in self and in remote.
1002 All the descendants of the list returned are missing in self.
996 All the descendants of the list returned are missing in self.
1003 (and so we know that the rest of the nodes are missing in remote, see
997 (and so we know that the rest of the nodes are missing in remote, see
1004 outgoing)
998 outgoing)
1005 """
999 """
1006 m = self.changelog.nodemap
1000 m = self.changelog.nodemap
1007 search = []
1001 search = []
1008 fetch = {}
1002 fetch = {}
1009 seen = {}
1003 seen = {}
1010 seenbranch = {}
1004 seenbranch = {}
1011 if base == None:
1005 if base == None:
1012 base = {}
1006 base = {}
1013
1007
1014 if not heads:
1008 if not heads:
1015 heads = remote.heads()
1009 heads = remote.heads()
1016
1010
1017 if self.changelog.tip() == nullid:
1011 if self.changelog.tip() == nullid:
1018 base[nullid] = 1
1012 base[nullid] = 1
1019 if heads != [nullid]:
1013 if heads != [nullid]:
1020 return [nullid]
1014 return [nullid]
1021 return []
1015 return []
1022
1016
1023 # assume we're closer to the tip than the root
1017 # assume we're closer to the tip than the root
1024 # and start by examining the heads
1018 # and start by examining the heads
1025 self.ui.status(_("searching for changes\n"))
1019 self.ui.status(_("searching for changes\n"))
1026
1020
1027 unknown = []
1021 unknown = []
1028 for h in heads:
1022 for h in heads:
1029 if h not in m:
1023 if h not in m:
1030 unknown.append(h)
1024 unknown.append(h)
1031 else:
1025 else:
1032 base[h] = 1
1026 base[h] = 1
1033
1027
1034 if not unknown:
1028 if not unknown:
1035 return []
1029 return []
1036
1030
1037 req = dict.fromkeys(unknown)
1031 req = dict.fromkeys(unknown)
1038 reqcnt = 0
1032 reqcnt = 0
1039
1033
1040 # search through remote branches
1034 # search through remote branches
1041 # a 'branch' here is a linear segment of history, with four parts:
1035 # a 'branch' here is a linear segment of history, with four parts:
1042 # head, root, first parent, second parent
1036 # head, root, first parent, second parent
1043 # (a branch always has two parents (or none) by definition)
1037 # (a branch always has two parents (or none) by definition)
1044 unknown = remote.branches(unknown)
1038 unknown = remote.branches(unknown)
1045 while unknown:
1039 while unknown:
1046 r = []
1040 r = []
1047 while unknown:
1041 while unknown:
1048 n = unknown.pop(0)
1042 n = unknown.pop(0)
1049 if n[0] in seen:
1043 if n[0] in seen:
1050 continue
1044 continue
1051
1045
1052 self.ui.debug(_("examining %s:%s\n")
1046 self.ui.debug(_("examining %s:%s\n")
1053 % (short(n[0]), short(n[1])))
1047 % (short(n[0]), short(n[1])))
1054 if n[0] == nullid: # found the end of the branch
1048 if n[0] == nullid: # found the end of the branch
1055 pass
1049 pass
1056 elif n in seenbranch:
1050 elif n in seenbranch:
1057 self.ui.debug(_("branch already found\n"))
1051 self.ui.debug(_("branch already found\n"))
1058 continue
1052 continue
1059 elif n[1] and n[1] in m: # do we know the base?
1053 elif n[1] and n[1] in m: # do we know the base?
1060 self.ui.debug(_("found incomplete branch %s:%s\n")
1054 self.ui.debug(_("found incomplete branch %s:%s\n")
1061 % (short(n[0]), short(n[1])))
1055 % (short(n[0]), short(n[1])))
1062 search.append(n) # schedule branch range for scanning
1056 search.append(n) # schedule branch range for scanning
1063 seenbranch[n] = 1
1057 seenbranch[n] = 1
1064 else:
1058 else:
1065 if n[1] not in seen and n[1] not in fetch:
1059 if n[1] not in seen and n[1] not in fetch:
1066 if n[2] in m and n[3] in m:
1060 if n[2] in m and n[3] in m:
1067 self.ui.debug(_("found new changeset %s\n") %
1061 self.ui.debug(_("found new changeset %s\n") %
1068 short(n[1]))
1062 short(n[1]))
1069 fetch[n[1]] = 1 # earliest unknown
1063 fetch[n[1]] = 1 # earliest unknown
1070 for p in n[2:4]:
1064 for p in n[2:4]:
1071 if p in m:
1065 if p in m:
1072 base[p] = 1 # latest known
1066 base[p] = 1 # latest known
1073
1067
1074 for p in n[2:4]:
1068 for p in n[2:4]:
1075 if p not in req and p not in m:
1069 if p not in req and p not in m:
1076 r.append(p)
1070 r.append(p)
1077 req[p] = 1
1071 req[p] = 1
1078 seen[n[0]] = 1
1072 seen[n[0]] = 1
1079
1073
1080 if r:
1074 if r:
1081 reqcnt += 1
1075 reqcnt += 1
1082 self.ui.debug(_("request %d: %s\n") %
1076 self.ui.debug(_("request %d: %s\n") %
1083 (reqcnt, " ".join(map(short, r))))
1077 (reqcnt, " ".join(map(short, r))))
1084 for p in range(0, len(r), 10):
1078 for p in range(0, len(r), 10):
1085 for b in remote.branches(r[p:p+10]):
1079 for b in remote.branches(r[p:p+10]):
1086 self.ui.debug(_("received %s:%s\n") %
1080 self.ui.debug(_("received %s:%s\n") %
1087 (short(b[0]), short(b[1])))
1081 (short(b[0]), short(b[1])))
1088 unknown.append(b)
1082 unknown.append(b)
1089
1083
1090 # do binary search on the branches we found
1084 # do binary search on the branches we found
1091 while search:
1085 while search:
1092 n = search.pop(0)
1086 n = search.pop(0)
1093 reqcnt += 1
1087 reqcnt += 1
1094 l = remote.between([(n[0], n[1])])[0]
1088 l = remote.between([(n[0], n[1])])[0]
1095 l.append(n[1])
1089 l.append(n[1])
1096 p = n[0]
1090 p = n[0]
1097 f = 1
1091 f = 1
1098 for i in l:
1092 for i in l:
1099 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1093 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1100 if i in m:
1094 if i in m:
1101 if f <= 2:
1095 if f <= 2:
1102 self.ui.debug(_("found new branch changeset %s\n") %
1096 self.ui.debug(_("found new branch changeset %s\n") %
1103 short(p))
1097 short(p))
1104 fetch[p] = 1
1098 fetch[p] = 1
1105 base[i] = 1
1099 base[i] = 1
1106 else:
1100 else:
1107 self.ui.debug(_("narrowed branch search to %s:%s\n")
1101 self.ui.debug(_("narrowed branch search to %s:%s\n")
1108 % (short(p), short(i)))
1102 % (short(p), short(i)))
1109 search.append((p, i))
1103 search.append((p, i))
1110 break
1104 break
1111 p, f = i, f * 2
1105 p, f = i, f * 2
1112
1106
1113 # sanity check our fetch list
1107 # sanity check our fetch list
1114 for f in fetch.keys():
1108 for f in fetch.keys():
1115 if f in m:
1109 if f in m:
1116 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1110 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1117
1111
1118 if base.keys() == [nullid]:
1112 if base.keys() == [nullid]:
1119 if force:
1113 if force:
1120 self.ui.warn(_("warning: repository is unrelated\n"))
1114 self.ui.warn(_("warning: repository is unrelated\n"))
1121 else:
1115 else:
1122 raise util.Abort(_("repository is unrelated"))
1116 raise util.Abort(_("repository is unrelated"))
1123
1117
1124 self.ui.note(_("found new changesets starting at ") +
1118 self.ui.note(_("found new changesets starting at ") +
1125 " ".join([short(f) for f in fetch]) + "\n")
1119 " ".join([short(f) for f in fetch]) + "\n")
1126
1120
1127 self.ui.debug(_("%d total queries\n") % reqcnt)
1121 self.ui.debug(_("%d total queries\n") % reqcnt)
1128
1122
1129 return fetch.keys()
1123 return fetch.keys()
1130
1124
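# A simplified, standalone sketch of what the narrowing loop above converges
# on for a single linear branch: the boundary between history the local repo
# already has and history it must fetch.  The real code bisects the sparse
# samples produced by between() over the wire; this toy version just bisects a
# plain list of nodes ordered newest to oldest, and every name is invented.
def first_known(chain, known):
    lo, hi = 0, len(chain)
    while lo < hi:
        mid = (lo + hi) // 2
        if chain[mid] in known:
            hi = mid
        else:
            lo = mid + 1
    return lo                         # index of the newest node already known

chain = ["n9", "n8", "n7", "n6", "n5"]
assert chain[first_known(chain, known={"n6", "n5"})] == "n6"   # fetch n9..n7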
1131 def findoutgoing(self, remote, base=None, heads=None, force=False):
1125 def findoutgoing(self, remote, base=None, heads=None, force=False):
1132 """Return list of nodes that are roots of subsets not in remote
1126 """Return list of nodes that are roots of subsets not in remote
1133
1127
1134 If base dict is specified, assume that these nodes and their parents
1128 If base dict is specified, assume that these nodes and their parents
1135 exist on the remote side.
1129 exist on the remote side.
1136 If a list of heads is specified, return only nodes which are heads
1130 If a list of heads is specified, return only nodes which are heads
1137 or ancestors of these heads, and return a second element which
1131 or ancestors of these heads, and return a second element which
1138 contains all remote heads which get new children.
1132 contains all remote heads which get new children.
1139 """
1133 """
1140 if base == None:
1134 if base == None:
1141 base = {}
1135 base = {}
1142 self.findincoming(remote, base, heads, force=force)
1136 self.findincoming(remote, base, heads, force=force)
1143
1137
1144 self.ui.debug(_("common changesets up to ")
1138 self.ui.debug(_("common changesets up to ")
1145 + " ".join(map(short, base.keys())) + "\n")
1139 + " ".join(map(short, base.keys())) + "\n")
1146
1140
1147 remain = dict.fromkeys(self.changelog.nodemap)
1141 remain = dict.fromkeys(self.changelog.nodemap)
1148
1142
1149 # prune everything remote has from the tree
1143 # prune everything remote has from the tree
1150 del remain[nullid]
1144 del remain[nullid]
1151 remove = base.keys()
1145 remove = base.keys()
1152 while remove:
1146 while remove:
1153 n = remove.pop(0)
1147 n = remove.pop(0)
1154 if n in remain:
1148 if n in remain:
1155 del remain[n]
1149 del remain[n]
1156 for p in self.changelog.parents(n):
1150 for p in self.changelog.parents(n):
1157 remove.append(p)
1151 remove.append(p)
1158
1152
1159 # find every node whose parents have been pruned
1153 # find every node whose parents have been pruned
1160 subset = []
1154 subset = []
1161 # find every remote head that will get new children
1155 # find every remote head that will get new children
1162 updated_heads = {}
1156 updated_heads = {}
1163 for n in remain:
1157 for n in remain:
1164 p1, p2 = self.changelog.parents(n)
1158 p1, p2 = self.changelog.parents(n)
1165 if p1 not in remain and p2 not in remain:
1159 if p1 not in remain and p2 not in remain:
1166 subset.append(n)
1160 subset.append(n)
1167 if heads:
1161 if heads:
1168 if p1 in heads:
1162 if p1 in heads:
1169 updated_heads[p1] = True
1163 updated_heads[p1] = True
1170 if p2 in heads:
1164 if p2 in heads:
1171 updated_heads[p2] = True
1165 updated_heads[p2] = True
1172
1166
1173 # this is the set of all roots we have to push
1167 # this is the set of all roots we have to push
1174 if heads:
1168 if heads:
1175 return subset, updated_heads.keys()
1169 return subset, updated_heads.keys()
1176 else:
1170 else:
1177 return subset
1171 return subset
1178
1172
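# A standalone sketch of the pruning done by findoutgoing() above on a toy
# DAG: nodes the remote already has, and all their ancestors, are removed;
# what remains is missing remotely, and its roots are the nodes whose parents
# were all pruned away.  Names and data are invented for illustration.
def outgoing_roots(parents, all_nodes, remote_has):
    remain = set(all_nodes)
    queue = list(remote_has)
    while queue:                      # prune remote-known history
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(parents.get(n, ()))
    roots = [n for n in remain
             if not any(p in remain for p in parents.get(n, ()))]
    return sorted(roots), sorted(remain)

# linear history a <- b <- c <- d, where the remote already has up to b
parents = {"b": ["a"], "c": ["b"], "d": ["c"]}
assert outgoing_roots(parents, "abcd", ["b"]) == (["c"], ["c", "d"])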
1179 def pull(self, remote, heads=None, force=False, lock=None):
1173 def pull(self, remote, heads=None, force=False, lock=None):
1180 mylock = False
1174 mylock = False
1181 if not lock:
1175 if not lock:
1182 lock = self.lock()
1176 lock = self.lock()
1183 mylock = True
1177 mylock = True
1184
1178
1185 try:
1179 try:
1186 fetch = self.findincoming(remote, force=force)
1180 fetch = self.findincoming(remote, force=force)
1187 if fetch == [nullid]:
1181 if fetch == [nullid]:
1188 self.ui.status(_("requesting all changes\n"))
1182 self.ui.status(_("requesting all changes\n"))
1189
1183
1190 if not fetch:
1184 if not fetch:
1191 self.ui.status(_("no changes found\n"))
1185 self.ui.status(_("no changes found\n"))
1192 return 0
1186 return 0
1193
1187
1194 if heads is None:
1188 if heads is None:
1195 cg = remote.changegroup(fetch, 'pull')
1189 cg = remote.changegroup(fetch, 'pull')
1196 else:
1190 else:
1197 cg = remote.changegroupsubset(fetch, heads, 'pull')
1191 cg = remote.changegroupsubset(fetch, heads, 'pull')
1198 return self.addchangegroup(cg, 'pull', remote.url())
1192 return self.addchangegroup(cg, 'pull', remote.url())
1199 finally:
1193 finally:
1200 if mylock:
1194 if mylock:
1201 lock.release()
1195 lock.release()
1202
1196
1203 def push(self, remote, force=False, revs=None):
1197 def push(self, remote, force=False, revs=None):
1204 # there are two ways to push to remote repo:
1198 # there are two ways to push to remote repo:
1205 #
1199 #
1206 # addchangegroup assumes local user can lock remote
1200 # addchangegroup assumes local user can lock remote
1207 # repo (local filesystem, old ssh servers).
1201 # repo (local filesystem, old ssh servers).
1208 #
1202 #
1209 # unbundle assumes local user cannot lock remote repo (new ssh
1203 # unbundle assumes local user cannot lock remote repo (new ssh
1210 # servers, http servers).
1204 # servers, http servers).
1211
1205
1212 if remote.capable('unbundle'):
1206 if remote.capable('unbundle'):
1213 return self.push_unbundle(remote, force, revs)
1207 return self.push_unbundle(remote, force, revs)
1214 return self.push_addchangegroup(remote, force, revs)
1208 return self.push_addchangegroup(remote, force, revs)
1215
1209
1216 def prepush(self, remote, force, revs):
1210 def prepush(self, remote, force, revs):
1217 base = {}
1211 base = {}
1218 remote_heads = remote.heads()
1212 remote_heads = remote.heads()
1219 inc = self.findincoming(remote, base, remote_heads, force=force)
1213 inc = self.findincoming(remote, base, remote_heads, force=force)
1220 if not force and inc:
1214 if not force and inc:
1221 self.ui.warn(_("abort: unsynced remote changes!\n"))
1215 self.ui.warn(_("abort: unsynced remote changes!\n"))
1222 self.ui.status(_("(did you forget to sync?"
1216 self.ui.status(_("(did you forget to sync?"
1223 " use push -f to force)\n"))
1217 " use push -f to force)\n"))
1224 return None, 1
1218 return None, 1
1225
1219
1226 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1220 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1227 if revs is not None:
1221 if revs is not None:
1228 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1222 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1229 else:
1223 else:
1230 bases, heads = update, self.changelog.heads()
1224 bases, heads = update, self.changelog.heads()
1231
1225
1232 if not bases:
1226 if not bases:
1233 self.ui.status(_("no changes found\n"))
1227 self.ui.status(_("no changes found\n"))
1234 return None, 1
1228 return None, 1
1235 elif not force:
1229 elif not force:
1236 # FIXME we don't properly detect creation of new heads
1230 # FIXME we don't properly detect creation of new heads
1237 # in the push -r case, assume the user knows what he's doing
1231 # in the push -r case, assume the user knows what he's doing
1238 if not revs and len(remote_heads) < len(heads) \
1232 if not revs and len(remote_heads) < len(heads) \
1239 and remote_heads != [nullid]:
1233 and remote_heads != [nullid]:
1240 self.ui.warn(_("abort: push creates new remote branches!\n"))
1234 self.ui.warn(_("abort: push creates new remote branches!\n"))
1241 self.ui.status(_("(did you forget to merge?"
1235 self.ui.status(_("(did you forget to merge?"
1242 " use push -f to force)\n"))
1236 " use push -f to force)\n"))
1243 return None, 1
1237 return None, 1
1244
1238
1245 if revs is None:
1239 if revs is None:
1246 cg = self.changegroup(update, 'push')
1240 cg = self.changegroup(update, 'push')
1247 else:
1241 else:
1248 cg = self.changegroupsubset(update, revs, 'push')
1242 cg = self.changegroupsubset(update, revs, 'push')
1249 return cg, remote_heads
1243 return cg, remote_heads
1250
1244
1251 def push_addchangegroup(self, remote, force, revs):
1245 def push_addchangegroup(self, remote, force, revs):
1252 lock = remote.lock()
1246 lock = remote.lock()
1253
1247
1254 ret = self.prepush(remote, force, revs)
1248 ret = self.prepush(remote, force, revs)
1255 if ret[0] is not None:
1249 if ret[0] is not None:
1256 cg, remote_heads = ret
1250 cg, remote_heads = ret
1257 return remote.addchangegroup(cg, 'push', self.url())
1251 return remote.addchangegroup(cg, 'push', self.url())
1258 return ret[1]
1252 return ret[1]
1259
1253
1260 def push_unbundle(self, remote, force, revs):
1254 def push_unbundle(self, remote, force, revs):
1261 # local repo finds heads on server, finds out what revs it
1255 # local repo finds heads on server, finds out what revs it
1262 # must push. once revs transferred, if server finds it has
1256 # must push. once revs transferred, if server finds it has
1263 # different heads (someone else won commit/push race), server
1257 # different heads (someone else won commit/push race), server
1264 # aborts.
1258 # aborts.
1265
1259
1266 ret = self.prepush(remote, force, revs)
1260 ret = self.prepush(remote, force, revs)
1267 if ret[0] is not None:
1261 if ret[0] is not None:
1268 cg, remote_heads = ret
1262 cg, remote_heads = ret
1269 if force: remote_heads = ['force']
1263 if force: remote_heads = ['force']
1270 return remote.unbundle(cg, remote_heads, 'push')
1264 return remote.unbundle(cg, remote_heads, 'push')
1271 return ret[1]
1265 return ret[1]
1272
1266
1273 def changegroupsubset(self, bases, heads, source):
1267 def changegroupsubset(self, bases, heads, source):
1274 """This function generates a changegroup consisting of all the nodes
1268 """This function generates a changegroup consisting of all the nodes
1275 that are descendants of any of the bases, and ancestors of any of
1269 that are descendants of any of the bases, and ancestors of any of
1276 the heads.
1270 the heads.
1277
1271
1278 It is fairly complex as determining which filenodes and which
1272 It is fairly complex as determining which filenodes and which
1279 manifest nodes need to be included for the changeset to be complete
1273 manifest nodes need to be included for the changeset to be complete
1280 is non-trivial.
1274 is non-trivial.
1281
1275
1282 Another wrinkle is doing the reverse, figuring out which changeset in
1276 Another wrinkle is doing the reverse, figuring out which changeset in
1283 the changegroup a particular filenode or manifestnode belongs to."""
1277 the changegroup a particular filenode or manifestnode belongs to."""
1284
1278
1285 self.hook('preoutgoing', throw=True, source=source)
1279 self.hook('preoutgoing', throw=True, source=source)
1286
1280
1287 # Set up some initial variables
1281 # Set up some initial variables
        # Make it easy to refer to self.changelog
        cl = self.changelog
        # msng is short for missing - compute the list of changesets in this
        # changegroup.
        msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
        # Some bases may turn out to be superfluous, and some heads may be
        # too. nodesbetween will return the minimal set of bases and heads
        # necessary to re-create the changegroup.

        # Known heads are the list of heads that it is assumed the recipient
        # of this changegroup will know about.
        knownheads = {}
        # We assume that all parents of bases are known heads.
        for n in bases:
            for p in cl.parents(n):
                if p != nullid:
                    knownheads[p] = 1
        knownheads = knownheads.keys()
        if knownheads:
            # Now that we know what heads are known, we can compute which
            # changesets are known. The recipient must know about all
            # changesets required to reach the known heads from the null
            # changeset.
            has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
            junk = None
            # Transform the list into an ersatz set.
            has_cl_set = dict.fromkeys(has_cl_set)
        else:
            # If there were no known heads, the recipient cannot be assumed to
            # know about any changesets.
            has_cl_set = {}

        # Make it easy to refer to self.manifest
        mnfst = self.manifest
        # We don't know which manifests are missing yet
        msng_mnfst_set = {}
        # Nor do we know which filenodes are missing.
        msng_filenode_set = {}

        junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
        junk = None

        # A changeset always belongs to itself, so the changenode lookup
        # function for a changenode is identity.
        def identity(x):
            return x

        # A function generating function. Sets up an environment for the
        # inner function.
        def cmp_by_rev_func(revlog):
            # Compare two nodes by their revision number in the environment's
            # revision history. Since the revision number both represents the
            # most efficient order to read the nodes in, and represents a
            # topological sorting of the nodes, this function is often useful.
            def cmp_by_rev(a, b):
                return cmp(revlog.rev(a), revlog.rev(b))
            return cmp_by_rev

        # If we determine that a particular file or manifest node must be a
        # node that the recipient of the changegroup will already have, we can
        # also assume the recipient will have all the parents. This function
        # prunes them from the set of missing nodes.
        def prune_parents(revlog, hasset, msngset):
            haslst = hasset.keys()
            haslst.sort(cmp_by_rev_func(revlog))
            for node in haslst:
                parentlst = [p for p in revlog.parents(node) if p != nullid]
                while parentlst:
                    n = parentlst.pop()
                    if n not in hasset:
                        hasset[n] = 1
                        p = [p for p in revlog.parents(n) if p != nullid]
                        parentlst.extend(p)
            for n in hasset:
                msngset.pop(n, None)

        # This is a function generating function used to set up an environment
        # for the inner function to execute in.
        def manifest_and_file_collector(changedfileset):
            # This is an information gathering function that gathers
            # information from each changeset node that goes out as part of
            # the changegroup. The information gathered is a list of which
            # manifest nodes are potentially required (the recipient may
            # already have them) and the total list of all files which were
            # changed in any changeset in the changegroup.
            #
            # We also remember the first changenode we saw any manifest
            # referenced by so we can later determine which changenode 'owns'
            # the manifest.
            def collect_manifests_and_files(clnode):
                c = cl.read(clnode)
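                # cl.read() returns the changeset as a tuple: c[0] is the
                # manifest node it points at and c[3] is the list of files it
                # touched, which is all we use here.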
                for f in c[3]:
                    # This is to make sure we only have one instance of each
                    # filename string for each filename.
                    changedfileset.setdefault(f, f)
                msng_mnfst_set.setdefault(c[0], clnode)
            return collect_manifests_and_files

        # Figure out which manifest nodes (of the ones we think might be part
        # of the changegroup) the recipient must know about and remove them
        # from the changegroup.
        def prune_manifests():
            has_mnfst_set = {}
            for n in msng_mnfst_set:
                # If a 'missing' manifest thinks it belongs to a changenode
                # the recipient is assumed to have, obviously the recipient
                # must have that manifest.
                linknode = cl.node(mnfst.linkrev(n))
                if linknode in has_cl_set:
                    has_mnfst_set[n] = 1
            prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)

        # Use the information collected in collect_manifests_and_files to say
        # which changenode any manifestnode belongs to.
        def lookup_manifest_link(mnfstnode):
            return msng_mnfst_set[mnfstnode]

        # A function generating function that sets up the initial environment
        # for the inner function.
        def filenode_collector(changedfiles):
            next_rev = [0]
            # This gathers information from each manifestnode included in the
            # changegroup about which filenodes the manifest node references
            # so we can include those in the changegroup too.
            #
            # It also remembers which changenode each filenode belongs to. It
            # does this by assuming a filenode belongs to the changenode that
            # the first manifest referencing it belongs to.
            def collect_msng_filenodes(mnfstnode):
                r = mnfst.rev(mnfstnode)
                if r == next_rev[0]:
                    # If the last rev we looked at was the one just previous,
                    # we only need to see a diff.
                    delta = mdiff.patchtext(mnfst.delta(mnfstnode))
                    # For each line in the delta
                    for dline in delta.splitlines():
                        # get the filename and filenode for that line
                        f, fnode = dline.split('\0')
                        fnode = bin(fnode[:40])
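                        # Each manifest delta line is "<file>\0<40 hex chars>"
                        # plus optional flag characters, so slicing to 40
                        # characters keeps just the filenode hash.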
                        f = changedfiles.get(f, None)
                        # And if the file is in the list of files we care
                        # about.
                        if f is not None:
                            # Get the changenode this manifest belongs to
                            clnode = msng_mnfst_set[mnfstnode]
                            # Create the set of filenodes for the file if
                            # there isn't one already.
                            ndset = msng_filenode_set.setdefault(f, {})
                            # And set the filenode's changelog node to the
                            # manifest's if it hasn't been set already.
                            ndset.setdefault(fnode, clnode)
                else:
                    # Otherwise we need a full manifest.
                    m = mnfst.read(mnfstnode)
                    # For every file we care about.
                    for f in changedfiles:
                        fnode = m.get(f, None)
                        # If it's in the manifest
                        if fnode is not None:
                            # See comments above.
                            clnode = msng_mnfst_set[mnfstnode]
                            ndset = msng_filenode_set.setdefault(f, {})
                            ndset.setdefault(fnode, clnode)
                # Remember the revision we hope to see next.
                next_rev[0] = r + 1
            return collect_msng_filenodes

        # We have a list of filenodes we think we need for a file, let's remove
        # all those we know the recipient must have.
        def prune_filenodes(f, filerevlog):
            msngset = msng_filenode_set[f]
            hasset = {}
            # If a 'missing' filenode thinks it belongs to a changenode we
            # assume the recipient must have, then the recipient must have
            # that filenode.
            for n in msngset:
                clnode = cl.node(filerevlog.linkrev(n))
                if clnode in has_cl_set:
                    hasset[n] = 1
            prune_parents(filerevlog, hasset, msngset)

        # A function generator function that sets up a context for the
        # inner function.
        def lookup_filenode_link_func(fname):
            msngset = msng_filenode_set[fname]
            # Lookup the changenode the filenode belongs to.
            def lookup_filenode_link(fnode):
                return msngset[fnode]
            return lookup_filenode_link

        # Now that we have all these utility functions to help out and
        # logically divide up the task, generate the group.
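        # gengroup yields the changegroup as one flat stream of chunks: the
        # changelog group first, then the manifest group, then, for each
        # changed file, a chunk naming the file followed by its filenode
        # group, and finally a closing chunk.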
        def gengroup():
            # The set of changed files starts empty.
            changedfiles = {}
            # Create a changenode group generator that will call our functions
            # back to lookup the owning changenode and collect information.
            group = cl.group(msng_cl_lst, identity,
                             manifest_and_file_collector(changedfiles))
            for chnk in group:
                yield chnk

            # The list of manifests has been collected by the generator
            # calling our functions back.
            prune_manifests()
            msng_mnfst_lst = msng_mnfst_set.keys()
            # Sort the manifestnodes by revision number.
            msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
            # Create a generator for the manifestnodes that calls our lookup
            # and data collection functions back.
            group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
                                filenode_collector(changedfiles))
            for chnk in group:
                yield chnk

            # These are no longer needed, dereference and toss the memory for
            # them.
            msng_mnfst_lst = None
            msng_mnfst_set.clear()

            changedfiles = changedfiles.keys()
            changedfiles.sort()
            # Go through all our files in order sorted by name.
            for fname in changedfiles:
                filerevlog = self.file(fname)
                # Toss out the filenodes that the recipient isn't really
                # missing.
                if msng_filenode_set.has_key(fname):
                    prune_filenodes(fname, filerevlog)
                    msng_filenode_lst = msng_filenode_set[fname].keys()
                else:
                    msng_filenode_lst = []
                # If any filenodes are left, generate the group for them,
                # otherwise don't bother.
                if len(msng_filenode_lst) > 0:
                    yield changegroup.genchunk(fname)
                    # Sort the filenodes by their revision #
                    msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
                    # Create a group generator and only pass in a changenode
                    # lookup function as we need to collect no information
                    # from filenodes.
                    group = filerevlog.group(msng_filenode_lst,
                                             lookup_filenode_link_func(fname))
                    for chnk in group:
                        yield chnk
                if msng_filenode_set.has_key(fname):
                    # Don't need this anymore, toss it to free memory.
                    del msng_filenode_set[fname]
            # Signal that no more groups are left.
            yield changegroup.closechunk()

            if msng_cl_lst:
                self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)

        return util.chunkbuffer(gengroup())

    def changegroup(self, basenodes, source):
        """Generate a changegroup of all nodes that we have that a recipient
        doesn't.

        This is much easier than the previous function as we can assume that
        the recipient has any changenode we aren't sending them."""

        self.hook('preoutgoing', throw=True, source=source)

        cl = self.changelog
        nodes = cl.nodesbetween(basenodes, None)[0]
        revset = dict.fromkeys([cl.rev(n) for n in nodes])
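        # revset is used purely as a set: gennodelst below keeps only the
        # revlog entries whose linkrev points at one of the outgoing
        # changesets.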

        def identity(x):
            return x

        def gennodelst(revlog):
            for r in xrange(0, revlog.count()):
                n = revlog.node(r)
                if revlog.linkrev(n) in revset:
                    yield n

        def changed_file_collector(changedfileset):
            def collect_changed_files(clnode):
                c = cl.read(clnode)
                for fname in c[3]:
                    changedfileset[fname] = 1
            return collect_changed_files

        def lookuprevlink_func(revlog):
            def lookuprevlink(n):
                return cl.node(revlog.linkrev(n))
            return lookuprevlink

        def gengroup():
            # construct a list of all changed files
            changedfiles = {}

            for chnk in cl.group(nodes, identity,
                                 changed_file_collector(changedfiles)):
                yield chnk
            changedfiles = changedfiles.keys()
            changedfiles.sort()

            mnfst = self.manifest
            nodeiter = gennodelst(mnfst)
            for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
                yield chnk

            for fname in changedfiles:
                filerevlog = self.file(fname)
                nodeiter = gennodelst(filerevlog)
                nodeiter = list(nodeiter)
                if nodeiter:
                    yield changegroup.genchunk(fname)
                    lookup = lookuprevlink_func(filerevlog)
                    for chnk in filerevlog.group(nodeiter, lookup):
                        yield chnk

            yield changegroup.closechunk()

        if nodes:
            self.hook('outgoing', node=hex(nodes[0]), source=source)

        return util.chunkbuffer(gengroup())

    def addchangegroup(self, source, srctype, url):
        """add changegroup to repo.
        returns number of heads modified or added + 1."""

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.opener, self.changelog.version)
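            # the appendchangelog collects the incoming changelog data in a
            # temporary append-only file; writedata() below is what makes it
            # visible in the real changelog once the manifests and file
            # revisions are in, and cleanup() discards the temporary file.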

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor
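            # cor and cnr are the tip revision numbers before and after the
            # changelog group was added, so their difference is the number of
            # new changesets.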

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.opener, self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1


    def stream_in(self, remote):
        fp = remote.stream_out()
        resp = int(fp.readline())
        if resp != 0:
            raise util.Abort(_('operation forbidden by server'))
        self.ui.status(_('streaming all changes\n'))
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
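        # After the response code, the stream carries a "<files> <bytes>"
        # summary line, then each file as a "<name>\0<size>" header line
        # followed by exactly <size> bytes of raw file data.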
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.opener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1

    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)

# used to avoid circular references so destructors work
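# aftertrans closes over only the repository path rather than the repository
# object itself; the callable it returns renames the journal files to undo
# files, which is what a later 'hg rollback' restores from.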
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True