Merge with upstream
Thomas Arendsen Hein
r3568:23f7d962 merge default
@@ -1,2164 +1,2164 @@
1 # queue.py - patch queues for mercurial
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 '''patch management and development
9
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
13
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
16
17 Common tasks (use "hg help command" for more details):
18
19 prepare repository to work with patches qinit
20 create new patch qnew
21 import existing patch qimport
22
23 print patch series qseries
24 print applied patches qapplied
25 print name of top applied patch qtop
26
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
30 '''
31
31
32 from mercurial.demandload import *
32 from mercurial.demandload import *
33 from mercurial.i18n import gettext as _
33 from mercurial.i18n import gettext as _
34 from mercurial import commands
34 from mercurial import commands
35 demandload(globals(), "os sys re struct traceback errno bz2")
35 demandload(globals(), "os sys re struct traceback errno bz2")
36 demandload(globals(), "mercurial:cmdutil,hg,patch,revlog,ui,util")
36 demandload(globals(), "mercurial:cmdutil,hg,patch,revlog,ui,util")
37
37
38 commands.norepo += " qclone qversion"
38 commands.norepo += " qclone qversion"
39
39
40 class statusentry:
40 class statusentry:
41 def __init__(self, rev, name=None):
41 def __init__(self, rev, name=None):
42 if not name:
42 if not name:
43 fields = rev.split(':', 1)
43 fields = rev.split(':', 1)
44 if len(fields) == 2:
44 if len(fields) == 2:
45 self.rev, self.name = fields
45 self.rev, self.name = fields
46 else:
46 else:
47 self.rev, self.name = None, None
47 self.rev, self.name = None, None
48 else:
48 else:
49 self.rev, self.name = rev, name
49 self.rev, self.name = rev, name
50
50
51 def __str__(self):
51 def __str__(self):
52 return self.rev + ':' + self.name
52 return self.rev + ':' + self.name
53
53
54 class queue:
54 class queue:
55 def __init__(self, ui, path, patchdir=None):
55 def __init__(self, ui, path, patchdir=None):
56 self.basepath = path
56 self.basepath = path
57 self.path = patchdir or os.path.join(path, "patches")
57 self.path = patchdir or os.path.join(path, "patches")
58 self.opener = util.opener(self.path)
58 self.opener = util.opener(self.path)
59 self.ui = ui
59 self.ui = ui
60 self.applied = []
60 self.applied = []
61 self.full_series = []
61 self.full_series = []
62 self.applied_dirty = 0
62 self.applied_dirty = 0
63 self.series_dirty = 0
63 self.series_dirty = 0
64 self.series_path = "series"
64 self.series_path = "series"
65 self.status_path = "status"
65 self.status_path = "status"
66 self.guards_path = "guards"
66 self.guards_path = "guards"
67 self.active_guards = None
67 self.active_guards = None
68 self.guards_dirty = False
68 self.guards_dirty = False
69 self._diffopts = None
69 self._diffopts = None
70
70
71 if os.path.exists(self.join(self.series_path)):
71 if os.path.exists(self.join(self.series_path)):
72 self.full_series = self.opener(self.series_path).read().splitlines()
72 self.full_series = self.opener(self.series_path).read().splitlines()
73 self.parse_series()
73 self.parse_series()
74
74
75 if os.path.exists(self.join(self.status_path)):
75 if os.path.exists(self.join(self.status_path)):
76 lines = self.opener(self.status_path).read().splitlines()
76 lines = self.opener(self.status_path).read().splitlines()
77 self.applied = [statusentry(l) for l in lines]
77 self.applied = [statusentry(l) for l in lines]
78
78
79 def diffopts(self):
79 def diffopts(self):
80 if self._diffopts is None:
80 if self._diffopts is None:
81 self._diffopts = patch.diffopts(self.ui)
81 self._diffopts = patch.diffopts(self.ui)
82 return self._diffopts
82 return self._diffopts
83
83
84 def join(self, *p):
84 def join(self, *p):
85 return os.path.join(self.path, *p)
85 return os.path.join(self.path, *p)
86
86
87 def find_series(self, patch):
87 def find_series(self, patch):
88 pre = re.compile("(\s*)([^#]+)")
88 pre = re.compile("(\s*)([^#]+)")
89 index = 0
89 index = 0
90 for l in self.full_series:
90 for l in self.full_series:
91 m = pre.match(l)
91 m = pre.match(l)
92 if m:
92 if m:
93 s = m.group(2)
93 s = m.group(2)
94 s = s.rstrip()
94 s = s.rstrip()
95 if s == patch:
95 if s == patch:
96 return index
96 return index
97 index += 1
97 index += 1
98 return None
98 return None
99
99
100 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
101
101
102 def parse_series(self):
102 def parse_series(self):
103 self.series = []
103 self.series = []
104 self.series_guards = []
104 self.series_guards = []
105 for l in self.full_series:
105 for l in self.full_series:
106 h = l.find('#')
106 h = l.find('#')
107 if h == -1:
107 if h == -1:
108 patch = l
108 patch = l
109 comment = ''
109 comment = ''
110 elif h == 0:
110 elif h == 0:
111 continue
111 continue
112 else:
112 else:
113 patch = l[:h]
113 patch = l[:h]
114 comment = l[h:]
114 comment = l[h:]
115 patch = patch.strip()
115 patch = patch.strip()
116 if patch:
116 if patch:
117 if patch in self.series:
117 if patch in self.series:
118 raise util.Abort(_('%s appears more than once in %s') %
118 raise util.Abort(_('%s appears more than once in %s') %
119 (patch, self.join(self.series_path)))
119 (patch, self.join(self.series_path)))
120 self.series.append(patch)
120 self.series.append(patch)
121 self.series_guards.append(self.guard_re.findall(comment))
121 self.series_guards.append(self.guard_re.findall(comment))
122
122
123 def check_guard(self, guard):
123 def check_guard(self, guard):
124 bad_chars = '# \t\r\n\f'
124 bad_chars = '# \t\r\n\f'
125 first = guard[0]
125 first = guard[0]
126 for c in '-+':
126 for c in '-+':
127 if first == c:
127 if first == c:
128 return (_('guard %r starts with invalid character: %r') %
128 return (_('guard %r starts with invalid character: %r') %
129 (guard, c))
129 (guard, c))
130 for c in bad_chars:
130 for c in bad_chars:
131 if c in guard:
131 if c in guard:
132 return _('invalid character in guard %r: %r') % (guard, c)
132 return _('invalid character in guard %r: %r') % (guard, c)
133
133
134 def set_active(self, guards):
134 def set_active(self, guards):
135 for guard in guards:
135 for guard in guards:
136 bad = self.check_guard(guard)
136 bad = self.check_guard(guard)
137 if bad:
137 if bad:
138 raise util.Abort(bad)
138 raise util.Abort(bad)
139 guards = dict.fromkeys(guards).keys()
139 guards = dict.fromkeys(guards).keys()
140 guards.sort()
140 guards.sort()
141 self.ui.debug('active guards: %s\n' % ' '.join(guards))
141 self.ui.debug('active guards: %s\n' % ' '.join(guards))
142 self.active_guards = guards
142 self.active_guards = guards
143 self.guards_dirty = True
143 self.guards_dirty = True
144
144
145 def active(self):
145 def active(self):
146 if self.active_guards is None:
146 if self.active_guards is None:
147 self.active_guards = []
147 self.active_guards = []
148 try:
148 try:
149 guards = self.opener(self.guards_path).read().split()
149 guards = self.opener(self.guards_path).read().split()
150 except IOError, err:
150 except IOError, err:
151 if err.errno != errno.ENOENT: raise
151 if err.errno != errno.ENOENT: raise
152 guards = []
152 guards = []
153 for i, guard in enumerate(guards):
153 for i, guard in enumerate(guards):
154 bad = self.check_guard(guard)
154 bad = self.check_guard(guard)
155 if bad:
155 if bad:
156 self.ui.warn('%s:%d: %s\n' %
156 self.ui.warn('%s:%d: %s\n' %
157 (self.join(self.guards_path), i + 1, bad))
157 (self.join(self.guards_path), i + 1, bad))
158 else:
158 else:
159 self.active_guards.append(guard)
159 self.active_guards.append(guard)
160 return self.active_guards
160 return self.active_guards
161
161
162 def set_guards(self, idx, guards):
162 def set_guards(self, idx, guards):
163 for g in guards:
163 for g in guards:
164 if len(g) < 2:
164 if len(g) < 2:
165 raise util.Abort(_('guard %r too short') % g)
165 raise util.Abort(_('guard %r too short') % g)
166 if g[0] not in '-+':
166 if g[0] not in '-+':
167 raise util.Abort(_('guard %r starts with invalid char') % g)
167 raise util.Abort(_('guard %r starts with invalid char') % g)
168 bad = self.check_guard(g[1:])
168 bad = self.check_guard(g[1:])
169 if bad:
169 if bad:
170 raise util.Abort(bad)
170 raise util.Abort(bad)
171 drop = self.guard_re.sub('', self.full_series[idx])
171 drop = self.guard_re.sub('', self.full_series[idx])
172 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
172 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
173 self.parse_series()
173 self.parse_series()
174 self.series_dirty = True
174 self.series_dirty = True
175
175
176 def pushable(self, idx):
176 def pushable(self, idx):
177 if isinstance(idx, str):
177 if isinstance(idx, str):
178 idx = self.series.index(idx)
178 idx = self.series.index(idx)
179 patchguards = self.series_guards[idx]
179 patchguards = self.series_guards[idx]
180 if not patchguards:
180 if not patchguards:
181 return True, None
181 return True, None
182 default = False
182 default = False
183 guards = self.active()
183 guards = self.active()
184 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
184 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
185 if exactneg:
185 if exactneg:
186 return False, exactneg[0]
186 return False, exactneg[0]
187 pos = [g for g in patchguards if g[0] == '+']
187 pos = [g for g in patchguards if g[0] == '+']
188 exactpos = [g for g in pos if g[1:] in guards]
188 exactpos = [g for g in pos if g[1:] in guards]
189 if pos:
189 if pos:
190 if exactpos:
190 if exactpos:
191 return True, exactpos[0]
191 return True, exactpos[0]
192 return False, pos
192 return False, pos
193 return True, ''
193 return True, ''
194
194
195 def explain_pushable(self, idx, all_patches=False):
195 def explain_pushable(self, idx, all_patches=False):
196 write = all_patches and self.ui.write or self.ui.warn
196 write = all_patches and self.ui.write or self.ui.warn
197 if all_patches or self.ui.verbose:
197 if all_patches or self.ui.verbose:
198 if isinstance(idx, str):
198 if isinstance(idx, str):
199 idx = self.series.index(idx)
199 idx = self.series.index(idx)
200 pushable, why = self.pushable(idx)
200 pushable, why = self.pushable(idx)
201 if all_patches and pushable:
201 if all_patches and pushable:
202 if why is None:
202 if why is None:
203 write(_('allowing %s - no guards in effect\n') %
203 write(_('allowing %s - no guards in effect\n') %
204 self.series[idx])
204 self.series[idx])
205 else:
205 else:
206 if not why:
206 if not why:
207 write(_('allowing %s - no matching negative guards\n') %
207 write(_('allowing %s - no matching negative guards\n') %
208 self.series[idx])
208 self.series[idx])
209 else:
209 else:
210 write(_('allowing %s - guarded by %r\n') %
210 write(_('allowing %s - guarded by %r\n') %
211 (self.series[idx], why))
211 (self.series[idx], why))
212 if not pushable:
212 if not pushable:
213 if why:
213 if why:
214 write(_('skipping %s - guarded by %r\n') %
214 write(_('skipping %s - guarded by %r\n') %
215 (self.series[idx], ' '.join(why)))
215 (self.series[idx], ' '.join(why)))
216 else:
216 else:
217 write(_('skipping %s - no matching guards\n') %
217 write(_('skipping %s - no matching guards\n') %
218 self.series[idx])
218 self.series[idx])
219
219
220 def save_dirty(self):
220 def save_dirty(self):
221 def write_list(items, path):
221 def write_list(items, path):
222 fp = self.opener(path, 'w')
222 fp = self.opener(path, 'w')
223 for i in items:
223 for i in items:
224 print >> fp, i
224 print >> fp, i
225 fp.close()
225 fp.close()
226 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
226 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
227 if self.series_dirty: write_list(self.full_series, self.series_path)
227 if self.series_dirty: write_list(self.full_series, self.series_path)
228 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
228 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
229
229
230 def readheaders(self, patch):
230 def readheaders(self, patch):
231 def eatdiff(lines):
231 def eatdiff(lines):
232 while lines:
232 while lines:
233 l = lines[-1]
233 l = lines[-1]
234 if (l.startswith("diff -") or
234 if (l.startswith("diff -") or
235 l.startswith("Index:") or
235 l.startswith("Index:") or
236 l.startswith("===========")):
236 l.startswith("===========")):
237 del lines[-1]
237 del lines[-1]
238 else:
238 else:
239 break
239 break
240 def eatempty(lines):
240 def eatempty(lines):
241 while lines:
241 while lines:
242 l = lines[-1]
242 l = lines[-1]
243 if re.match('\s*$', l):
243 if re.match('\s*$', l):
244 del lines[-1]
244 del lines[-1]
245 else:
245 else:
246 break
246 break
247
247
248 pf = self.join(patch)
248 pf = self.join(patch)
249 message = []
249 message = []
250 comments = []
250 comments = []
251 user = None
251 user = None
252 date = None
252 date = None
253 format = None
253 format = None
254 subject = None
254 subject = None
255 diffstart = 0
255 diffstart = 0
256
256
257 for line in file(pf):
257 for line in file(pf):
258 line = line.rstrip()
258 line = line.rstrip()
259 if line.startswith('diff --git'):
259 if line.startswith('diff --git'):
260 diffstart = 2
260 diffstart = 2
261 break
261 break
262 if diffstart:
262 if diffstart:
263 if line.startswith('+++ '):
263 if line.startswith('+++ '):
264 diffstart = 2
264 diffstart = 2
265 break
265 break
266 if line.startswith("--- "):
266 if line.startswith("--- "):
267 diffstart = 1
267 diffstart = 1
268 continue
268 continue
269 elif format == "hgpatch":
269 elif format == "hgpatch":
270 # parse values when importing the result of an hg export
270 # parse values when importing the result of an hg export
271 if line.startswith("# User "):
271 if line.startswith("# User "):
272 user = line[7:]
272 user = line[7:]
273 elif line.startswith("# Date "):
273 elif line.startswith("# Date "):
274 date = line[7:]
274 date = line[7:]
275 elif not line.startswith("# ") and line:
275 elif not line.startswith("# ") and line:
276 message.append(line)
276 message.append(line)
277 format = None
277 format = None
278 elif line == '# HG changeset patch':
278 elif line == '# HG changeset patch':
279 format = "hgpatch"
279 format = "hgpatch"
280 elif (format != "tagdone" and (line.startswith("Subject: ") or
280 elif (format != "tagdone" and (line.startswith("Subject: ") or
281 line.startswith("subject: "))):
281 line.startswith("subject: "))):
282 subject = line[9:]
282 subject = line[9:]
283 format = "tag"
283 format = "tag"
284 elif (format != "tagdone" and (line.startswith("From: ") or
284 elif (format != "tagdone" and (line.startswith("From: ") or
285 line.startswith("from: "))):
285 line.startswith("from: "))):
286 user = line[6:]
286 user = line[6:]
287 format = "tag"
287 format = "tag"
288 elif format == "tag" and line == "":
288 elif format == "tag" and line == "":
289 # when looking for tags (subject: from: etc) they
289 # when looking for tags (subject: from: etc) they
290 # end once you find a blank line in the source
290 # end once you find a blank line in the source
291 format = "tagdone"
291 format = "tagdone"
292 elif message or line:
292 elif message or line:
293 message.append(line)
293 message.append(line)
294 comments.append(line)
294 comments.append(line)
295
295
296 eatdiff(message)
296 eatdiff(message)
297 eatdiff(comments)
297 eatdiff(comments)
298 eatempty(message)
298 eatempty(message)
299 eatempty(comments)
299 eatempty(comments)
300
300
301 # make sure message isn't empty
301 # make sure message isn't empty
302 if format and format.startswith("tag") and subject:
302 if format and format.startswith("tag") and subject:
303 message.insert(0, "")
303 message.insert(0, "")
304 message.insert(0, subject)
304 message.insert(0, subject)
305 return (message, comments, user, date, diffstart > 1)
305 return (message, comments, user, date, diffstart > 1)
306
306
307 def printdiff(self, repo, node1, node2=None, files=None,
307 def printdiff(self, repo, node1, node2=None, files=None,
308 fp=None, changes=None, opts={}):
308 fp=None, changes=None, opts={}):
309 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
309 fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)
310
310
311 patch.diff(repo, node1, node2, fns, match=matchfn,
311 patch.diff(repo, node1, node2, fns, match=matchfn,
312 fp=fp, changes=changes, opts=self.diffopts())
312 fp=fp, changes=changes, opts=self.diffopts())
313
313
314 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
314 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
315 # first try just applying the patch
315 # first try just applying the patch
316 (err, n) = self.apply(repo, [ patch ], update_status=False,
316 (err, n) = self.apply(repo, [ patch ], update_status=False,
317 strict=True, merge=rev, wlock=wlock)
317 strict=True, merge=rev, wlock=wlock)
318
318
319 if err == 0:
319 if err == 0:
320 return (err, n)
320 return (err, n)
321
321
322 if n is None:
322 if n is None:
323 raise util.Abort(_("apply failed for patch %s") % patch)
323 raise util.Abort(_("apply failed for patch %s") % patch)
324
324
325 self.ui.warn("patch didn't work out, merging %s\n" % patch)
325 self.ui.warn("patch didn't work out, merging %s\n" % patch)
326
326
327 # apply failed, strip away that rev and merge.
327 # apply failed, strip away that rev and merge.
328 hg.clean(repo, head, wlock=wlock)
328 hg.clean(repo, head, wlock=wlock)
329 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
329 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
330
330
331 c = repo.changelog.read(rev)
331 c = repo.changelog.read(rev)
332 ret = hg.merge(repo, rev, wlock=wlock)
332 ret = hg.merge(repo, rev, wlock=wlock)
333 if ret:
333 if ret:
334 raise util.Abort(_("update returned %d") % ret)
334 raise util.Abort(_("update returned %d") % ret)
335 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
335 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
336 if n == None:
336 if n == None:
337 raise util.Abort(_("repo commit failed"))
337 raise util.Abort(_("repo commit failed"))
338 try:
338 try:
339 message, comments, user, date, patchfound = mergeq.readheaders(patch)
339 message, comments, user, date, patchfound = mergeq.readheaders(patch)
340 except:
340 except:
341 raise util.Abort(_("unable to read %s") % patch)
341 raise util.Abort(_("unable to read %s") % patch)
342
342
343 patchf = self.opener(patch, "w")
343 patchf = self.opener(patch, "w")
344 if comments:
344 if comments:
345 comments = "\n".join(comments) + '\n\n'
345 comments = "\n".join(comments) + '\n\n'
346 patchf.write(comments)
346 patchf.write(comments)
347 self.printdiff(repo, head, n, fp=patchf)
347 self.printdiff(repo, head, n, fp=patchf)
348 patchf.close()
348 patchf.close()
349 return (0, n)
349 return (0, n)
350
350
351 def qparents(self, repo, rev=None):
351 def qparents(self, repo, rev=None):
352 if rev is None:
352 if rev is None:
353 (p1, p2) = repo.dirstate.parents()
353 (p1, p2) = repo.dirstate.parents()
354 if p2 == revlog.nullid:
354 if p2 == revlog.nullid:
355 return p1
355 return p1
356 if len(self.applied) == 0:
356 if len(self.applied) == 0:
357 return None
357 return None
358 return revlog.bin(self.applied[-1].rev)
358 return revlog.bin(self.applied[-1].rev)
359 pp = repo.changelog.parents(rev)
359 pp = repo.changelog.parents(rev)
360 if pp[1] != revlog.nullid:
360 if pp[1] != revlog.nullid:
361 arevs = [ x.rev for x in self.applied ]
361 arevs = [ x.rev for x in self.applied ]
362 p0 = revlog.hex(pp[0])
362 p0 = revlog.hex(pp[0])
363 p1 = revlog.hex(pp[1])
363 p1 = revlog.hex(pp[1])
364 if p0 in arevs:
364 if p0 in arevs:
365 return pp[0]
365 return pp[0]
366 if p1 in arevs:
366 if p1 in arevs:
367 return pp[1]
367 return pp[1]
368 return pp[0]
368 return pp[0]
369
369
370 def mergepatch(self, repo, mergeq, series, wlock):
370 def mergepatch(self, repo, mergeq, series, wlock):
371 if len(self.applied) == 0:
371 if len(self.applied) == 0:
372 # each of the patches merged in will have two parents. This
372 # each of the patches merged in will have two parents. This
373 # can confuse the qrefresh, qdiff, and strip code because it
373 # can confuse the qrefresh, qdiff, and strip code because it
374 # needs to know which parent is actually in the patch queue.
374 # needs to know which parent is actually in the patch queue.
375 # so, we insert a merge marker with only one parent. This way
375 # so, we insert a merge marker with only one parent. This way
376 # the first patch in the queue is never a merge patch
376 # the first patch in the queue is never a merge patch
377 #
377 #
378 pname = ".hg.patches.merge.marker"
378 pname = ".hg.patches.merge.marker"
379 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
379 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
380 wlock=wlock)
380 wlock=wlock)
381 self.applied.append(statusentry(revlog.hex(n), pname))
381 self.applied.append(statusentry(revlog.hex(n), pname))
382 self.applied_dirty = 1
382 self.applied_dirty = 1
383
383
384 head = self.qparents(repo)
384 head = self.qparents(repo)
385
385
386 for patch in series:
386 for patch in series:
387 patch = mergeq.lookup(patch, strict=True)
387 patch = mergeq.lookup(patch, strict=True)
388 if not patch:
388 if not patch:
389 self.ui.warn("patch %s does not exist\n" % patch)
389 self.ui.warn("patch %s does not exist\n" % patch)
390 return (1, None)
390 return (1, None)
391 pushable, reason = self.pushable(patch)
391 pushable, reason = self.pushable(patch)
392 if not pushable:
392 if not pushable:
393 self.explain_pushable(patch, all_patches=True)
393 self.explain_pushable(patch, all_patches=True)
394 continue
394 continue
395 info = mergeq.isapplied(patch)
395 info = mergeq.isapplied(patch)
396 if not info:
396 if not info:
397 self.ui.warn("patch %s is not applied\n" % patch)
397 self.ui.warn("patch %s is not applied\n" % patch)
398 return (1, None)
398 return (1, None)
399 rev = revlog.bin(info[1])
399 rev = revlog.bin(info[1])
400 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
400 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
401 if head:
401 if head:
402 self.applied.append(statusentry(revlog.hex(head), patch))
402 self.applied.append(statusentry(revlog.hex(head), patch))
403 self.applied_dirty = 1
403 self.applied_dirty = 1
404 if err:
404 if err:
405 return (err, head)
405 return (err, head)
406 return (0, head)
406 return (0, head)
407
407
408 def patch(self, repo, patchfile):
408 def patch(self, repo, patchfile):
409 '''Apply patchfile to the working directory.
409 '''Apply patchfile to the working directory.
410 patchfile: file name of patch'''
410 patchfile: file name of patch'''
411 files = {}
411 files = {}
412 try:
412 try:
413 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
413 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
414 files=files)
414 files=files)
415 except Exception, inst:
415 except Exception, inst:
416 self.ui.note(str(inst) + '\n')
416 self.ui.note(str(inst) + '\n')
417 if not self.ui.verbose:
417 if not self.ui.verbose:
418 self.ui.warn("patch failed, unable to continue (try -v)\n")
418 self.ui.warn("patch failed, unable to continue (try -v)\n")
419 return (False, files, False)
419 return (False, files, False)
420
420
421 return (True, files, fuzz)
421 return (True, files, fuzz)
422
422
423 def apply(self, repo, series, list=False, update_status=True,
423 def apply(self, repo, series, list=False, update_status=True,
424 strict=False, patchdir=None, merge=None, wlock=None):
424 strict=False, patchdir=None, merge=None, wlock=None):
425 # TODO unify with commands.py
425 # TODO unify with commands.py
426 if not patchdir:
426 if not patchdir:
427 patchdir = self.path
427 patchdir = self.path
428 err = 0
428 err = 0
429 if not wlock:
429 if not wlock:
430 wlock = repo.wlock()
430 wlock = repo.wlock()
431 lock = repo.lock()
431 lock = repo.lock()
432 tr = repo.transaction()
432 tr = repo.transaction()
433 n = None
433 n = None
434 for patchname in series:
434 for patchname in series:
435 pushable, reason = self.pushable(patchname)
435 pushable, reason = self.pushable(patchname)
436 if not pushable:
436 if not pushable:
437 self.explain_pushable(patchname, all_patches=True)
437 self.explain_pushable(patchname, all_patches=True)
438 continue
438 continue
439 self.ui.warn("applying %s\n" % patchname)
439 self.ui.warn("applying %s\n" % patchname)
440 pf = os.path.join(patchdir, patchname)
440 pf = os.path.join(patchdir, patchname)
441
441
442 try:
442 try:
443 message, comments, user, date, patchfound = self.readheaders(patchname)
443 message, comments, user, date, patchfound = self.readheaders(patchname)
444 except:
444 except:
445 self.ui.warn("Unable to read %s\n" % patchname)
445 self.ui.warn("Unable to read %s\n" % patchname)
446 err = 1
446 err = 1
447 break
447 break
448
448
449 if not message:
449 if not message:
450 message = "imported patch %s\n" % patchname
450 message = "imported patch %s\n" % patchname
451 else:
451 else:
452 if list:
452 if list:
453 message.append("\nimported patch %s" % patchname)
453 message.append("\nimported patch %s" % patchname)
454 message = '\n'.join(message)
454 message = '\n'.join(message)
455
455
456 (patcherr, files, fuzz) = self.patch(repo, pf)
456 (patcherr, files, fuzz) = self.patch(repo, pf)
457 patcherr = not patcherr
457 patcherr = not patcherr
458
458
459 if merge and files:
459 if merge and files:
460 # Mark as merged and update dirstate parent info
460 # Mark as merged and update dirstate parent info
461 repo.dirstate.update(repo.dirstate.filterfiles(files.keys()), 'm')
461 repo.dirstate.update(repo.dirstate.filterfiles(files.keys()), 'm')
462 p1, p2 = repo.dirstate.parents()
462 p1, p2 = repo.dirstate.parents()
463 repo.dirstate.setparents(p1, merge)
463 repo.dirstate.setparents(p1, merge)
464 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
464 files = patch.updatedir(self.ui, repo, files, wlock=wlock)
465 n = repo.commit(files, message, user, date, force=1, lock=lock,
465 n = repo.commit(files, message, user, date, force=1, lock=lock,
466 wlock=wlock)
466 wlock=wlock)
467
467
468 if n == None:
468 if n == None:
469 raise util.Abort(_("repo commit failed"))
469 raise util.Abort(_("repo commit failed"))
470
470
471 if update_status:
471 if update_status:
472 self.applied.append(statusentry(revlog.hex(n), patchname))
472 self.applied.append(statusentry(revlog.hex(n), patchname))
473
473
474 if patcherr:
474 if patcherr:
475 if not patchfound:
475 if not patchfound:
476 self.ui.warn("patch %s is empty\n" % patchname)
476 self.ui.warn("patch %s is empty\n" % patchname)
477 err = 0
477 err = 0
478 else:
478 else:
479 self.ui.warn("patch failed, rejects left in working dir\n")
479 self.ui.warn("patch failed, rejects left in working dir\n")
480 err = 1
480 err = 1
481 break
481 break
482
482
483 if fuzz and strict:
483 if fuzz and strict:
484 self.ui.warn("fuzz found when applying patch, stopping\n")
484 self.ui.warn("fuzz found when applying patch, stopping\n")
485 err = 1
485 err = 1
486 break
486 break
487 tr.close()
487 tr.close()
488 return (err, n)
488 return (err, n)
489
489
490 def delete(self, repo, patches, opts):
490 def delete(self, repo, patches, opts):
491 realpatches = []
491 realpatches = []
492 for patch in patches:
492 for patch in patches:
493 patch = self.lookup(patch, strict=True)
493 patch = self.lookup(patch, strict=True)
494 info = self.isapplied(patch)
494 info = self.isapplied(patch)
495 if info:
495 if info:
496 raise util.Abort(_("cannot delete applied patch %s") % patch)
496 raise util.Abort(_("cannot delete applied patch %s") % patch)
497 if patch not in self.series:
497 if patch not in self.series:
498 raise util.Abort(_("patch %s not in series file") % patch)
498 raise util.Abort(_("patch %s not in series file") % patch)
499 realpatches.append(patch)
499 realpatches.append(patch)
500
500
501 appliedbase = 0
501 appliedbase = 0
502 if opts.get('rev'):
502 if opts.get('rev'):
503 if not self.applied:
503 if not self.applied:
504 raise util.Abort(_('no patches applied'))
504 raise util.Abort(_('no patches applied'))
505 revs = [int(r) for r in cmdutil.revrange(ui, repo, opts['rev'])]
505 revs = cmdutil.revrange(ui, repo, opts['rev'])
506 if len(revs) > 1 and revs[0] > revs[1]:
506 if len(revs) > 1 and revs[0] > revs[1]:
507 revs.reverse()
507 revs.reverse()
508 for rev in revs:
508 for rev in revs:
509 if appliedbase >= len(self.applied):
509 if appliedbase >= len(self.applied):
510 raise util.Abort(_("revision %d is not managed") % rev)
510 raise util.Abort(_("revision %d is not managed") % rev)
511
511
512 base = revlog.bin(self.applied[appliedbase].rev)
512 base = revlog.bin(self.applied[appliedbase].rev)
513 node = repo.changelog.node(rev)
513 node = repo.changelog.node(rev)
514 if node != base:
514 if node != base:
515 raise util.Abort(_("cannot delete revision %d above "
515 raise util.Abort(_("cannot delete revision %d above "
516 "applied patches") % rev)
516 "applied patches") % rev)
517 realpatches.append(self.applied[appliedbase].name)
517 realpatches.append(self.applied[appliedbase].name)
518 appliedbase += 1
518 appliedbase += 1
519
519
520 if not opts.get('keep'):
520 if not opts.get('keep'):
521 r = self.qrepo()
521 r = self.qrepo()
522 if r:
522 if r:
523 r.remove(realpatches, True)
523 r.remove(realpatches, True)
524 else:
524 else:
525 for p in realpatches:
525 for p in realpatches:
526 os.unlink(self.join(p))
526 os.unlink(self.join(p))
527
527
528 if appliedbase:
528 if appliedbase:
529 del self.applied[:appliedbase]
529 del self.applied[:appliedbase]
530 self.applied_dirty = 1
530 self.applied_dirty = 1
531 indices = [self.find_series(p) for p in realpatches]
531 indices = [self.find_series(p) for p in realpatches]
532 indices.sort()
532 indices.sort()
533 for i in indices[-1::-1]:
533 for i in indices[-1::-1]:
534 del self.full_series[i]
534 del self.full_series[i]
535 self.parse_series()
535 self.parse_series()
536 self.series_dirty = 1
536 self.series_dirty = 1
537
537
538 def check_toppatch(self, repo):
538 def check_toppatch(self, repo):
539 if len(self.applied) > 0:
539 if len(self.applied) > 0:
540 top = revlog.bin(self.applied[-1].rev)
540 top = revlog.bin(self.applied[-1].rev)
541 pp = repo.dirstate.parents()
541 pp = repo.dirstate.parents()
542 if top not in pp:
542 if top not in pp:
543 raise util.Abort(_("queue top not at same revision as working directory"))
543 raise util.Abort(_("queue top not at same revision as working directory"))
544 return top
544 return top
545 return None
545 return None
546 def check_localchanges(self, repo, force=False, refresh=True):
546 def check_localchanges(self, repo, force=False, refresh=True):
547 m, a, r, d = repo.status()[:4]
547 m, a, r, d = repo.status()[:4]
548 if m or a or r or d:
548 if m or a or r or d:
549 if not force:
549 if not force:
550 if refresh:
550 if refresh:
551 raise util.Abort(_("local changes found, refresh first"))
551 raise util.Abort(_("local changes found, refresh first"))
552 else:
552 else:
553 raise util.Abort(_("local changes found"))
553 raise util.Abort(_("local changes found"))
554 return m, a, r, d
554 return m, a, r, d
555 def new(self, repo, patch, msg=None, force=None):
555 def new(self, repo, patch, msg=None, force=None):
556 if os.path.exists(self.join(patch)):
556 if os.path.exists(self.join(patch)):
557 raise util.Abort(_('patch "%s" already exists') % patch)
557 raise util.Abort(_('patch "%s" already exists') % patch)
558 m, a, r, d = self.check_localchanges(repo, force)
558 m, a, r, d = self.check_localchanges(repo, force)
559 commitfiles = m + a + r
559 commitfiles = m + a + r
560 self.check_toppatch(repo)
560 self.check_toppatch(repo)
561 wlock = repo.wlock()
561 wlock = repo.wlock()
562 insert = self.full_series_end()
562 insert = self.full_series_end()
563 if msg:
563 if msg:
564 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
564 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
565 wlock=wlock)
565 wlock=wlock)
566 else:
566 else:
567 n = repo.commit(commitfiles,
567 n = repo.commit(commitfiles,
568 "New patch: %s" % patch, force=True, wlock=wlock)
568 "New patch: %s" % patch, force=True, wlock=wlock)
569 if n == None:
569 if n == None:
570 raise util.Abort(_("repo commit failed"))
570 raise util.Abort(_("repo commit failed"))
571 self.full_series[insert:insert] = [patch]
571 self.full_series[insert:insert] = [patch]
572 self.applied.append(statusentry(revlog.hex(n), patch))
572 self.applied.append(statusentry(revlog.hex(n), patch))
573 self.parse_series()
573 self.parse_series()
574 self.series_dirty = 1
574 self.series_dirty = 1
575 self.applied_dirty = 1
575 self.applied_dirty = 1
576 p = self.opener(patch, "w")
576 p = self.opener(patch, "w")
577 if msg:
577 if msg:
578 msg = msg + "\n"
578 msg = msg + "\n"
579 p.write(msg)
579 p.write(msg)
580 p.close()
580 p.close()
581 wlock = None
581 wlock = None
582 r = self.qrepo()
582 r = self.qrepo()
583 if r: r.add([patch])
583 if r: r.add([patch])
584 if commitfiles:
584 if commitfiles:
585 self.refresh(repo, short=True)
585 self.refresh(repo, short=True)
586
586
587 def strip(self, repo, rev, update=True, backup="all", wlock=None):
587 def strip(self, repo, rev, update=True, backup="all", wlock=None):
588 def limitheads(chlog, stop):
588 def limitheads(chlog, stop):
589 """return the list of all nodes that have no children"""
589 """return the list of all nodes that have no children"""
590 p = {}
590 p = {}
591 h = []
591 h = []
592 stoprev = 0
592 stoprev = 0
593 if stop in chlog.nodemap:
593 if stop in chlog.nodemap:
594 stoprev = chlog.rev(stop)
594 stoprev = chlog.rev(stop)
595
595
596 for r in xrange(chlog.count() - 1, -1, -1):
596 for r in xrange(chlog.count() - 1, -1, -1):
597 n = chlog.node(r)
597 n = chlog.node(r)
598 if n not in p:
598 if n not in p:
599 h.append(n)
599 h.append(n)
600 if n == stop:
600 if n == stop:
601 break
601 break
602 if r < stoprev:
602 if r < stoprev:
603 break
603 break
604 for pn in chlog.parents(n):
604 for pn in chlog.parents(n):
605 p[pn] = 1
605 p[pn] = 1
606 return h
606 return h
607
607
608 def bundle(cg):
608 def bundle(cg):
609 backupdir = repo.join("strip-backup")
609 backupdir = repo.join("strip-backup")
610 if not os.path.isdir(backupdir):
610 if not os.path.isdir(backupdir):
611 os.mkdir(backupdir)
611 os.mkdir(backupdir)
612 name = os.path.join(backupdir, "%s" % revlog.short(rev))
612 name = os.path.join(backupdir, "%s" % revlog.short(rev))
613 name = savename(name)
613 name = savename(name)
614 self.ui.warn("saving bundle to %s\n" % name)
614 self.ui.warn("saving bundle to %s\n" % name)
615 # TODO, exclusive open
615 # TODO, exclusive open
616 f = open(name, "wb")
616 f = open(name, "wb")
617 try:
617 try:
618 f.write("HG10")
618 f.write("HG10")
619 z = bz2.BZ2Compressor(9)
619 z = bz2.BZ2Compressor(9)
620 while 1:
620 while 1:
621 chunk = cg.read(4096)
621 chunk = cg.read(4096)
622 if not chunk:
622 if not chunk:
623 break
623 break
624 f.write(z.compress(chunk))
624 f.write(z.compress(chunk))
625 f.write(z.flush())
625 f.write(z.flush())
626 except:
626 except:
627 os.unlink(name)
627 os.unlink(name)
628 raise
628 raise
629 f.close()
629 f.close()
630 return name
630 return name
631
631
632 def stripall(rev, revnum):
632 def stripall(rev, revnum):
633 cl = repo.changelog
633 cl = repo.changelog
634 c = cl.read(rev)
634 c = cl.read(rev)
635 mm = repo.manifest.read(c[0])
635 mm = repo.manifest.read(c[0])
636 seen = {}
636 seen = {}
637
637
638 for x in xrange(revnum, cl.count()):
638 for x in xrange(revnum, cl.count()):
639 c = cl.read(cl.node(x))
639 c = cl.read(cl.node(x))
640 for f in c[3]:
640 for f in c[3]:
641 if f in seen:
641 if f in seen:
642 continue
642 continue
643 seen[f] = 1
643 seen[f] = 1
644 if f in mm:
644 if f in mm:
645 filerev = mm[f]
645 filerev = mm[f]
646 else:
646 else:
647 filerev = 0
647 filerev = 0
648 seen[f] = filerev
648 seen[f] = filerev
649 # we go in two steps here so the strip loop happens in a
649 # we go in two steps here so the strip loop happens in a
650 # sensible order. When stripping many files, this helps keep
650 # sensible order. When stripping many files, this helps keep
651 # our disk access patterns under control.
651 # our disk access patterns under control.
652 seen_list = seen.keys()
652 seen_list = seen.keys()
653 seen_list.sort()
653 seen_list.sort()
654 for f in seen_list:
654 for f in seen_list:
655 ff = repo.file(f)
655 ff = repo.file(f)
656 filerev = seen[f]
656 filerev = seen[f]
657 if filerev != 0:
657 if filerev != 0:
658 if filerev in ff.nodemap:
658 if filerev in ff.nodemap:
659 filerev = ff.rev(filerev)
659 filerev = ff.rev(filerev)
660 else:
660 else:
661 filerev = 0
661 filerev = 0
662 ff.strip(filerev, revnum)
662 ff.strip(filerev, revnum)
663
663
664 if not wlock:
664 if not wlock:
665 wlock = repo.wlock()
665 wlock = repo.wlock()
666 lock = repo.lock()
666 lock = repo.lock()
667 chlog = repo.changelog
667 chlog = repo.changelog
668 # TODO delete the undo files, and handle undo of merge sets
668 # TODO delete the undo files, and handle undo of merge sets
669 pp = chlog.parents(rev)
669 pp = chlog.parents(rev)
670 revnum = chlog.rev(rev)
670 revnum = chlog.rev(rev)
671
671
672 if update:
672 if update:
673 self.check_localchanges(repo, refresh=False)
673 self.check_localchanges(repo, refresh=False)
674 urev = self.qparents(repo, rev)
674 urev = self.qparents(repo, rev)
675 hg.clean(repo, urev, wlock=wlock)
675 hg.clean(repo, urev, wlock=wlock)
676 repo.dirstate.write()
676 repo.dirstate.write()
677
677
678 # save is a list of all the branches we are truncating away
678 # save is a list of all the branches we are truncating away
679 # that we actually want to keep. changegroup will be used
679 # that we actually want to keep. changegroup will be used
680 # to preserve them and add them back after the truncate
680 # to preserve them and add them back after the truncate
681 saveheads = []
681 saveheads = []
682 savebases = {}
682 savebases = {}
683
683
684 heads = limitheads(chlog, rev)
684 heads = limitheads(chlog, rev)
685 seen = {}
685 seen = {}
686
686
687 # search through all the heads, finding those where the revision
687 # search through all the heads, finding those where the revision
688 # we want to strip away is an ancestor. Also look for merges
688 # we want to strip away is an ancestor. Also look for merges
689 # that might be turned into new heads by the strip.
689 # that might be turned into new heads by the strip.
690 while heads:
690 while heads:
691 h = heads.pop()
691 h = heads.pop()
692 n = h
692 n = h
693 while True:
693 while True:
694 seen[n] = 1
694 seen[n] = 1
695 pp = chlog.parents(n)
695 pp = chlog.parents(n)
696 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
696 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
697 if pp[1] not in seen:
697 if pp[1] not in seen:
698 heads.append(pp[1])
698 heads.append(pp[1])
699 if pp[0] == revlog.nullid:
699 if pp[0] == revlog.nullid:
700 break
700 break
701 if chlog.rev(pp[0]) < revnum:
701 if chlog.rev(pp[0]) < revnum:
702 break
702 break
703 n = pp[0]
703 n = pp[0]
704 if n == rev:
704 if n == rev:
705 break
705 break
706 r = chlog.reachable(h, rev)
706 r = chlog.reachable(h, rev)
707 if rev not in r:
707 if rev not in r:
708 saveheads.append(h)
708 saveheads.append(h)
709 for x in r:
709 for x in r:
710 if chlog.rev(x) > revnum:
710 if chlog.rev(x) > revnum:
711 savebases[x] = 1
711 savebases[x] = 1
712
712
713 # create a changegroup for all the branches we need to keep
713 # create a changegroup for all the branches we need to keep
714 if backup == "all":
714 if backup == "all":
715 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
715 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
716 bundle(backupch)
716 bundle(backupch)
717 if saveheads:
717 if saveheads:
718 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
718 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
719 chgrpfile = bundle(backupch)
719 chgrpfile = bundle(backupch)
720
720
721 stripall(rev, revnum)
721 stripall(rev, revnum)
722
722
723 change = chlog.read(rev)
723 change = chlog.read(rev)
724 chlog.strip(revnum, revnum)
724 chlog.strip(revnum, revnum)
725 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
725 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
726 if saveheads:
726 if saveheads:
727 self.ui.status("adding branch\n")
727 self.ui.status("adding branch\n")
728 commands.unbundle(self.ui, repo, chgrpfile, update=False)
728 commands.unbundle(self.ui, repo, chgrpfile, update=False)
729 if backup != "strip":
729 if backup != "strip":
730 os.unlink(chgrpfile)
730 os.unlink(chgrpfile)
731
731
732 def isapplied(self, patch):
732 def isapplied(self, patch):
733 """returns (index, rev, patch)"""
733 """returns (index, rev, patch)"""
734 for i in xrange(len(self.applied)):
734 for i in xrange(len(self.applied)):
735 a = self.applied[i]
735 a = self.applied[i]
736 if a.name == patch:
736 if a.name == patch:
737 return (i, a.rev, a.name)
737 return (i, a.rev, a.name)
738 return None
738 return None
739
739
740 # if the exact patch name does not exist, we try a few
740 # if the exact patch name does not exist, we try a few
741 # variations. If strict is passed, we try only #1
741 # variations. If strict is passed, we try only #1
742 #
742 #
743 # 1) a number to indicate an offset in the series file
743 # 1) a number to indicate an offset in the series file
744 # 2) a unique substring of the patch name was given
744 # 2) a unique substring of the patch name was given
745 # 3) patchname[-+]num to indicate an offset in the series file
745 # 3) patchname[-+]num to indicate an offset in the series file
746 def lookup(self, patch, strict=False):
746 def lookup(self, patch, strict=False):
747 patch = patch and str(patch)
747 patch = patch and str(patch)
748
748
749 def partial_name(s):
749 def partial_name(s):
750 if s in self.series:
750 if s in self.series:
751 return s
751 return s
752 matches = [x for x in self.series if s in x]
752 matches = [x for x in self.series if s in x]
753 if len(matches) > 1:
753 if len(matches) > 1:
754 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
754 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
755 for m in matches:
755 for m in matches:
756 self.ui.warn(' %s\n' % m)
756 self.ui.warn(' %s\n' % m)
757 return None
757 return None
758 if matches:
758 if matches:
759 return matches[0]
759 return matches[0]
760 if len(self.series) > 0 and len(self.applied) > 0:
760 if len(self.series) > 0 and len(self.applied) > 0:
761 if s == 'qtip':
761 if s == 'qtip':
762 return self.series[self.series_end()-1]
762 return self.series[self.series_end()-1]
763 if s == 'qbase':
763 if s == 'qbase':
764 return self.series[0]
764 return self.series[0]
765 return None
765 return None
766 if patch == None:
766 if patch == None:
767 return None
767 return None
768
768
769 # we don't want to return a partial match until we make
769 # we don't want to return a partial match until we make
770 # sure the file name passed in does not exist (checked below)
770 # sure the file name passed in does not exist (checked below)
771 res = partial_name(patch)
771 res = partial_name(patch)
772 if res and res == patch:
772 if res and res == patch:
773 return res
773 return res
774
774
775 if not os.path.isfile(self.join(patch)):
775 if not os.path.isfile(self.join(patch)):
776 try:
776 try:
777 sno = int(patch)
777 sno = int(patch)
778 except(ValueError, OverflowError):
778 except(ValueError, OverflowError):
779 pass
779 pass
780 else:
780 else:
781 if sno < len(self.series):
781 if sno < len(self.series):
782 return self.series[sno]
782 return self.series[sno]
783 if not strict:
783 if not strict:
784 # return any partial match made above
784 # return any partial match made above
785 if res:
785 if res:
786 return res
786 return res
787 minus = patch.rfind('-')
787 minus = patch.rfind('-')
788 if minus >= 0:
788 if minus >= 0:
789 res = partial_name(patch[:minus])
789 res = partial_name(patch[:minus])
790 if res:
790 if res:
791 i = self.series.index(res)
791 i = self.series.index(res)
792 try:
792 try:
793 off = int(patch[minus+1:] or 1)
793 off = int(patch[minus+1:] or 1)
794 except(ValueError, OverflowError):
794 except(ValueError, OverflowError):
795 pass
795 pass
796 else:
796 else:
797 if i - off >= 0:
797 if i - off >= 0:
798 return self.series[i - off]
798 return self.series[i - off]
799 plus = patch.rfind('+')
799 plus = patch.rfind('+')
800 if plus >= 0:
800 if plus >= 0:
801 res = partial_name(patch[:plus])
801 res = partial_name(patch[:plus])
802 if res:
802 if res:
803 i = self.series.index(res)
803 i = self.series.index(res)
804 try:
804 try:
805 off = int(patch[plus+1:] or 1)
805 off = int(patch[plus+1:] or 1)
806 except(ValueError, OverflowError):
806 except(ValueError, OverflowError):
807 pass
807 pass
808 else:
808 else:
809 if i + off < len(self.series):
809 if i + off < len(self.series):
810 return self.series[i + off]
810 return self.series[i + off]
811 raise util.Abort(_("patch %s not in series") % patch)
811 raise util.Abort(_("patch %s not in series") % patch)
812
812
813 def push(self, repo, patch=None, force=False, list=False,
813 def push(self, repo, patch=None, force=False, list=False,
814 mergeq=None, wlock=None):
814 mergeq=None, wlock=None):
815 if not wlock:
815 if not wlock:
816 wlock = repo.wlock()
816 wlock = repo.wlock()
817 patch = self.lookup(patch)
817 patch = self.lookup(patch)
818 if patch and self.isapplied(patch):
818 if patch and self.isapplied(patch):
819 raise util.Abort(_("patch %s is already applied") % patch)
819 raise util.Abort(_("patch %s is already applied") % patch)
820 if self.series_end() == len(self.series):
820 if self.series_end() == len(self.series):
821 raise util.Abort(_("patch series fully applied"))
821 raise util.Abort(_("patch series fully applied"))
822 if not force:
822 if not force:
823 self.check_localchanges(repo)
823 self.check_localchanges(repo)
824
824
825 self.applied_dirty = 1;
825 self.applied_dirty = 1;
826 start = self.series_end()
826 start = self.series_end()
827 if start > 0:
827 if start > 0:
828 self.check_toppatch(repo)
828 self.check_toppatch(repo)
829 if not patch:
829 if not patch:
830 patch = self.series[start]
830 patch = self.series[start]
831 end = start + 1
831 end = start + 1
832 else:
832 else:
833 end = self.series.index(patch, start) + 1
833 end = self.series.index(patch, start) + 1
834 s = self.series[start:end]
834 s = self.series[start:end]
835 if mergeq:
835 if mergeq:
836 ret = self.mergepatch(repo, mergeq, s, wlock)
836 ret = self.mergepatch(repo, mergeq, s, wlock)
837 else:
837 else:
838 ret = self.apply(repo, s, list, wlock=wlock)
838 ret = self.apply(repo, s, list, wlock=wlock)
839 top = self.applied[-1].name
839 top = self.applied[-1].name
840 if ret[0]:
840 if ret[0]:
841 self.ui.write("Errors during apply, please fix and refresh %s\n" %
841 self.ui.write("Errors during apply, please fix and refresh %s\n" %
842 top)
842 top)
843 else:
843 else:
844 self.ui.write("Now at: %s\n" % top)
844 self.ui.write("Now at: %s\n" % top)
845 return ret[0]
845 return ret[0]
846
846
847 def pop(self, repo, patch=None, force=False, update=True, all=False,
847 def pop(self, repo, patch=None, force=False, update=True, all=False,
848 wlock=None):
848 wlock=None):
849 def getfile(f, rev):
849 def getfile(f, rev):
850 t = repo.file(f).read(rev)
850 t = repo.file(f).read(rev)
851 try:
851 try:
852 repo.wfile(f, "w").write(t)
852 repo.wfile(f, "w").write(t)
853 except IOError:
853 except IOError:
854 try:
854 try:
855 os.makedirs(os.path.dirname(repo.wjoin(f)))
855 os.makedirs(os.path.dirname(repo.wjoin(f)))
856 except OSError, err:
856 except OSError, err:
857 if err.errno != errno.EEXIST: raise
857 if err.errno != errno.EEXIST: raise
858 repo.wfile(f, "w").write(t)
858 repo.wfile(f, "w").write(t)
859
859
860 if not wlock:
860 if not wlock:
861 wlock = repo.wlock()
861 wlock = repo.wlock()
862 if patch:
862 if patch:
863 # index, rev, patch
863 # index, rev, patch
864 info = self.isapplied(patch)
864 info = self.isapplied(patch)
865 if not info:
865 if not info:
866 patch = self.lookup(patch)
866 patch = self.lookup(patch)
867 info = self.isapplied(patch)
867 info = self.isapplied(patch)
868 if not info:
868 if not info:
869 raise util.Abort(_("patch %s is not applied") % patch)
869 raise util.Abort(_("patch %s is not applied") % patch)
870 if len(self.applied) == 0:
870 if len(self.applied) == 0:
871 raise util.Abort(_("no patches applied"))
871 raise util.Abort(_("no patches applied"))
872
872
873 if not update:
873 if not update:
874 parents = repo.dirstate.parents()
874 parents = repo.dirstate.parents()
875 rr = [ revlog.bin(x.rev) for x in self.applied ]
875 rr = [ revlog.bin(x.rev) for x in self.applied ]
876 for p in parents:
876 for p in parents:
877 if p in rr:
877 if p in rr:
878 self.ui.warn("qpop: forcing dirstate update\n")
878 self.ui.warn("qpop: forcing dirstate update\n")
879 update = True
879 update = True
880
880
881 if not force and update:
881 if not force and update:
882 self.check_localchanges(repo)
882 self.check_localchanges(repo)
883
883
884 self.applied_dirty = 1;
884 self.applied_dirty = 1;
885 end = len(self.applied)
885 end = len(self.applied)
886 if not patch:
886 if not patch:
887 if all:
887 if all:
888 popi = 0
888 popi = 0
889 else:
889 else:
890 popi = len(self.applied) - 1
890 popi = len(self.applied) - 1
891 else:
891 else:
892 popi = info[0] + 1
892 popi = info[0] + 1
893 if popi >= end:
893 if popi >= end:
894 self.ui.warn("qpop: %s is already at the top\n" % patch)
894 self.ui.warn("qpop: %s is already at the top\n" % patch)
895 return
895 return
896 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
896 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
897
897
898 start = info[0]
898 start = info[0]
899 rev = revlog.bin(info[1])
899 rev = revlog.bin(info[1])
900
900
901 # we know there are no local changes, so we can make a simplified
901 # we know there are no local changes, so we can make a simplified
902 # form of hg.update.
902 # form of hg.update.
903 if update:
903 if update:
904 top = self.check_toppatch(repo)
904 top = self.check_toppatch(repo)
905 qp = self.qparents(repo, rev)
905 qp = self.qparents(repo, rev)
906 changes = repo.changelog.read(qp)
906 changes = repo.changelog.read(qp)
907 mmap = repo.manifest.read(changes[0])
907 mmap = repo.manifest.read(changes[0])
908 m, a, r, d, u = repo.status(qp, top)[:5]
908 m, a, r, d, u = repo.status(qp, top)[:5]
909 if d:
909 if d:
910 raise util.Abort("deletions found between repo revs")
910 raise util.Abort("deletions found between repo revs")
911 for f in m:
911 for f in m:
912 getfile(f, mmap[f])
912 getfile(f, mmap[f])
913 for f in r:
913 for f in r:
914 getfile(f, mmap[f])
914 getfile(f, mmap[f])
915 util.set_exec(repo.wjoin(f), mmap.execf(f))
915 util.set_exec(repo.wjoin(f), mmap.execf(f))
916 repo.dirstate.update(m + r, 'n')
916 repo.dirstate.update(m + r, 'n')
917 for f in a:
917 for f in a:
918 try: os.unlink(repo.wjoin(f))
918 try: os.unlink(repo.wjoin(f))
919 except: raise
919 except: raise
920 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
920 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
921 except: pass
921 except: pass
922 if a:
922 if a:
923 repo.dirstate.forget(a)
923 repo.dirstate.forget(a)
924 repo.dirstate.setparents(qp, revlog.nullid)
924 repo.dirstate.setparents(qp, revlog.nullid)
925 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
925 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
926 del self.applied[start:end]
926 del self.applied[start:end]
927 if len(self.applied):
927 if len(self.applied):
928 self.ui.write("Now at: %s\n" % self.applied[-1].name)
928 self.ui.write("Now at: %s\n" % self.applied[-1].name)
929 else:
929 else:
930 self.ui.write("Patch queue now empty\n")
930 self.ui.write("Patch queue now empty\n")
931
931
932 def diff(self, repo, pats, opts):
932 def diff(self, repo, pats, opts):
933 top = self.check_toppatch(repo)
933 top = self.check_toppatch(repo)
934 if not top:
934 if not top:
935 self.ui.write("No patches applied\n")
935 self.ui.write("No patches applied\n")
936 return
936 return
937 qp = self.qparents(repo, top)
937 qp = self.qparents(repo, top)
938 self.printdiff(repo, qp, files=pats, opts=opts)
938 self.printdiff(repo, qp, files=pats, opts=opts)
939
939
940 def refresh(self, repo, pats=None, **opts):
940 def refresh(self, repo, pats=None, **opts):
941 if len(self.applied) == 0:
941 if len(self.applied) == 0:
942 self.ui.write("No patches applied\n")
942 self.ui.write("No patches applied\n")
943 return 1
943 return 1
944 wlock = repo.wlock()
944 wlock = repo.wlock()
945 self.check_toppatch(repo)
945 self.check_toppatch(repo)
946 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
946 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
947 top = revlog.bin(top)
947 top = revlog.bin(top)
948 cparents = repo.changelog.parents(top)
948 cparents = repo.changelog.parents(top)
949 patchparent = self.qparents(repo, top)
949 patchparent = self.qparents(repo, top)
950 message, comments, user, date, patchfound = self.readheaders(patchfn)
950 message, comments, user, date, patchfound = self.readheaders(patchfn)
951
951
952 patchf = self.opener(patchfn, "w")
952 patchf = self.opener(patchfn, "w")
953 msg = opts.get('msg', '').rstrip()
953 msg = opts.get('msg', '').rstrip()
954 if msg:
954 if msg:
955 if comments:
955 if comments:
956 # Remove existing message.
956 # Remove existing message.
957 ci = 0
957 ci = 0
958 for mi in xrange(len(message)):
958 for mi in xrange(len(message)):
959 while message[mi] != comments[ci]:
959 while message[mi] != comments[ci]:
960 ci += 1
960 ci += 1
961 del comments[ci]
961 del comments[ci]
962 comments.append(msg)
962 comments.append(msg)
963 if comments:
963 if comments:
964 comments = "\n".join(comments) + '\n\n'
964 comments = "\n".join(comments) + '\n\n'
965 patchf.write(comments)
965 patchf.write(comments)
966
966
967 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
967 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
968 tip = repo.changelog.tip()
968 tip = repo.changelog.tip()
969 if top == tip:
969 if top == tip:
970 # if the top of our patch queue is also the tip, there is an
970 # if the top of our patch queue is also the tip, there is an
971 # optimization here. We update the dirstate in place and strip
971 # optimization here. We update the dirstate in place and strip
972 # off the tip commit. Then just commit the current directory
972 # off the tip commit. Then just commit the current directory
973 # tree. We can also send repo.commit the list of files
973 # tree. We can also send repo.commit the list of files
974 # changed to speed up the diff
974 # changed to speed up the diff
975 #
975 #
976 # in short mode, we only diff the files included in the
976 # in short mode, we only diff the files included in the
977 # patch already
977 # patch already
978 #
978 #
979 # this should really read:
979 # this should really read:
980 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
980 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
981 # but we do it backwards to take advantage of manifest/chlog
981 # but we do it backwards to take advantage of manifest/chlog
982 # caching against the next repo.status call
982 # caching against the next repo.status call
983 #
983 #
984 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
984 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
985 if opts.get('short'):
985 if opts.get('short'):
986 filelist = mm + aa + dd
986 filelist = mm + aa + dd
987 else:
987 else:
988 filelist = None
988 filelist = None
989 m, a, r, d, u = repo.status(files=filelist)[:5]
989 m, a, r, d, u = repo.status(files=filelist)[:5]
990
990
991 # we might end up with files that were added between tip and
991 # we might end up with files that were added between tip and
992 # the dirstate parent, but then changed in the local dirstate.
992 # the dirstate parent, but then changed in the local dirstate.
993 # in this case, we want them to only show up in the added section
993 # in this case, we want them to only show up in the added section
994 for x in m:
994 for x in m:
995 if x not in aa:
995 if x not in aa:
996 mm.append(x)
996 mm.append(x)
997 # we might end up with files added by the local dirstate that
997 # we might end up with files added by the local dirstate that
998 # were deleted by the patch. In this case, they should only
998 # were deleted by the patch. In this case, they should only
999 # show up in the changed section.
999 # show up in the changed section.
1000 for x in a:
1000 for x in a:
1001 if x in dd:
1001 if x in dd:
1002 del dd[dd.index(x)]
1002 del dd[dd.index(x)]
1003 mm.append(x)
1003 mm.append(x)
1004 else:
1004 else:
1005 aa.append(x)
1005 aa.append(x)
1006 # make sure any files deleted in the local dirstate
1006 # make sure any files deleted in the local dirstate
1007 # are not in the add or change column of the patch
1007 # are not in the add or change column of the patch
1008 forget = []
1008 forget = []
1009 for x in d + r:
1009 for x in d + r:
1010 if x in aa:
1010 if x in aa:
1011 del aa[aa.index(x)]
1011 del aa[aa.index(x)]
1012 forget.append(x)
1012 forget.append(x)
1013 continue
1013 continue
1014 elif x in mm:
1014 elif x in mm:
1015 del mm[mm.index(x)]
1015 del mm[mm.index(x)]
1016 dd.append(x)
1016 dd.append(x)
1017
1017
1018 m = list(util.unique(mm))
1018 m = util.unique(mm)
1019 r = list(util.unique(dd))
1019 r = util.unique(dd)
1020 a = list(util.unique(aa))
1020 a = util.unique(aa)
1021 filelist = filter(matchfn, util.unique(m + r + a))
1021 filelist = filter(matchfn, util.unique(m + r + a))
1022 if opts.get('git'):
1022 if opts.get('git'):
1023 self.diffopts().git = True
1023 self.diffopts().git = True
1024 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1024 patch.diff(repo, patchparent, files=filelist, match=matchfn,
1025 fp=patchf, changes=(m, a, r, [], u),
1025 fp=patchf, changes=(m, a, r, [], u),
1026 opts=self.diffopts())
1026 opts=self.diffopts())
1027 patchf.close()
1027 patchf.close()
1028
1028
1029 changes = repo.changelog.read(tip)
1029 changes = repo.changelog.read(tip)
1030 repo.dirstate.setparents(*cparents)
1030 repo.dirstate.setparents(*cparents)
1031 copies = [(f, repo.dirstate.copied(f)) for f in a]
1031 copies = [(f, repo.dirstate.copied(f)) for f in a]
1032 repo.dirstate.update(a, 'a')
1032 repo.dirstate.update(a, 'a')
1033 for dst, src in copies:
1033 for dst, src in copies:
1034 repo.dirstate.copy(src, dst)
1034 repo.dirstate.copy(src, dst)
1035 repo.dirstate.update(r, 'r')
1035 repo.dirstate.update(r, 'r')
1036 # if the patch excludes a modified file, mark that file with mtime=0
1036 # if the patch excludes a modified file, mark that file with mtime=0
1037 # so status can see it.
1037 # so status can see it.
1038 mm = []
1038 mm = []
1039 for i in xrange(len(m)-1, -1, -1):
1039 for i in xrange(len(m)-1, -1, -1):
1040 if not matchfn(m[i]):
1040 if not matchfn(m[i]):
1041 mm.append(m[i])
1041 mm.append(m[i])
1042 del m[i]
1042 del m[i]
1043 repo.dirstate.update(m, 'n')
1043 repo.dirstate.update(m, 'n')
1044 repo.dirstate.update(mm, 'n', st_mtime=0)
1044 repo.dirstate.update(mm, 'n', st_mtime=0)
1045 repo.dirstate.forget(forget)
1045 repo.dirstate.forget(forget)
1046
1046
1047 if not msg:
1047 if not msg:
1048 if not message:
1048 if not message:
1049 message = "patch queue: %s\n" % patchfn
1049 message = "patch queue: %s\n" % patchfn
1050 else:
1050 else:
1051 message = "\n".join(message)
1051 message = "\n".join(message)
1052 else:
1052 else:
1053 message = msg
1053 message = msg
1054
1054
1055 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1055 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1056 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1056 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1057 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1057 self.applied[-1] = statusentry(revlog.hex(n), patchfn)
1058 self.applied_dirty = 1
1058 self.applied_dirty = 1
1059 else:
1059 else:
1060 self.printdiff(repo, patchparent, fp=patchf)
1060 self.printdiff(repo, patchparent, fp=patchf)
1061 patchf.close()
1061 patchf.close()
1062 self.pop(repo, force=True, wlock=wlock)
1062 self.pop(repo, force=True, wlock=wlock)
1063 self.push(repo, force=True, wlock=wlock)
1063 self.push(repo, force=True, wlock=wlock)
1064
1064
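# Editor's sketch (not part of queue.py): a simplified, standalone view of how
# qrefresh above reconciles two status results -- the patchparent->tip status
# (mm, aa, dd) and the working-directory status (m, a, r, d) -- so that each
# file ends up in exactly one section of the refreshed patch. List names
# mirror the code above; the helper itself and the sorting are illustrative.

def reconcile(mm, aa, dd, m, a, r, d):
    mm, aa, dd = list(mm), list(aa), list(dd)
    # files modified locally but added between tip and the dirstate parent
    # stay in the added section; everything else joins the changed section
    for x in m:
        if x not in aa:
            mm.append(x)
    # files added locally that the patch deleted become plain changes
    for x in a:
        if x in dd:
            dd.remove(x)
            mm.append(x)
        else:
            aa.append(x)
    # files deleted or removed locally drop out of the add/change columns
    forget = []
    for x in d + r:
        if x in aa:
            aa.remove(x)
            forget.append(x)
        elif x in mm:
            mm.remove(x)
            dd.append(x)
    return sorted(set(mm)), sorted(set(aa)), sorted(set(dd)), forget

# e.g. a file 'f' added by the patch and then edited locally stays "added":
# reconcile([], ['f'], [], ['f'], [], [], []) -> ([], ['f'], [], [])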
1065 def init(self, repo, create=False):
1065 def init(self, repo, create=False):
1066 if os.path.isdir(self.path):
1066 if os.path.isdir(self.path):
1067 raise util.Abort(_("patch queue directory already exists"))
1067 raise util.Abort(_("patch queue directory already exists"))
1068 os.mkdir(self.path)
1068 os.mkdir(self.path)
1069 if create:
1069 if create:
1070 return self.qrepo(create=True)
1070 return self.qrepo(create=True)
1071
1071
1072 def unapplied(self, repo, patch=None):
1072 def unapplied(self, repo, patch=None):
1073 if patch and patch not in self.series:
1073 if patch and patch not in self.series:
1074 raise util.Abort(_("patch %s is not in series file") % patch)
1074 raise util.Abort(_("patch %s is not in series file") % patch)
1075 if not patch:
1075 if not patch:
1076 start = self.series_end()
1076 start = self.series_end()
1077 else:
1077 else:
1078 start = self.series.index(patch) + 1
1078 start = self.series.index(patch) + 1
1079 unapplied = []
1079 unapplied = []
1080 for i in xrange(start, len(self.series)):
1080 for i in xrange(start, len(self.series)):
1081 pushable, reason = self.pushable(i)
1081 pushable, reason = self.pushable(i)
1082 if pushable:
1082 if pushable:
1083 unapplied.append((i, self.series[i]))
1083 unapplied.append((i, self.series[i]))
1084 self.explain_pushable(i)
1084 self.explain_pushable(i)
1085 return unapplied
1085 return unapplied
1086
1086
1087 def qseries(self, repo, missing=None, start=0, length=0, status=None,
1087 def qseries(self, repo, missing=None, start=0, length=0, status=None,
1088 summary=False):
1088 summary=False):
1089 def displayname(patchname):
1089 def displayname(patchname):
1090 if summary:
1090 if summary:
1091 msg = self.readheaders(patchname)[0]
1091 msg = self.readheaders(patchname)[0]
1092 msg = msg and ': ' + msg[0] or ': '
1092 msg = msg and ': ' + msg[0] or ': '
1093 else:
1093 else:
1094 msg = ''
1094 msg = ''
1095 return '%s%s' % (patchname, msg)
1095 return '%s%s' % (patchname, msg)
1096
1096
1097 def pname(i):
1097 def pname(i):
1098 if status == 'A':
1098 if status == 'A':
1099 return self.applied[i].name
1099 return self.applied[i].name
1100 else:
1100 else:
1101 return self.series[i]
1101 return self.series[i]
1102
1102
1103 unapplied = self.series_end(all_patches=True)
1103 unapplied = self.series_end(all_patches=True)
1104 if not length:
1104 if not length:
1105 length = len(self.series) - start
1105 length = len(self.series) - start
1106 if not missing:
1106 if not missing:
1107 for i in xrange(start, start+length):
1107 for i in xrange(start, start+length):
1108 pfx = ''
1108 pfx = ''
1109 patch = pname(i)
1109 patch = pname(i)
1110 if self.ui.verbose:
1110 if self.ui.verbose:
1111 if i < unapplied:
1111 if i < unapplied:
1112 status = 'A'
1112 status = 'A'
1113 elif self.pushable(i)[0]:
1113 elif self.pushable(i)[0]:
1114 status = 'U'
1114 status = 'U'
1115 else:
1115 else:
1116 status = 'G'
1116 status = 'G'
1117 pfx = '%d %s ' % (i, status)
1117 pfx = '%d %s ' % (i, status)
1118 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1118 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1119 else:
1119 else:
1120 msng_list = []
1120 msng_list = []
1121 for root, dirs, files in os.walk(self.path):
1121 for root, dirs, files in os.walk(self.path):
1122 d = root[len(self.path) + 1:]
1122 d = root[len(self.path) + 1:]
1123 for f in files:
1123 for f in files:
1124 fl = os.path.join(d, f)
1124 fl = os.path.join(d, f)
1125 if (fl not in self.series and
1125 if (fl not in self.series and
1126 fl not in (self.status_path, self.series_path)
1126 fl not in (self.status_path, self.series_path)
1127 and not fl.startswith('.')):
1127 and not fl.startswith('.')):
1128 msng_list.append(fl)
1128 msng_list.append(fl)
1129 msng_list.sort()
1129 msng_list.sort()
1130 for x in msng_list:
1130 for x in msng_list:
1131 pfx = self.ui.verbose and ('D ') or ''
1131 pfx = self.ui.verbose and ('D ') or ''
1132 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1132 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1133
1133
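# Editor's sketch (not part of queue.py): the meaning of the status column
# that qseries prints in verbose mode above -- 'A' for applied patches, 'U'
# for unapplied patches that would be pushed, and 'G' for patches currently
# blocked by a guard. The helper below is hypothetical.

def series_flag(index, first_unapplied, pushable):
    if index < first_unapplied:
        return 'A'          # already applied
    elif pushable(index):
        return 'U'          # unapplied, would be pushed
    else:
        return 'G'          # guarded, would be skipped

# e.g. with two patches applied and patch 3 guarded:
# series_flag(1, 2, lambda i: True)  -> 'A'
# series_flag(2, 2, lambda i: True)  -> 'U'
# series_flag(3, 2, lambda i: False) -> 'G'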
1134 def issaveline(self, l):
1134 def issaveline(self, l):
1135 if l.name == '.hg.patches.save.line':
1135 if l.name == '.hg.patches.save.line':
1136 return True
1136 return True
1137
1137
1138 def qrepo(self, create=False):
1138 def qrepo(self, create=False):
1139 if create or os.path.isdir(self.join(".hg")):
1139 if create or os.path.isdir(self.join(".hg")):
1140 return hg.repository(self.ui, path=self.path, create=create)
1140 return hg.repository(self.ui, path=self.path, create=create)
1141
1141
1142 def restore(self, repo, rev, delete=None, qupdate=None):
1142 def restore(self, repo, rev, delete=None, qupdate=None):
1143 c = repo.changelog.read(rev)
1143 c = repo.changelog.read(rev)
1144 desc = c[4].strip()
1144 desc = c[4].strip()
1145 lines = desc.splitlines()
1145 lines = desc.splitlines()
1146 i = 0
1146 i = 0
1147 datastart = None
1147 datastart = None
1148 series = []
1148 series = []
1149 applied = []
1149 applied = []
1150 qpp = None
1150 qpp = None
1151 for i in xrange(0, len(lines)):
1151 for i in xrange(0, len(lines)):
1152 if lines[i] == 'Patch Data:':
1152 if lines[i] == 'Patch Data:':
1153 datastart = i + 1
1153 datastart = i + 1
1154 elif lines[i].startswith('Dirstate:'):
1154 elif lines[i].startswith('Dirstate:'):
1155 l = lines[i].rstrip()
1155 l = lines[i].rstrip()
1156 l = l[10:].split(' ')
1156 l = l[10:].split(' ')
1157 qpp = [ hg.bin(x) for x in l ]
1157 qpp = [ hg.bin(x) for x in l ]
1158 elif datastart != None:
1158 elif datastart != None:
1159 l = lines[i].rstrip()
1159 l = lines[i].rstrip()
1160 se = statusentry(l)
1160 se = statusentry(l)
1161 file_ = se.name
1161 file_ = se.name
1162 if se.rev:
1162 if se.rev:
1163 applied.append(se)
1163 applied.append(se)
1164 else:
1164 else:
1165 series.append(file_)
1165 series.append(file_)
1166 if datastart == None:
1166 if datastart == None:
1167 self.ui.warn("No saved patch data found\n")
1167 self.ui.warn("No saved patch data found\n")
1168 return 1
1168 return 1
1169 self.ui.warn("restoring status: %s\n" % lines[0])
1169 self.ui.warn("restoring status: %s\n" % lines[0])
1170 self.full_series = series
1170 self.full_series = series
1171 self.applied = applied
1171 self.applied = applied
1172 self.parse_series()
1172 self.parse_series()
1173 self.series_dirty = 1
1173 self.series_dirty = 1
1174 self.applied_dirty = 1
1174 self.applied_dirty = 1
1175 heads = repo.changelog.heads()
1175 heads = repo.changelog.heads()
1176 if delete:
1176 if delete:
1177 if rev not in heads:
1177 if rev not in heads:
1178 self.ui.warn("save entry has children, leaving it alone\n")
1178 self.ui.warn("save entry has children, leaving it alone\n")
1179 else:
1179 else:
1180 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1180 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1181 pp = repo.dirstate.parents()
1181 pp = repo.dirstate.parents()
1182 if rev in pp:
1182 if rev in pp:
1183 update = True
1183 update = True
1184 else:
1184 else:
1185 update = False
1185 update = False
1186 self.strip(repo, rev, update=update, backup='strip')
1186 self.strip(repo, rev, update=update, backup='strip')
1187 if qpp:
1187 if qpp:
1188 self.ui.warn("saved queue repository parents: %s %s\n" %
1188 self.ui.warn("saved queue repository parents: %s %s\n" %
1189 (hg.short(qpp[0]), hg.short(qpp[1])))
1189 (hg.short(qpp[0]), hg.short(qpp[1])))
1190 if qupdate:
1190 if qupdate:
1191 print "queue directory updating"
1191 print "queue directory updating"
1192 r = self.qrepo()
1192 r = self.qrepo()
1193 if not r:
1193 if not r:
1194 self.ui.warn("Unable to load queue repository\n")
1194 self.ui.warn("Unable to load queue repository\n")
1195 return 1
1195 return 1
1196 hg.clean(r, qpp[0])
1196 hg.clean(r, qpp[0])
1197
1197
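# Editor's sketch (not part of queue.py): qsave/qrestore above keep the queue
# state in the description of an ordinary changeset.  A "Dirstate:" line
# records the queue repository parents, and everything after "Patch Data:"
# lists the applied patches as "rev:name" lines followed by the full series
# as ":name" lines.  This standalone parser is illustrative only.

def parse_save_desc(desc):
    applied, series, qparents = [], [], None
    data = False
    for line in desc.splitlines():
        if line.startswith('Dirstate:'):
            qparents = line[len('Dirstate:'):].split()
        elif line == 'Patch Data:':
            data = True
        elif data and line.strip():
            rev, name = line.split(':', 1)
            if rev:
                applied.append((rev, name))
            else:
                series.append(name)
    return applied, series, qparents

# e.g. parse_save_desc("hg patches saved state\n"
#                      "Dirstate: 1111 2222\n\n"
#                      "Patch Data:\n"
#                      "abcd:first.patch\n"
#                      ":first.patch\n"
#                      ":second.patch\n")
# -> ([('abcd', 'first.patch')], ['first.patch', 'second.patch'],
#     ['1111', '2222'])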
1198 def save(self, repo, msg=None):
1198 def save(self, repo, msg=None):
1199 if len(self.applied) == 0:
1199 if len(self.applied) == 0:
1200 self.ui.warn("save: no patches applied, exiting\n")
1200 self.ui.warn("save: no patches applied, exiting\n")
1201 return 1
1201 return 1
1202 if self.issaveline(self.applied[-1]):
1202 if self.issaveline(self.applied[-1]):
1203 self.ui.warn("status is already saved\n")
1203 self.ui.warn("status is already saved\n")
1204 return 1
1204 return 1
1205
1205
1206 ar = [ ':' + x for x in self.full_series ]
1206 ar = [ ':' + x for x in self.full_series ]
1207 if not msg:
1207 if not msg:
1208 msg = "hg patches saved state"
1208 msg = "hg patches saved state"
1209 else:
1209 else:
1210 msg = "hg patches: " + msg.rstrip('\r\n')
1210 msg = "hg patches: " + msg.rstrip('\r\n')
1211 r = self.qrepo()
1211 r = self.qrepo()
1212 if r:
1212 if r:
1213 pp = r.dirstate.parents()
1213 pp = r.dirstate.parents()
1214 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1214 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1215 msg += "\n\nPatch Data:\n"
1215 msg += "\n\nPatch Data:\n"
1216 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1216 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1217 "\n".join(ar) + '\n' or "")
1217 "\n".join(ar) + '\n' or "")
1218 n = repo.commit(None, text, user=None, force=1)
1218 n = repo.commit(None, text, user=None, force=1)
1219 if not n:
1219 if not n:
1220 self.ui.warn("repo commit failed\n")
1220 self.ui.warn("repo commit failed\n")
1221 return 1
1221 return 1
1222 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1222 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1223 self.applied_dirty = 1
1223 self.applied_dirty = 1
1224
1224
1225 def full_series_end(self):
1225 def full_series_end(self):
1226 if len(self.applied) > 0:
1226 if len(self.applied) > 0:
1227 p = self.applied[-1].name
1227 p = self.applied[-1].name
1228 end = self.find_series(p)
1228 end = self.find_series(p)
1229 if end == None:
1229 if end == None:
1230 return len(self.full_series)
1230 return len(self.full_series)
1231 return end + 1
1231 return end + 1
1232 return 0
1232 return 0
1233
1233
1234 def series_end(self, all_patches=False):
1234 def series_end(self, all_patches=False):
1235 end = 0
1235 end = 0
1236 def next(start):
1236 def next(start):
1237 if all_patches:
1237 if all_patches:
1238 return start
1238 return start
1239 i = start
1239 i = start
1240 while i < len(self.series):
1240 while i < len(self.series):
1241 p, reason = self.pushable(i)
1241 p, reason = self.pushable(i)
1242 if p:
1242 if p:
1243 break
1243 break
1244 self.explain_pushable(i)
1244 self.explain_pushable(i)
1245 i += 1
1245 i += 1
1246 return i
1246 return i
1247 if len(self.applied) > 0:
1247 if len(self.applied) > 0:
1248 p = self.applied[-1].name
1248 p = self.applied[-1].name
1249 try:
1249 try:
1250 end = self.series.index(p)
1250 end = self.series.index(p)
1251 except ValueError:
1251 except ValueError:
1252 return 0
1252 return 0
1253 return next(end + 1)
1253 return next(end + 1)
1254 return next(end)
1254 return next(end)
1255
1255
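# Editor's sketch (not part of queue.py): series_end above answers "where
# would the next qpush start?".  It finds the position just past the last
# applied patch and, unless all_patches is set, keeps skipping entries whose
# guards make them unpushable.  A standalone version over plain lists:

def next_push_index(series, applied_names, pushable):
    start = 0
    if applied_names:
        try:
            start = series.index(applied_names[-1]) + 1
        except ValueError:
            return 0
    while start < len(series) and not pushable(start):
        start += 1
    return start

# e.g. with series ['a', 'b', 'c'], 'a' applied and 'b' guarded:
# next_push_index(['a', 'b', 'c'], ['a'], lambda i: i != 1) -> 2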
1256 def appliedname(self, index):
1256 def appliedname(self, index):
1257 pname = self.applied[index].name
1257 pname = self.applied[index].name
1258 if not self.ui.verbose:
1258 if not self.ui.verbose:
1259 p = pname
1259 p = pname
1260 else:
1260 else:
1261 p = str(self.series.index(pname)) + " " + pname
1261 p = str(self.series.index(pname)) + " " + pname
1262 return p
1262 return p
1263
1263
1264 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1264 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1265 force=None):
1265 force=None):
1266 def checkseries(patchname):
1266 def checkseries(patchname):
1267 if patchname in self.series:
1267 if patchname in self.series:
1268 raise util.Abort(_('patch %s is already in the series file')
1268 raise util.Abort(_('patch %s is already in the series file')
1269 % patchname)
1269 % patchname)
1270 def checkfile(patchname):
1270 def checkfile(patchname):
1271 if not force and os.path.exists(self.join(patchname)):
1271 if not force and os.path.exists(self.join(patchname)):
1272 raise util.Abort(_('patch "%s" already exists')
1272 raise util.Abort(_('patch "%s" already exists')
1273 % patchname)
1273 % patchname)
1274
1274
1275 if rev:
1275 if rev:
1276 if files:
1276 if files:
1277 raise util.Abort(_('option "-r" not valid when importing '
1277 raise util.Abort(_('option "-r" not valid when importing '
1278 'files'))
1278 'files'))
1279 rev = [int(r) for r in cmdutil.revrange(self.ui, repo, rev)]
1279 rev = cmdutil.revrange(self.ui, repo, rev)
1280 rev.sort(lambda x, y: cmp(y, x))
1280 rev.sort(lambda x, y: cmp(y, x))
1281 if (len(files) > 1 or len(rev) > 1) and patchname:
1281 if (len(files) > 1 or len(rev) > 1) and patchname:
1282 raise util.Abort(_('option "-n" not valid when importing multiple '
1282 raise util.Abort(_('option "-n" not valid when importing multiple '
1283 'patches'))
1283 'patches'))
1284 i = 0
1284 i = 0
1285 added = []
1285 added = []
1286 if rev:
1286 if rev:
1287 # If mq patches are applied, we can only import revisions
1287 # If mq patches are applied, we can only import revisions
1288 # that form a linear path to qbase.
1288 # that form a linear path to qbase.
1289 # Otherwise, they should form a linear path to a head.
1289 # Otherwise, they should form a linear path to a head.
1290 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1290 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1291 if len(heads) > 1:
1291 if len(heads) > 1:
1292 raise util.Abort(_('revision %d is the root of more than one '
1292 raise util.Abort(_('revision %d is the root of more than one '
1293 'branch') % rev[-1])
1293 'branch') % rev[-1])
1294 if self.applied:
1294 if self.applied:
1295 base = revlog.hex(repo.changelog.node(rev[0]))
1295 base = revlog.hex(repo.changelog.node(rev[0]))
1296 if base in [n.rev for n in self.applied]:
1296 if base in [n.rev for n in self.applied]:
1297 raise util.Abort(_('revision %d is already managed')
1297 raise util.Abort(_('revision %d is already managed')
1298 % rev[0])
1298 % rev[0])
1299 if heads != [revlog.bin(self.applied[-1].rev)]:
1299 if heads != [revlog.bin(self.applied[-1].rev)]:
1300 raise util.Abort(_('revision %d is not the parent of '
1300 raise util.Abort(_('revision %d is not the parent of '
1301 'the queue') % rev[0])
1301 'the queue') % rev[0])
1302 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1302 base = repo.changelog.rev(revlog.bin(self.applied[0].rev))
1303 lastparent = repo.changelog.parentrevs(base)[0]
1303 lastparent = repo.changelog.parentrevs(base)[0]
1304 else:
1304 else:
1305 if heads != [repo.changelog.node(rev[0])]:
1305 if heads != [repo.changelog.node(rev[0])]:
1306 raise util.Abort(_('revision %d has unmanaged children')
1306 raise util.Abort(_('revision %d has unmanaged children')
1307 % rev[0])
1307 % rev[0])
1308 lastparent = None
1308 lastparent = None
1309
1309
1310 for r in rev:
1310 for r in rev:
1311 p1, p2 = repo.changelog.parentrevs(r)
1311 p1, p2 = repo.changelog.parentrevs(r)
1312 n = repo.changelog.node(r)
1312 n = repo.changelog.node(r)
1313 if p2 != -1:
1313 if p2 != -1:
1314 raise util.Abort(_('cannot import merge revision %d') % r)
1314 raise util.Abort(_('cannot import merge revision %d') % r)
1315 if lastparent and lastparent != r:
1315 if lastparent and lastparent != r:
1316 raise util.Abort(_('revision %d is not the parent of %d')
1316 raise util.Abort(_('revision %d is not the parent of %d')
1317 % (r, lastparent))
1317 % (r, lastparent))
1318 lastparent = p1
1318 lastparent = p1
1319
1319
1320 if not patchname:
1320 if not patchname:
1321 patchname = '%d.diff' % r
1321 patchname = '%d.diff' % r
1322 checkseries(patchname)
1322 checkseries(patchname)
1323 checkfile(patchname)
1323 checkfile(patchname)
1324 self.full_series.insert(0, patchname)
1324 self.full_series.insert(0, patchname)
1325
1325
1326 patchf = self.opener(patchname, "w")
1326 patchf = self.opener(patchname, "w")
1327 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1327 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1328 patchf.close()
1328 patchf.close()
1329
1329
1330 se = statusentry(revlog.hex(n), patchname)
1330 se = statusentry(revlog.hex(n), patchname)
1331 self.applied.insert(0, se)
1331 self.applied.insert(0, se)
1332
1332
1333 added.append(patchname)
1333 added.append(patchname)
1334 patchname = None
1334 patchname = None
1335 self.parse_series()
1335 self.parse_series()
1336 self.applied_dirty = 1
1336 self.applied_dirty = 1
1337
1337
1338 for filename in files:
1338 for filename in files:
1339 if existing:
1339 if existing:
1340 if filename == '-':
1340 if filename == '-':
1341 raise util.Abort(_('-e is incompatible with import from -'))
1341 raise util.Abort(_('-e is incompatible with import from -'))
1342 if not patchname:
1342 if not patchname:
1343 patchname = filename
1343 patchname = filename
1344 if not os.path.isfile(self.join(patchname)):
1344 if not os.path.isfile(self.join(patchname)):
1345 raise util.Abort(_("patch %s does not exist") % patchname)
1345 raise util.Abort(_("patch %s does not exist") % patchname)
1346 else:
1346 else:
1347 try:
1347 try:
1348 if filename == '-':
1348 if filename == '-':
1349 if not patchname:
1349 if not patchname:
1350 raise util.Abort(_('need --name to import a patch from -'))
1350 raise util.Abort(_('need --name to import a patch from -'))
1351 text = sys.stdin.read()
1351 text = sys.stdin.read()
1352 else:
1352 else:
1353 text = file(filename).read()
1353 text = file(filename).read()
1354 except IOError:
1354 except IOError:
1355 raise util.Abort(_("unable to read %s") % patchname)
1355 raise util.Abort(_("unable to read %s") % patchname)
1356 if not patchname:
1356 if not patchname:
1357 patchname = os.path.basename(filename)
1357 patchname = os.path.basename(filename)
1358 checkfile(patchname)
1358 checkfile(patchname)
1359 patchf = self.opener(patchname, "w")
1359 patchf = self.opener(patchname, "w")
1360 patchf.write(text)
1360 patchf.write(text)
1361 checkseries(patchname)
1361 checkseries(patchname)
1362 index = self.full_series_end() + i
1362 index = self.full_series_end() + i
1363 self.full_series[index:index] = [patchname]
1363 self.full_series[index:index] = [patchname]
1364 self.parse_series()
1364 self.parse_series()
1365 self.ui.warn("adding %s to series file\n" % patchname)
1365 self.ui.warn("adding %s to series file\n" % patchname)
1366 i += 1
1366 i += 1
1367 added.append(patchname)
1367 added.append(patchname)
1368 patchname = None
1368 patchname = None
1369 self.series_dirty = 1
1369 self.series_dirty = 1
1370 qrepo = self.qrepo()
1370 qrepo = self.qrepo()
1371 if qrepo:
1371 if qrepo:
1372 qrepo.add(added)
1372 qrepo.add(added)
1373
1373
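# Editor's sketch (not part of queue.py): when qimport is given --rev, the
# code above walks the revisions from newest to oldest and insists that each
# one is a non-merge changeset whose child is the previously imported
# revision, i.e. that the revisions form a single linear chain.  Below is a
# simplified, standalone version of that check, using a parents() callback
# instead of the real changelog; the helper name is hypothetical.

def check_linear(revs, parents):
    """revs must be sorted newest first; parents(r) -> (p1, p2)."""
    lastparent = None
    for r in revs:
        p1, p2 = parents(r)
        if p2 != -1:
            raise ValueError('cannot import merge revision %d' % r)
        if lastparent is not None and lastparent != r:
            raise ValueError('revision %d is not the parent of %d'
                             % (r, lastparent))
        lastparent = p1

# e.g. a chain 5 -> 4 -> 3 passes, while an unrelated revision fails:
# check_linear([5, 4, 3], lambda r: (r - 1, -1))   # ok
# check_linear([5, 2], lambda r: (r - 1, -1))      # raises ValueError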
1374 def delete(ui, repo, *patches, **opts):
1374 def delete(ui, repo, *patches, **opts):
1375 """remove patches from queue
1375 """remove patches from queue
1376
1376
1377 With --rev, mq will stop managing the named revisions. The
1377 With --rev, mq will stop managing the named revisions. The
1378 patches must be applied and at the base of the stack. This option
1378 patches must be applied and at the base of the stack. This option
1379 is useful when the patches have been applied upstream.
1379 is useful when the patches have been applied upstream.
1380
1380
1381 Otherwise, the patches must not be applied.
1381 Otherwise, the patches must not be applied.
1382
1382
1383 With --keep, the patch files are preserved in the patch directory."""
1383 With --keep, the patch files are preserved in the patch directory."""
1384 q = repo.mq
1384 q = repo.mq
1385 q.delete(repo, patches, opts)
1385 q.delete(repo, patches, opts)
1386 q.save_dirty()
1386 q.save_dirty()
1387 return 0
1387 return 0
1388
1388
1389 def applied(ui, repo, patch=None, **opts):
1389 def applied(ui, repo, patch=None, **opts):
1390 """print the patches already applied"""
1390 """print the patches already applied"""
1391 q = repo.mq
1391 q = repo.mq
1392 if patch:
1392 if patch:
1393 if patch not in q.series:
1393 if patch not in q.series:
1394 raise util.Abort(_("patch %s is not in series file") % patch)
1394 raise util.Abort(_("patch %s is not in series file") % patch)
1395 end = q.series.index(patch) + 1
1395 end = q.series.index(patch) + 1
1396 else:
1396 else:
1397 end = len(q.applied)
1397 end = len(q.applied)
1398 if not end:
1398 if not end:
1399 return
1399 return
1400
1400
1401 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1401 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1402
1402
1403 def unapplied(ui, repo, patch=None, **opts):
1403 def unapplied(ui, repo, patch=None, **opts):
1404 """print the patches not yet applied"""
1404 """print the patches not yet applied"""
1405 q = repo.mq
1405 q = repo.mq
1406 if patch:
1406 if patch:
1407 if patch not in q.series:
1407 if patch not in q.series:
1408 raise util.Abort(_("patch %s is not in series file") % patch)
1408 raise util.Abort(_("patch %s is not in series file") % patch)
1409 start = q.series.index(patch) + 1
1409 start = q.series.index(patch) + 1
1410 else:
1410 else:
1411 start = q.series_end()
1411 start = q.series_end()
1412 q.qseries(repo, start=start, summary=opts.get('summary'))
1412 q.qseries(repo, start=start, summary=opts.get('summary'))
1413
1413
1414 def qimport(ui, repo, *filename, **opts):
1414 def qimport(ui, repo, *filename, **opts):
1415 """import a patch
1415 """import a patch
1416
1416
1417 The patch will have the same name as its source file unless you
1417 The patch will have the same name as its source file unless you
1418 give it a new one with --name.
1418 give it a new one with --name.
1419
1419
1420 You can register an existing patch inside the patch directory
1420 You can register an existing patch inside the patch directory
1421 with the --existing flag.
1421 with the --existing flag.
1422
1422
1423 With --force, an existing patch of the same name will be overwritten.
1423 With --force, an existing patch of the same name will be overwritten.
1424
1424
1425 An existing changeset may be placed under mq control with --rev
1425 An existing changeset may be placed under mq control with --rev
1426 (e.g. qimport --rev tip -n patch will place tip under mq control).
1426 (e.g. qimport --rev tip -n patch will place tip under mq control).
1427 """
1427 """
1428 q = repo.mq
1428 q = repo.mq
1429 q.qimport(repo, filename, patchname=opts['name'],
1429 q.qimport(repo, filename, patchname=opts['name'],
1430 existing=opts['existing'], force=opts['force'], rev=opts['rev'])
1430 existing=opts['existing'], force=opts['force'], rev=opts['rev'])
1431 q.save_dirty()
1431 q.save_dirty()
1432 return 0
1432 return 0
1433
1433
1434 def init(ui, repo, **opts):
1434 def init(ui, repo, **opts):
1435 """init a new queue repository
1435 """init a new queue repository
1436
1436
1437 The queue repository is unversioned by default. If -c is
1437 The queue repository is unversioned by default. If -c is
1438 specified, qinit will create a separate nested repository
1438 specified, qinit will create a separate nested repository
1439 for patches. Use qcommit to commit changes to this queue
1439 for patches. Use qcommit to commit changes to this queue
1440 repository."""
1440 repository."""
1441 q = repo.mq
1441 q = repo.mq
1442 r = q.init(repo, create=opts['create_repo'])
1442 r = q.init(repo, create=opts['create_repo'])
1443 q.save_dirty()
1443 q.save_dirty()
1444 if r:
1444 if r:
1445 fp = r.wopener('.hgignore', 'w')
1445 fp = r.wopener('.hgignore', 'w')
1446 print >> fp, 'syntax: glob'
1446 print >> fp, 'syntax: glob'
1447 print >> fp, 'status'
1447 print >> fp, 'status'
1448 fp.close()
1448 fp.close()
1449 r.wopener('series', 'w').close()
1449 r.wopener('series', 'w').close()
1450 r.add(['.hgignore', 'series'])
1450 r.add(['.hgignore', 'series'])
1451 return 0
1451 return 0
1452
1452
1453 def clone(ui, source, dest=None, **opts):
1453 def clone(ui, source, dest=None, **opts):
1454 '''clone main and patch repository at same time
1454 '''clone main and patch repository at same time
1455
1455
1456 If the source is local, the destination will have no patches applied. If
1456 If the source is local, the destination will have no patches applied. If
1457 the source is remote, this command cannot check whether patches are
1457 the source is remote, this command cannot check whether patches are
1458 applied in the source, so it cannot guarantee that patches are not
1458 applied in the source, so it cannot guarantee that patches are not
1459 applied in the destination. If you clone a remote repository, make sure
1459 applied in the destination. If you clone a remote repository, make sure
1460 beforehand that it has no patches applied.
1460 beforehand that it has no patches applied.
1461
1461
1462 Source patch repository is looked for in <src>/.hg/patches by
1462 Source patch repository is looked for in <src>/.hg/patches by
1463 default. Use -p <url> to change.
1463 default. Use -p <url> to change.
1464 '''
1464 '''
1465 commands.setremoteconfig(ui, opts)
1465 commands.setremoteconfig(ui, opts)
1466 if dest is None:
1466 if dest is None:
1467 dest = hg.defaultdest(source)
1467 dest = hg.defaultdest(source)
1468 sr = hg.repository(ui, ui.expandpath(source))
1468 sr = hg.repository(ui, ui.expandpath(source))
1469 qbase, destrev = None, None
1469 qbase, destrev = None, None
1470 if sr.local():
1470 if sr.local():
1471 reposetup(ui, sr)
1471 reposetup(ui, sr)
1472 if sr.mq.applied:
1472 if sr.mq.applied:
1473 qbase = revlog.bin(sr.mq.applied[0].rev)
1473 qbase = revlog.bin(sr.mq.applied[0].rev)
1474 if not hg.islocal(dest):
1474 if not hg.islocal(dest):
1475 destrev = sr.parents(qbase)[0]
1475 destrev = sr.parents(qbase)[0]
1476 ui.note(_('cloning main repo\n'))
1476 ui.note(_('cloning main repo\n'))
1477 sr, dr = hg.clone(ui, sr, dest,
1477 sr, dr = hg.clone(ui, sr, dest,
1478 pull=opts['pull'],
1478 pull=opts['pull'],
1479 rev=destrev,
1479 rev=destrev,
1480 update=False,
1480 update=False,
1481 stream=opts['uncompressed'])
1481 stream=opts['uncompressed'])
1482 ui.note(_('cloning patch repo\n'))
1482 ui.note(_('cloning patch repo\n'))
1483 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1483 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1484 dr.url() + '/.hg/patches',
1484 dr.url() + '/.hg/patches',
1485 pull=opts['pull'],
1485 pull=opts['pull'],
1486 update=not opts['noupdate'],
1486 update=not opts['noupdate'],
1487 stream=opts['uncompressed'])
1487 stream=opts['uncompressed'])
1488 if dr.local():
1488 if dr.local():
1489 if qbase:
1489 if qbase:
1490 ui.note(_('stripping applied patches from destination repo\n'))
1490 ui.note(_('stripping applied patches from destination repo\n'))
1491 reposetup(ui, dr)
1491 reposetup(ui, dr)
1492 dr.mq.strip(dr, qbase, update=False, backup=None)
1492 dr.mq.strip(dr, qbase, update=False, backup=None)
1493 if not opts['noupdate']:
1493 if not opts['noupdate']:
1494 ui.note(_('updating destination repo\n'))
1494 ui.note(_('updating destination repo\n'))
1495 hg.update(dr, dr.changelog.tip())
1495 hg.update(dr, dr.changelog.tip())
1496
1496
1497 def commit(ui, repo, *pats, **opts):
1497 def commit(ui, repo, *pats, **opts):
1498 """commit changes in the queue repository"""
1498 """commit changes in the queue repository"""
1499 q = repo.mq
1499 q = repo.mq
1500 r = q.qrepo()
1500 r = q.qrepo()
1501 if not r: raise util.Abort('no queue repository')
1501 if not r: raise util.Abort('no queue repository')
1502 commands.commit(r.ui, r, *pats, **opts)
1502 commands.commit(r.ui, r, *pats, **opts)
1503
1503
1504 def series(ui, repo, **opts):
1504 def series(ui, repo, **opts):
1505 """print the entire series file"""
1505 """print the entire series file"""
1506 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1506 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1507 return 0
1507 return 0
1508
1508
1509 def top(ui, repo, **opts):
1509 def top(ui, repo, **opts):
1510 """print the name of the current patch"""
1510 """print the name of the current patch"""
1511 q = repo.mq
1511 q = repo.mq
1512 t = len(q.applied)
1512 t = len(q.applied)
1513 if t:
1513 if t:
1514 return q.qseries(repo, start=t-1, length=1, status='A',
1514 return q.qseries(repo, start=t-1, length=1, status='A',
1515 summary=opts.get('summary'))
1515 summary=opts.get('summary'))
1516 else:
1516 else:
1517 ui.write("No patches applied\n")
1517 ui.write("No patches applied\n")
1518 return 1
1518 return 1
1519
1519
1520 def next(ui, repo, **opts):
1520 def next(ui, repo, **opts):
1521 """print the name of the next patch"""
1521 """print the name of the next patch"""
1522 q = repo.mq
1522 q = repo.mq
1523 end = q.series_end()
1523 end = q.series_end()
1524 if end == len(q.series):
1524 if end == len(q.series):
1525 ui.write("All patches applied\n")
1525 ui.write("All patches applied\n")
1526 return 1
1526 return 1
1527 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1527 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1528
1528
1529 def prev(ui, repo, **opts):
1529 def prev(ui, repo, **opts):
1530 """print the name of the previous patch"""
1530 """print the name of the previous patch"""
1531 q = repo.mq
1531 q = repo.mq
1532 l = len(q.applied)
1532 l = len(q.applied)
1533 if l == 1:
1533 if l == 1:
1534 ui.write("Only one patch applied\n")
1534 ui.write("Only one patch applied\n")
1535 return 1
1535 return 1
1536 if not l:
1536 if not l:
1537 ui.write("No patches applied\n")
1537 ui.write("No patches applied\n")
1538 return 1
1538 return 1
1539 return q.qseries(repo, start=l-2, length=1, status='A',
1539 return q.qseries(repo, start=l-2, length=1, status='A',
1540 summary=opts.get('summary'))
1540 summary=opts.get('summary'))
1541
1541
1542 def new(ui, repo, patch, **opts):
1542 def new(ui, repo, patch, **opts):
1543 """create a new patch
1543 """create a new patch
1544
1544
1545 qnew creates a new patch on top of the currently-applied patch
1545 qnew creates a new patch on top of the currently-applied patch
1546 (if any). It will refuse to run if there are any outstanding
1546 (if any). It will refuse to run if there are any outstanding
1547 changes unless -f is specified, in which case the patch will
1547 changes unless -f is specified, in which case the patch will
1548 be initialised with them.
1548 be initialised with them.
1549
1549
1550 -e, -m or -l set the patch header as well as the commit message.
1550 -e, -m or -l set the patch header as well as the commit message.
1551 If none is specified, the patch header is empty and the
1551 If none is specified, the patch header is empty and the
1552 commit message is 'New patch: PATCH'"""
1552 commit message is 'New patch: PATCH'"""
1553 q = repo.mq
1553 q = repo.mq
1554 message = commands.logmessage(opts)
1554 message = commands.logmessage(opts)
1555 if opts['edit']:
1555 if opts['edit']:
1556 message = ui.edit(message, ui.username())
1556 message = ui.edit(message, ui.username())
1557 q.new(repo, patch, msg=message, force=opts['force'])
1557 q.new(repo, patch, msg=message, force=opts['force'])
1558 q.save_dirty()
1558 q.save_dirty()
1559 return 0
1559 return 0
1560
1560
1561 def refresh(ui, repo, *pats, **opts):
1561 def refresh(ui, repo, *pats, **opts):
1562 """update the current patch
1562 """update the current patch
1563
1563
1564 If any file patterns are provided, the refreshed patch will contain only
1564 If any file patterns are provided, the refreshed patch will contain only
1565 the modifications that match those patterns; the remaining modifications
1565 the modifications that match those patterns; the remaining modifications
1566 will stay in the working directory.
1566 will stay in the working directory.
1567 """
1567 """
1568 q = repo.mq
1568 q = repo.mq
1569 message = commands.logmessage(opts)
1569 message = commands.logmessage(opts)
1570 if opts['edit']:
1570 if opts['edit']:
1571 if message:
1571 if message:
1572 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1572 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1573 patch = q.applied[-1].name
1573 patch = q.applied[-1].name
1574 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1574 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1575 message = ui.edit('\n'.join(message), user or ui.username())
1575 message = ui.edit('\n'.join(message), user or ui.username())
1576 ret = q.refresh(repo, pats, msg=message, **opts)
1576 ret = q.refresh(repo, pats, msg=message, **opts)
1577 q.save_dirty()
1577 q.save_dirty()
1578 return ret
1578 return ret
1579
1579
1580 def diff(ui, repo, *pats, **opts):
1580 def diff(ui, repo, *pats, **opts):
1581 """diff of the current patch"""
1581 """diff of the current patch"""
1582 repo.mq.diff(repo, pats, opts)
1582 repo.mq.diff(repo, pats, opts)
1583 return 0
1583 return 0
1584
1584
1585 def fold(ui, repo, *files, **opts):
1585 def fold(ui, repo, *files, **opts):
1586 """fold the named patches into the current patch
1586 """fold the named patches into the current patch
1587
1587
1588 Patches must not yet be applied. Each patch will be successively
1588 Patches must not yet be applied. Each patch will be successively
1589 applied to the current patch in the order given. If all the
1589 applied to the current patch in the order given. If all the
1590 patches apply successfully, the current patch will be refreshed
1590 patches apply successfully, the current patch will be refreshed
1591 with the new cumulative patch, and the folded patches will
1591 with the new cumulative patch, and the folded patches will
1592 be deleted. With -k/--keep, the folded patch files will not
1592 be deleted. With -k/--keep, the folded patch files will not
1593 be removed afterwards.
1593 be removed afterwards.
1594
1594
1595 The header for each folded patch will be concatenated with
1595 The header for each folded patch will be concatenated with
1596 the current patch header, separated by a line of '* * *'."""
1596 the current patch header, separated by a line of '* * *'."""
1597
1597
1598 q = repo.mq
1598 q = repo.mq
1599
1599
1600 if not files:
1600 if not files:
1601 raise util.Abort(_('qfold requires at least one patch name'))
1601 raise util.Abort(_('qfold requires at least one patch name'))
1602 if not q.check_toppatch(repo):
1602 if not q.check_toppatch(repo):
1603 raise util.Abort(_('No patches applied'))
1603 raise util.Abort(_('No patches applied'))
1604
1604
1605 message = commands.logmessage(opts)
1605 message = commands.logmessage(opts)
1606 if opts['edit']:
1606 if opts['edit']:
1607 if message:
1607 if message:
1608 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1608 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1609
1609
1610 parent = q.lookup('qtip')
1610 parent = q.lookup('qtip')
1611 patches = []
1611 patches = []
1612 messages = []
1612 messages = []
1613 for f in files:
1613 for f in files:
1614 p = q.lookup(f)
1614 p = q.lookup(f)
1615 if p in patches or p == parent:
1615 if p in patches or p == parent:
1616 ui.warn(_('Skipping already folded patch %s') % p)
1616 ui.warn(_('Skipping already folded patch %s') % p)
1617 if q.isapplied(p):
1617 if q.isapplied(p):
1618 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1618 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1619 patches.append(p)
1619 patches.append(p)
1620
1620
1621 for p in patches:
1621 for p in patches:
1622 if not message:
1622 if not message:
1623 messages.append(q.readheaders(p)[0])
1623 messages.append(q.readheaders(p)[0])
1624 pf = q.join(p)
1624 pf = q.join(p)
1625 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1625 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1626 if not patchsuccess:
1626 if not patchsuccess:
1627 raise util.Abort(_('Error folding patch %s') % p)
1627 raise util.Abort(_('Error folding patch %s') % p)
1628 patch.updatedir(ui, repo, files)
1628 patch.updatedir(ui, repo, files)
1629
1629
1630 if not message:
1630 if not message:
1631 message, comments, user = q.readheaders(parent)[0:3]
1631 message, comments, user = q.readheaders(parent)[0:3]
1632 for msg in messages:
1632 for msg in messages:
1633 message.append('* * *')
1633 message.append('* * *')
1634 message.extend(msg)
1634 message.extend(msg)
1635 message = '\n'.join(message)
1635 message = '\n'.join(message)
1636
1636
1637 if opts['edit']:
1637 if opts['edit']:
1638 message = ui.edit(message, user or ui.username())
1638 message = ui.edit(message, user or ui.username())
1639
1639
1640 q.refresh(repo, msg=message)
1640 q.refresh(repo, msg=message)
1641 q.delete(repo, patches, opts)
1641 q.delete(repo, patches, opts)
1642 q.save_dirty()
1642 q.save_dirty()
1643
1643
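# Editor's sketch (not part of queue.py): how qfold above assembles the new
# patch header -- the current patch's message followed by each folded patch's
# message, separated by a '* * *' line.  Message arguments are lists of
# lines, as returned by readheaders(); the helper name is hypothetical.

def fold_messages(current, folded):
    lines = list(current)
    for msg in folded:
        lines.append('* * *')
        lines.extend(msg)
    return '\n'.join(lines)

# e.g. fold_messages(['fix parser'], [['add tests'], ['update docs']])
# -> 'fix parser\n* * *\nadd tests\n* * *\nupdate docs'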
1644 def guard(ui, repo, *args, **opts):
1644 def guard(ui, repo, *args, **opts):
1645 '''set or print guards for a patch
1645 '''set or print guards for a patch
1646
1646
1647 Guards control whether a patch can be pushed. A patch with no
1647 Guards control whether a patch can be pushed. A patch with no
1648 guards is always pushed. A patch with a positive guard ("+foo") is
1648 guards is always pushed. A patch with a positive guard ("+foo") is
1649 pushed only if the qselect command has activated it. A patch with
1649 pushed only if the qselect command has activated it. A patch with
1650 a negative guard ("-foo") is never pushed if the qselect command
1650 a negative guard ("-foo") is never pushed if the qselect command
1651 has activated it.
1651 has activated it.
1652
1652
1653 With no arguments, print the currently active guards.
1653 With no arguments, print the currently active guards.
1654 With arguments, set guards for the named patch.
1654 With arguments, set guards for the named patch.
1655
1655
1656 To set a negative guard "-foo" on topmost patch ("--" is needed so
1656 To set a negative guard "-foo" on topmost patch ("--" is needed so
1657 hg will not interpret "-foo" as an option):
1657 hg will not interpret "-foo" as an option):
1658 hg qguard -- -foo
1658 hg qguard -- -foo
1659
1659
1660 To set guards on another patch:
1660 To set guards on another patch:
1661 hg qguard other.patch +2.6.17 -stable
1661 hg qguard other.patch +2.6.17 -stable
1662 '''
1662 '''
1663 def status(idx):
1663 def status(idx):
1664 guards = q.series_guards[idx] or ['unguarded']
1664 guards = q.series_guards[idx] or ['unguarded']
1665 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1665 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1666 q = repo.mq
1666 q = repo.mq
1667 patch = None
1667 patch = None
1668 args = list(args)
1668 args = list(args)
1669 if opts['list']:
1669 if opts['list']:
1670 if args or opts['none']:
1670 if args or opts['none']:
1671 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1671 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1672 for i in xrange(len(q.series)):
1672 for i in xrange(len(q.series)):
1673 status(i)
1673 status(i)
1674 return
1674 return
1675 if not args or args[0][0:1] in '-+':
1675 if not args or args[0][0:1] in '-+':
1676 if not q.applied:
1676 if not q.applied:
1677 raise util.Abort(_('no patches applied'))
1677 raise util.Abort(_('no patches applied'))
1678 patch = q.applied[-1].name
1678 patch = q.applied[-1].name
1679 if patch is None and args[0][0:1] not in '-+':
1679 if patch is None and args[0][0:1] not in '-+':
1680 patch = args.pop(0)
1680 patch = args.pop(0)
1681 if patch is None:
1681 if patch is None:
1682 raise util.Abort(_('no patch to work with'))
1682 raise util.Abort(_('no patch to work with'))
1683 if args or opts['none']:
1683 if args or opts['none']:
1684 q.set_guards(q.find_series(patch), args)
1684 q.set_guards(q.find_series(patch), args)
1685 q.save_dirty()
1685 q.save_dirty()
1686 else:
1686 else:
1687 status(q.series.index(q.lookup(patch)))
1687 status(q.series.index(q.lookup(patch)))
1688
1688
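# Editor's sketch (not part of queue.py): the push/skip rule that the qguard
# and qselect docstrings describe.  A patch is skipped if any of its negative
# guards ('-foo') matches an active guard; otherwise it is pushed if it has
# no positive guards, or if at least one positive guard ('+foo') is active.
# This standalone predicate is illustrative only.

def guard_allows_push(patch_guards, active):
    active = set(active)
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    negatives = [g[1:] for g in patch_guards if g.startswith('-')]
    if any(g in active for g in negatives):
        return False
    if positives and not any(g in active for g in positives):
        return False
    return True

# e.g. with the 'stable' guard selected:
# guard_allows_push(['-stable'], ['stable'])  -> False   (foo.patch skipped)
# guard_allows_push(['+stable'], ['stable'])  -> True    (bar.patch pushed)
# guard_allows_push([], ['stable'])           -> True    (unguarded patches)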
1689 def header(ui, repo, patch=None):
1689 def header(ui, repo, patch=None):
1690 """Print the header of the topmost or specified patch"""
1690 """Print the header of the topmost or specified patch"""
1691 q = repo.mq
1691 q = repo.mq
1692
1692
1693 if patch:
1693 if patch:
1694 patch = q.lookup(patch)
1694 patch = q.lookup(patch)
1695 else:
1695 else:
1696 if not q.applied:
1696 if not q.applied:
1697 ui.write('No patches applied\n')
1697 ui.write('No patches applied\n')
1698 return 1
1698 return 1
1699 patch = q.lookup('qtip')
1699 patch = q.lookup('qtip')
1700 message = repo.mq.readheaders(patch)[0]
1700 message = repo.mq.readheaders(patch)[0]
1701
1701
1702 ui.write('\n'.join(message) + '\n')
1702 ui.write('\n'.join(message) + '\n')
1703
1703
1704 def lastsavename(path):
1704 def lastsavename(path):
1705 (directory, base) = os.path.split(path)
1705 (directory, base) = os.path.split(path)
1706 names = os.listdir(directory)
1706 names = os.listdir(directory)
1707 namere = re.compile("%s.([0-9]+)" % base)
1707 namere = re.compile("%s.([0-9]+)" % base)
1708 maxindex = None
1708 maxindex = None
1709 maxname = None
1709 maxname = None
1710 for f in names:
1710 for f in names:
1711 m = namere.match(f)
1711 m = namere.match(f)
1712 if m:
1712 if m:
1713 index = int(m.group(1))
1713 index = int(m.group(1))
1714 if maxindex == None or index > maxindex:
1714 if maxindex == None or index > maxindex:
1715 maxindex = index
1715 maxindex = index
1716 maxname = f
1716 maxname = f
1717 if maxname:
1717 if maxname:
1718 return (os.path.join(directory, maxname), maxindex)
1718 return (os.path.join(directory, maxname), maxindex)
1719 return (None, None)
1719 return (None, None)
1720
1720
1721 def savename(path):
1721 def savename(path):
1722 (last, index) = lastsavename(path)
1722 (last, index) = lastsavename(path)
1723 if last is None:
1723 if last is None:
1724 index = 0
1724 index = 0
1725 newpath = path + ".%d" % (index + 1)
1725 newpath = path + ".%d" % (index + 1)
1726 return newpath
1726 return newpath
1727
1727
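# Editor's sketch (not part of queue.py): lastsavename/savename above number
# saved queue directories by appending ".1", ".2", ... to the base path, so
# repeated saves with the copy option produce e.g. .hg/patches.1,
# .hg/patches.2.  A quick demonstration in a temporary directory; it relies
# on the two functions defined above.

import os
import tempfile

def demo_savename():
    d = tempfile.mkdtemp()
    base = os.path.join(d, 'patches')
    os.mkdir(base + '.1')
    os.mkdir(base + '.2')
    assert lastsavename(base) == (base + '.2', 2)
    assert savename(base) == base + '.3'

# demo_savename()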
1728 def push(ui, repo, patch=None, **opts):
1728 def push(ui, repo, patch=None, **opts):
1729 """push the next patch onto the stack"""
1729 """push the next patch onto the stack"""
1730 q = repo.mq
1730 q = repo.mq
1731 mergeq = None
1731 mergeq = None
1732
1732
1733 if opts['all']:
1733 if opts['all']:
1734 patch = q.series[-1]
1734 patch = q.series[-1]
1735 if opts['merge']:
1735 if opts['merge']:
1736 if opts['name']:
1736 if opts['name']:
1737 newpath = opts['name']
1737 newpath = opts['name']
1738 else:
1738 else:
1739 newpath, i = lastsavename(q.path)
1739 newpath, i = lastsavename(q.path)
1740 if not newpath:
1740 if not newpath:
1741 ui.warn("no saved queues found, please use -n\n")
1741 ui.warn("no saved queues found, please use -n\n")
1742 return 1
1742 return 1
1743 mergeq = queue(ui, repo.join(""), newpath)
1743 mergeq = queue(ui, repo.join(""), newpath)
1744 ui.warn("merging with queue at: %s\n" % mergeq.path)
1744 ui.warn("merging with queue at: %s\n" % mergeq.path)
1745 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1745 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1746 mergeq=mergeq)
1746 mergeq=mergeq)
1747 q.save_dirty()
1747 q.save_dirty()
1748 return ret
1748 return ret
1749
1749
1750 def pop(ui, repo, patch=None, **opts):
1750 def pop(ui, repo, patch=None, **opts):
1751 """pop the current patch off the stack"""
1751 """pop the current patch off the stack"""
1752 localupdate = True
1752 localupdate = True
1753 if opts['name']:
1753 if opts['name']:
1754 q = queue(ui, repo.join(""), repo.join(opts['name']))
1754 q = queue(ui, repo.join(""), repo.join(opts['name']))
1755 ui.warn('using patch queue: %s\n' % q.path)
1755 ui.warn('using patch queue: %s\n' % q.path)
1756 localupdate = False
1756 localupdate = False
1757 else:
1757 else:
1758 q = repo.mq
1758 q = repo.mq
1759 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1759 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1760 q.save_dirty()
1760 q.save_dirty()
1761 return 0
1761 return 0
1762
1762
1763 def rename(ui, repo, patch, name=None, **opts):
1763 def rename(ui, repo, patch, name=None, **opts):
1764 """rename a patch
1764 """rename a patch
1765
1765
1766 With one argument, renames the current patch to PATCH1.
1766 With one argument, renames the current patch to PATCH1.
1767 With two arguments, renames PATCH1 to PATCH2."""
1767 With two arguments, renames PATCH1 to PATCH2."""
1768
1768
1769 q = repo.mq
1769 q = repo.mq
1770
1770
1771 if not name:
1771 if not name:
1772 name = patch
1772 name = patch
1773 patch = None
1773 patch = None
1774
1774
1775 if patch:
1775 if patch:
1776 patch = q.lookup(patch)
1776 patch = q.lookup(patch)
1777 else:
1777 else:
1778 if not q.applied:
1778 if not q.applied:
1779 ui.write(_('No patches applied\n'))
1779 ui.write(_('No patches applied\n'))
1780 return
1780 return
1781 patch = q.lookup('qtip')
1781 patch = q.lookup('qtip')
1782 absdest = q.join(name)
1782 absdest = q.join(name)
1783 if os.path.isdir(absdest):
1783 if os.path.isdir(absdest):
1784 name = os.path.join(name, os.path.basename(patch))
1784 name = os.path.join(name, os.path.basename(patch))
1785 absdest = q.join(name)
1785 absdest = q.join(name)
1786 if os.path.exists(absdest):
1786 if os.path.exists(absdest):
1787 raise util.Abort(_('%s already exists') % absdest)
1787 raise util.Abort(_('%s already exists') % absdest)
1788
1788
1789 if name in q.series:
1789 if name in q.series:
1790 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1790 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1791
1791
1792 if ui.verbose:
1792 if ui.verbose:
1793 ui.write('Renaming %s to %s\n' % (patch, name))
1793 ui.write('Renaming %s to %s\n' % (patch, name))
1794 i = q.find_series(patch)
1794 i = q.find_series(patch)
1795 q.full_series[i] = name
1795 q.full_series[i] = name
1796 q.parse_series()
1796 q.parse_series()
1797 q.series_dirty = 1
1797 q.series_dirty = 1
1798
1798
1799 info = q.isapplied(patch)
1799 info = q.isapplied(patch)
1800 if info:
1800 if info:
1801 q.applied[info[0]] = statusentry(info[1], name)
1801 q.applied[info[0]] = statusentry(info[1], name)
1802 q.applied_dirty = 1
1802 q.applied_dirty = 1
1803
1803
1804 util.rename(q.join(patch), absdest)
1804 util.rename(q.join(patch), absdest)
1805 r = q.qrepo()
1805 r = q.qrepo()
1806 if r:
1806 if r:
1807 wlock = r.wlock()
1807 wlock = r.wlock()
1808 if r.dirstate.state(name) == 'r':
1808 if r.dirstate.state(name) == 'r':
1809 r.undelete([name], wlock)
1809 r.undelete([name], wlock)
1810 r.copy(patch, name, wlock)
1810 r.copy(patch, name, wlock)
1811 r.remove([patch], False, wlock)
1811 r.remove([patch], False, wlock)
1812
1812
1813 q.save_dirty()
1813 q.save_dirty()
1814
1814
1815 def restore(ui, repo, rev, **opts):
1815 def restore(ui, repo, rev, **opts):
1816 """restore the queue state saved by a rev"""
1816 """restore the queue state saved by a rev"""
1817 rev = repo.lookup(rev)
1817 rev = repo.lookup(rev)
1818 q = repo.mq
1818 q = repo.mq
1819 q.restore(repo, rev, delete=opts['delete'],
1819 q.restore(repo, rev, delete=opts['delete'],
1820 qupdate=opts['update'])
1820 qupdate=opts['update'])
1821 q.save_dirty()
1821 q.save_dirty()
1822 return 0
1822 return 0
1823
1823
1824 def save(ui, repo, **opts):
1824 def save(ui, repo, **opts):
1825 """save current queue state"""
1825 """save current queue state"""
1826 q = repo.mq
1826 q = repo.mq
1827 message = commands.logmessage(opts)
1827 message = commands.logmessage(opts)
1828 ret = q.save(repo, msg=message)
1828 ret = q.save(repo, msg=message)
1829 if ret:
1829 if ret:
1830 return ret
1830 return ret
1831 q.save_dirty()
1831 q.save_dirty()
1832 if opts['copy']:
1832 if opts['copy']:
1833 path = q.path
1833 path = q.path
1834 if opts['name']:
1834 if opts['name']:
1835 newpath = os.path.join(q.basepath, opts['name'])
1835 newpath = os.path.join(q.basepath, opts['name'])
1836 if os.path.exists(newpath):
1836 if os.path.exists(newpath):
1837 if not os.path.isdir(newpath):
1837 if not os.path.isdir(newpath):
1838 raise util.Abort(_('destination %s exists and is not '
1838 raise util.Abort(_('destination %s exists and is not '
1839 'a directory') % newpath)
1839 'a directory') % newpath)
1840 if not opts['force']:
1840 if not opts['force']:
1841 raise util.Abort(_('destination %s exists, '
1841 raise util.Abort(_('destination %s exists, '
1842 'use -f to force') % newpath)
1842 'use -f to force') % newpath)
1843 else:
1843 else:
1844 newpath = savename(path)
1844 newpath = savename(path)
1845 ui.warn("copy %s to %s\n" % (path, newpath))
1845 ui.warn("copy %s to %s\n" % (path, newpath))
1846 util.copyfiles(path, newpath)
1846 util.copyfiles(path, newpath)
1847 if opts['empty']:
1847 if opts['empty']:
1848 try:
1848 try:
1849 os.unlink(q.join(q.status_path))
1849 os.unlink(q.join(q.status_path))
1850 except:
1850 except:
1851 pass
1851 pass
1852 return 0
1852 return 0
1853
1853
1854 def strip(ui, repo, rev, **opts):
1854 def strip(ui, repo, rev, **opts):
1855 """strip a revision and all later revs on the same branch"""
1855 """strip a revision and all later revs on the same branch"""
1856 rev = repo.lookup(rev)
1856 rev = repo.lookup(rev)
1857 backup = 'all'
1857 backup = 'all'
1858 if opts['backup']:
1858 if opts['backup']:
1859 backup = 'strip'
1859 backup = 'strip'
1860 elif opts['nobackup']:
1860 elif opts['nobackup']:
1861 backup = 'none'
1861 backup = 'none'
1862 update = repo.dirstate.parents()[0] != revlog.nullid
1862 update = repo.dirstate.parents()[0] != revlog.nullid
1863 repo.mq.strip(repo, rev, backup=backup, update=update)
1863 repo.mq.strip(repo, rev, backup=backup, update=update)
1864 return 0
1864 return 0
1865
1865
1866 def select(ui, repo, *args, **opts):
1866 def select(ui, repo, *args, **opts):
1867 '''set or print guarded patches to push
1867 '''set or print guarded patches to push
1868
1868
1869 Use the qguard command to set or print guards on a patch, then use
1869 Use the qguard command to set or print guards on a patch, then use
1870 qselect to tell mq which guards to use. A patch will be pushed if it
1870 qselect to tell mq which guards to use. A patch will be pushed if it
1871 has no guards or any positive guards match the currently selected guard,
1871 has no guards or any positive guards match the currently selected guard,
1872 but will not be pushed if any negative guards match the current guard.
1872 but will not be pushed if any negative guards match the current guard.
1873 For example:
1873 For example:
1874
1874
1875 qguard foo.patch -stable (negative guard)
1875 qguard foo.patch -stable (negative guard)
1876 qguard bar.patch +stable (positive guard)
1876 qguard bar.patch +stable (positive guard)
1877 qselect stable
1877 qselect stable
1878
1878
1879 This activates the "stable" guard. mq will skip foo.patch (because
1879 This activates the "stable" guard. mq will skip foo.patch (because
1880 it has a negative match) but push bar.patch (because it
1880 it has a negative match) but push bar.patch (because it
1881 has a positive match).
1881 has a positive match).
1882
1882
1883 With no arguments, prints the currently active guards.
1883 With no arguments, prints the currently active guards.
1884 With one argument, sets the active guard.
1884 With one argument, sets the active guard.
1885
1885
1886 Use -n/--none to deactivate guards (no other arguments needed).
1886 Use -n/--none to deactivate guards (no other arguments needed).
1887 When no guards are active, patches with positive guards are skipped
1887 When no guards are active, patches with positive guards are skipped
1888 and patches with negative guards are pushed.
1888 and patches with negative guards are pushed.
1889
1889
1890 qselect can change the guards on applied patches. It does not pop
1890 qselect can change the guards on applied patches. It does not pop
1891 guarded patches by default. Use --pop to pop back to the last applied
1891 guarded patches by default. Use --pop to pop back to the last applied
1892 patch that is not guarded. Use --reapply (which implies --pop) to push
1892 patch that is not guarded. Use --reapply (which implies --pop) to push
1893 back to the current patch afterwards, but skip guarded patches.
1893 back to the current patch afterwards, but skip guarded patches.
1894
1894
1895 Use -s/--series to print a list of all guards in the series file (no
1895 Use -s/--series to print a list of all guards in the series file (no
1896 other arguments needed). Use -v for more information.'''
1896 other arguments needed). Use -v for more information.'''
1897
1897
1898 q = repo.mq
1898 q = repo.mq
1899 guards = q.active()
1899 guards = q.active()
1900 if args or opts['none']:
1900 if args or opts['none']:
1901 old_unapplied = q.unapplied(repo)
1901 old_unapplied = q.unapplied(repo)
1902 old_guarded = [i for i in xrange(len(q.applied)) if
1902 old_guarded = [i for i in xrange(len(q.applied)) if
1903 not q.pushable(i)[0]]
1903 not q.pushable(i)[0]]
1904 q.set_active(args)
1904 q.set_active(args)
1905 q.save_dirty()
1905 q.save_dirty()
1906 if not args:
1906 if not args:
1907 ui.status(_('guards deactivated\n'))
1907 ui.status(_('guards deactivated\n'))
1908 if not opts['pop'] and not opts['reapply']:
1908 if not opts['pop'] and not opts['reapply']:
1909 unapplied = q.unapplied(repo)
1909 unapplied = q.unapplied(repo)
1910 guarded = [i for i in xrange(len(q.applied))
1910 guarded = [i for i in xrange(len(q.applied))
1911 if not q.pushable(i)[0]]
1911 if not q.pushable(i)[0]]
1912 if len(unapplied) != len(old_unapplied):
1912 if len(unapplied) != len(old_unapplied):
1913 ui.status(_('number of unguarded, unapplied patches has '
1913 ui.status(_('number of unguarded, unapplied patches has '
1914 'changed from %d to %d\n') %
1914 'changed from %d to %d\n') %
1915 (len(old_unapplied), len(unapplied)))
1915 (len(old_unapplied), len(unapplied)))
1916 if len(guarded) != len(old_guarded):
1916 if len(guarded) != len(old_guarded):
1917 ui.status(_('number of guarded, applied patches has changed '
1917 ui.status(_('number of guarded, applied patches has changed '
1918 'from %d to %d\n') %
1918 'from %d to %d\n') %
1919 (len(old_guarded), len(guarded)))
1919 (len(old_guarded), len(guarded)))
1920 elif opts['series']:
1920 elif opts['series']:
1921 guards = {}
1921 guards = {}
1922 noguards = 0
1922 noguards = 0
1923 for gs in q.series_guards:
1923 for gs in q.series_guards:
1924 if not gs:
1924 if not gs:
1925 noguards += 1
1925 noguards += 1
1926 for g in gs:
1926 for g in gs:
1927 guards.setdefault(g, 0)
1927 guards.setdefault(g, 0)
1928 guards[g] += 1
1928 guards[g] += 1
1929 if ui.verbose:
1929 if ui.verbose:
1930 guards['NONE'] = noguards
1930 guards['NONE'] = noguards
1931 guards = guards.items()
1931 guards = guards.items()
1932 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1932 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1933 if guards:
1933 if guards:
1934 ui.note(_('guards in series file:\n'))
1934 ui.note(_('guards in series file:\n'))
1935 for guard, count in guards:
1935 for guard, count in guards:
1936 ui.note('%2d ' % count)
1936 ui.note('%2d ' % count)
1937 ui.write(guard, '\n')
1937 ui.write(guard, '\n')
1938 else:
1938 else:
1939 ui.note(_('no guards in series file\n'))
1939 ui.note(_('no guards in series file\n'))
1940 else:
1940 else:
1941 if guards:
1941 if guards:
1942 ui.note(_('active guards:\n'))
1942 ui.note(_('active guards:\n'))
1943 for g in guards:
1943 for g in guards:
1944 ui.write(g, '\n')
1944 ui.write(g, '\n')
1945 else:
1945 else:
1946 ui.write(_('no active guards\n'))
1946 ui.write(_('no active guards\n'))
1947 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1947 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1948 popped = False
1948 popped = False
1949 if opts['pop'] or opts['reapply']:
1949 if opts['pop'] or opts['reapply']:
1950 for i in xrange(len(q.applied)):
1950 for i in xrange(len(q.applied)):
1951 pushable, reason = q.pushable(i)
1951 pushable, reason = q.pushable(i)
1952 if not pushable:
1952 if not pushable:
1953 ui.status(_('popping guarded patches\n'))
1953 ui.status(_('popping guarded patches\n'))
1954 popped = True
1954 popped = True
1955 if i == 0:
1955 if i == 0:
1956 q.pop(repo, all=True)
1956 q.pop(repo, all=True)
1957 else:
1957 else:
1958 q.pop(repo, i-1)
1958 q.pop(repo, i-1)
1959 break
1959 break
1960 if popped:
1960 if popped:
1961 try:
1961 try:
1962 if reapply:
1962 if reapply:
1963 ui.status(_('reapplying unguarded patches\n'))
1963 ui.status(_('reapplying unguarded patches\n'))
1964 q.push(repo, reapply)
1964 q.push(repo, reapply)
1965 finally:
1965 finally:
1966 q.save_dirty()
1966 q.save_dirty()
1967
1967
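The guard rules spelled out in the qselect help above reduce to a small predicate: a patch with no guards is always pushable, any matching negative guard blocks it, and positive guards require at least one active match. An illustrative, standalone version of that predicate (not mq's actual pushable() implementation):

def pushable(patch_guards, active):
    '''patch_guards: e.g. ['+stable', '-foo']; active: set of active guard names.'''
    if not patch_guards:
        return True
    # a negative guard matching an active guard always blocks the patch
    for g in patch_guards:
        if g.startswith('-') and g[1:] in active:
            return False
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    if positives:
        # with positive guards, at least one must match an active guard
        return any(p in active for p in positives)
    return True

# foo.patch (-stable) is skipped, bar.patch (+stable) is pushed:
assert not pushable(['-stable'], {'stable'})
assert pushable(['+stable'], {'stable'})
# with no active guards, positively guarded patches are skipped:
assert not pushable(['+stable'], set())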
1968 def reposetup(ui, repo):
1968 def reposetup(ui, repo):
1969 class mqrepo(repo.__class__):
1969 class mqrepo(repo.__class__):
1970 def abort_if_wdir_patched(self, errmsg, force=False):
1970 def abort_if_wdir_patched(self, errmsg, force=False):
1971 if self.mq.applied and not force:
1971 if self.mq.applied and not force:
1972 parent = revlog.hex(self.dirstate.parents()[0])
1972 parent = revlog.hex(self.dirstate.parents()[0])
1973 if parent in [s.rev for s in self.mq.applied]:
1973 if parent in [s.rev for s in self.mq.applied]:
1974 raise util.Abort(errmsg)
1974 raise util.Abort(errmsg)
1975
1975
1976 def commit(self, *args, **opts):
1976 def commit(self, *args, **opts):
1977 if len(args) >= 6:
1977 if len(args) >= 6:
1978 force = args[5]
1978 force = args[5]
1979 else:
1979 else:
1980 force = opts.get('force')
1980 force = opts.get('force')
1981 self.abort_if_wdir_patched(
1981 self.abort_if_wdir_patched(
1982 _('cannot commit over an applied mq patch'),
1982 _('cannot commit over an applied mq patch'),
1983 force)
1983 force)
1984
1984
1985 return super(mqrepo, self).commit(*args, **opts)
1985 return super(mqrepo, self).commit(*args, **opts)
1986
1986
1987 def push(self, remote, force=False, revs=None):
1987 def push(self, remote, force=False, revs=None):
1988 if self.mq.applied and not force:
1988 if self.mq.applied and not force:
1989 raise util.Abort(_('source has mq patches applied'))
1989 raise util.Abort(_('source has mq patches applied'))
1990 return super(mqrepo, self).push(remote, force, revs)
1990 return super(mqrepo, self).push(remote, force, revs)
1991
1991
1992 def tags(self):
1992 def tags(self):
1993 if self.tagscache:
1993 if self.tagscache:
1994 return self.tagscache
1994 return self.tagscache
1995
1995
1996 tagscache = super(mqrepo, self).tags()
1996 tagscache = super(mqrepo, self).tags()
1997
1997
1998 q = self.mq
1998 q = self.mq
1999 if not q.applied:
1999 if not q.applied:
2000 return tagscache
2000 return tagscache
2001
2001
2002 mqtags = [(patch.rev, patch.name) for patch in q.applied]
2002 mqtags = [(patch.rev, patch.name) for patch in q.applied]
2003 mqtags.append((mqtags[-1][0], 'qtip'))
2003 mqtags.append((mqtags[-1][0], 'qtip'))
2004 mqtags.append((mqtags[0][0], 'qbase'))
2004 mqtags.append((mqtags[0][0], 'qbase'))
2005 for patch in mqtags:
2005 for patch in mqtags:
2006 if patch[1] in tagscache:
2006 if patch[1] in tagscache:
2007 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2007 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
2008 else:
2008 else:
2009 tagscache[patch[1]] = revlog.bin(patch[0])
2009 tagscache[patch[1]] = revlog.bin(patch[0])
2010
2010
2011 return tagscache
2011 return tagscache
2012
2012
2013 def branchtags(self):
2013 def branchtags(self):
2014 if self.branchcache != None:
2014 if self.branchcache != None:
2015 return self.branchcache
2015 return self.branchcache
2016
2016
2017 q = self.mq
2017 q = self.mq
2018 if not q.applied:
2018 if not q.applied:
2019 return super(mqrepo, self).branchtags()
2019 return super(mqrepo, self).branchtags()
2020
2020
2021 self.branchcache = {} # avoid recursion in changectx
2021 self.branchcache = {} # avoid recursion in changectx
2022 cl = self.changelog
2022 cl = self.changelog
2023 partial, last, lrev = self._readbranchcache()
2023 partial, last, lrev = self._readbranchcache()
2024
2024
2025 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2025 qbase = cl.rev(revlog.bin(q.applied[0].rev))
2026 start = lrev + 1
2026 start = lrev + 1
2027 if start < qbase:
2027 if start < qbase:
2028 # update the cache (excluding the patches) and save it
2028 # update the cache (excluding the patches) and save it
2029 self._updatebranchcache(partial, lrev+1, qbase)
2029 self._updatebranchcache(partial, lrev+1, qbase)
2030 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2030 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2031 start = qbase
2031 start = qbase
2032 # if start = qbase, the cache is as updated as it should be.
2032 # if start = qbase, the cache is as updated as it should be.
2033 # if start > qbase, the cache includes (part of) the patches.
2033 # if start > qbase, the cache includes (part of) the patches.
2034 # we might as well use it, but we won't save it.
2034 # we might as well use it, but we won't save it.
2035
2035
2036 # update the cache up to the tip
2036 # update the cache up to the tip
2037 self._updatebranchcache(partial, start, cl.count())
2037 self._updatebranchcache(partial, start, cl.count())
2038
2038
2039 self.branchcache = partial
2039 self.branchcache = partial
2040 return self.branchcache
2040 return self.branchcache
2041
2041
2042 if repo.local():
2042 if repo.local():
2043 repo.__class__ = mqrepo
2043 repo.__class__ = mqrepo
2044 repo.mq = queue(ui, repo.join(""))
2044 repo.mq = queue(ui, repo.join(""))
2045
2045
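reposetup() above relies on a standard Mercurial extension idiom: define a subclass of the live repository's class and rebind repo.__class__, so the already-constructed repo object picks up the extra commit/push checks and mq tags. A self-contained sketch of that pattern with hypothetical names:

class Repo(object):
    def push(self):
        return 'pushed'

def extension_setup(repo):
    class WrappedRepo(repo.__class__):
        def push(self):
            # extra safety check added by the extension
            if getattr(self, 'patches_applied', False):
                raise RuntimeError('source has patches applied')
            return super(WrappedRepo, self).push()
    repo.__class__ = WrappedRepo

repo = Repo()
extension_setup(repo)
print(repo.push())            # 'pushed'
repo.patches_applied = True   # from now on the wrapper refuses to push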
2046 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2046 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2047
2047
2048 cmdtable = {
2048 cmdtable = {
2049 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2049 "qapplied": (applied, [] + seriesopts, 'hg qapplied [-s] [PATCH]'),
2050 "qclone": (clone,
2050 "qclone": (clone,
2051 [('', 'pull', None, _('use pull protocol to copy metadata')),
2051 [('', 'pull', None, _('use pull protocol to copy metadata')),
2052 ('U', 'noupdate', None, _('do not update the new working directories')),
2052 ('U', 'noupdate', None, _('do not update the new working directories')),
2053 ('', 'uncompressed', None,
2053 ('', 'uncompressed', None,
2054 _('use uncompressed transfer (fast over LAN)')),
2054 _('use uncompressed transfer (fast over LAN)')),
2055 ('e', 'ssh', '', _('specify ssh command to use')),
2055 ('e', 'ssh', '', _('specify ssh command to use')),
2056 ('p', 'patches', '', _('location of source patch repo')),
2056 ('p', 'patches', '', _('location of source patch repo')),
2057 ('', 'remotecmd', '',
2057 ('', 'remotecmd', '',
2058 _('specify hg command to run on the remote side'))],
2058 _('specify hg command to run on the remote side'))],
2059 'hg qclone [OPTION]... SOURCE [DEST]'),
2059 'hg qclone [OPTION]... SOURCE [DEST]'),
2060 "qcommit|qci":
2060 "qcommit|qci":
2061 (commit,
2061 (commit,
2062 commands.table["^commit|ci"][1],
2062 commands.table["^commit|ci"][1],
2063 'hg qcommit [OPTION]... [FILE]...'),
2063 'hg qcommit [OPTION]... [FILE]...'),
2064 "^qdiff": (diff,
2064 "^qdiff": (diff,
2065 [('I', 'include', [], _('include names matching the given patterns')),
2065 [('I', 'include', [], _('include names matching the given patterns')),
2066 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2066 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2067 'hg qdiff [-I] [-X] [FILE]...'),
2067 'hg qdiff [-I] [-X] [FILE]...'),
2068 "qdelete|qremove|qrm":
2068 "qdelete|qremove|qrm":
2069 (delete,
2069 (delete,
2070 [('k', 'keep', None, _('keep patch file')),
2070 [('k', 'keep', None, _('keep patch file')),
2071 ('r', 'rev', [], _('stop managing a revision'))],
2071 ('r', 'rev', [], _('stop managing a revision'))],
2072 'hg qdelete [-k] [-r REV]... PATCH...'),
2072 'hg qdelete [-k] [-r REV]... PATCH...'),
2073 'qfold':
2073 'qfold':
2074 (fold,
2074 (fold,
2075 [('e', 'edit', None, _('edit patch header')),
2075 [('e', 'edit', None, _('edit patch header')),
2076 ('k', 'keep', None, _('keep folded patch files')),
2076 ('k', 'keep', None, _('keep folded patch files')),
2077 ('m', 'message', '', _('set patch header to <text>')),
2077 ('m', 'message', '', _('set patch header to <text>')),
2078 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
2078 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
2079 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2079 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
2080 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2080 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
2081 ('n', 'none', None, _('drop all guards'))],
2081 ('n', 'none', None, _('drop all guards'))],
2082 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
2082 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
2083 'qheader': (header, [],
2083 'qheader': (header, [],
2084 _('hg qheader [PATCH]')),
2084 _('hg qheader [PATCH]')),
2085 "^qimport":
2085 "^qimport":
2086 (qimport,
2086 (qimport,
2087 [('e', 'existing', None, 'import file in patch dir'),
2087 [('e', 'existing', None, 'import file in patch dir'),
2088 ('n', 'name', '', 'patch file name'),
2088 ('n', 'name', '', 'patch file name'),
2089 ('f', 'force', None, 'overwrite existing files'),
2089 ('f', 'force', None, 'overwrite existing files'),
2090 ('r', 'rev', [], 'place existing revisions under mq control')],
2090 ('r', 'rev', [], 'place existing revisions under mq control')],
2091 'hg qimport [-e] [-n NAME] [-f] [-r REV]... FILE...'),
2091 'hg qimport [-e] [-n NAME] [-f] [-r REV]... FILE...'),
2092 "^qinit":
2092 "^qinit":
2093 (init,
2093 (init,
2094 [('c', 'create-repo', None, 'create queue repository')],
2094 [('c', 'create-repo', None, 'create queue repository')],
2095 'hg qinit [-c]'),
2095 'hg qinit [-c]'),
2096 "qnew":
2096 "qnew":
2097 (new,
2097 (new,
2098 [('e', 'edit', None, _('edit commit message')),
2098 [('e', 'edit', None, _('edit commit message')),
2099 ('m', 'message', '', _('use <text> as commit message')),
2099 ('m', 'message', '', _('use <text> as commit message')),
2100 ('l', 'logfile', '', _('read the commit message from <file>')),
2100 ('l', 'logfile', '', _('read the commit message from <file>')),
2101 ('f', 'force', None, _('import uncommitted changes into patch'))],
2101 ('f', 'force', None, _('import uncommitted changes into patch'))],
2102 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2102 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
2103 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2103 "qnext": (next, [] + seriesopts, 'hg qnext [-s]'),
2104 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2104 "qprev": (prev, [] + seriesopts, 'hg qprev [-s]'),
2105 "^qpop":
2105 "^qpop":
2106 (pop,
2106 (pop,
2107 [('a', 'all', None, 'pop all patches'),
2107 [('a', 'all', None, 'pop all patches'),
2108 ('n', 'name', '', 'queue name to pop'),
2108 ('n', 'name', '', 'queue name to pop'),
2109 ('f', 'force', None, 'forget any local changes')],
2109 ('f', 'force', None, 'forget any local changes')],
2110 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2110 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
2111 "^qpush":
2111 "^qpush":
2112 (push,
2112 (push,
2113 [('f', 'force', None, 'apply if the patch has rejects'),
2113 [('f', 'force', None, 'apply if the patch has rejects'),
2114 ('l', 'list', None, 'list patch name in commit text'),
2114 ('l', 'list', None, 'list patch name in commit text'),
2115 ('a', 'all', None, 'apply all patches'),
2115 ('a', 'all', None, 'apply all patches'),
2116 ('m', 'merge', None, 'merge from another queue'),
2116 ('m', 'merge', None, 'merge from another queue'),
2117 ('n', 'name', '', 'merge queue name')],
2117 ('n', 'name', '', 'merge queue name')],
2118 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2118 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
2119 "^qrefresh":
2119 "^qrefresh":
2120 (refresh,
2120 (refresh,
2121 [('e', 'edit', None, _('edit commit message')),
2121 [('e', 'edit', None, _('edit commit message')),
2122 ('m', 'message', '', _('change commit message with <text>')),
2122 ('m', 'message', '', _('change commit message with <text>')),
2123 ('l', 'logfile', '', _('change commit message with <file> content')),
2123 ('l', 'logfile', '', _('change commit message with <file> content')),
2124 ('g', 'git', None, _('use git extended diff format')),
2124 ('g', 'git', None, _('use git extended diff format')),
2125 ('s', 'short', None, 'short refresh'),
2125 ('s', 'short', None, 'short refresh'),
2126 ('I', 'include', [], _('include names matching the given patterns')),
2126 ('I', 'include', [], _('include names matching the given patterns')),
2127 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2127 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2128 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
2128 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
2129 'qrename|qmv':
2129 'qrename|qmv':
2130 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2130 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
2131 "qrestore":
2131 "qrestore":
2132 (restore,
2132 (restore,
2133 [('d', 'delete', None, 'delete save entry'),
2133 [('d', 'delete', None, 'delete save entry'),
2134 ('u', 'update', None, 'update queue working dir')],
2134 ('u', 'update', None, 'update queue working dir')],
2135 'hg qrestore [-d] [-u] REV'),
2135 'hg qrestore [-d] [-u] REV'),
2136 "qsave":
2136 "qsave":
2137 (save,
2137 (save,
2138 [('m', 'message', '', _('use <text> as commit message')),
2138 [('m', 'message', '', _('use <text> as commit message')),
2139 ('l', 'logfile', '', _('read the commit message from <file>')),
2139 ('l', 'logfile', '', _('read the commit message from <file>')),
2140 ('c', 'copy', None, 'copy patch directory'),
2140 ('c', 'copy', None, 'copy patch directory'),
2141 ('n', 'name', '', 'copy directory name'),
2141 ('n', 'name', '', 'copy directory name'),
2142 ('e', 'empty', None, 'clear queue status file'),
2142 ('e', 'empty', None, 'clear queue status file'),
2143 ('f', 'force', None, 'force copy')],
2143 ('f', 'force', None, 'force copy')],
2144 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2144 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
2145 "qselect": (select,
2145 "qselect": (select,
2146 [('n', 'none', None, _('disable all guards')),
2146 [('n', 'none', None, _('disable all guards')),
2147 ('s', 'series', None, _('list all guards in series file')),
2147 ('s', 'series', None, _('list all guards in series file')),
2148 ('', 'pop', None,
2148 ('', 'pop', None,
2149 _('pop to before first guarded applied patch')),
2149 _('pop to before first guarded applied patch')),
2150 ('', 'reapply', None, _('pop, then reapply patches'))],
2150 ('', 'reapply', None, _('pop, then reapply patches'))],
2151 'hg qselect [OPTION...] [GUARD...]'),
2151 'hg qselect [OPTION...] [GUARD...]'),
2152 "qseries":
2152 "qseries":
2153 (series,
2153 (series,
2154 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2154 [('m', 'missing', None, 'print patches not in series')] + seriesopts,
2155 'hg qseries [-ms]'),
2155 'hg qseries [-ms]'),
2156 "^strip":
2156 "^strip":
2157 (strip,
2157 (strip,
2158 [('f', 'force', None, 'force multi-head removal'),
2158 [('f', 'force', None, 'force multi-head removal'),
2159 ('b', 'backup', None, 'bundle unrelated changesets'),
2159 ('b', 'backup', None, 'bundle unrelated changesets'),
2160 ('n', 'nobackup', None, 'no backups')],
2160 ('n', 'nobackup', None, 'no backups')],
2161 'hg strip [-f] [-b] [-n] REV'),
2161 'hg strip [-f] [-b] [-n] REV'),
2162 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2162 "qtop": (top, [] + seriesopts, 'hg qtop [-s]'),
2163 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2163 "qunapplied": (unapplied, [] + seriesopts, 'hg qunapplied [-s] [PATCH]'),
2164 }
2164 }
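Every entry in the cmdtable above has the same shape: the key is the command name (aliases separated by '|', a leading '^' marks commands listed in short help), and the value is a (function, options, synopsis) tuple whose options are (shortflag, longname, default, helptext) tuples. A hypothetical entry, purely to illustrate the structure; it is not part of mq:

def hello(ui, repo, *pats, **opts):
    greeting = opts.get('greeting') or 'hello'
    ui.write('%s from %s\n' % (greeting, repo.root))

cmdtable_example = {
    # '^' would list the command in short help; aliases follow '|'
    "^hello|hi":
        (hello,
         [('g', 'greeting', '', 'use <text> as the greeting'),
          ('v', 'verbose', None, 'be chatty')],
         'hg hello [-g TEXT] [-v]'),
}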
@@ -1,215 +1,197 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), 'mdiff util')
11 demandload(globals(), 'mdiff util')
12 demandload(globals(), 'os sys')
12 demandload(globals(), 'os sys')
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def revfix(repo, val, defval):
17 '''turn user-level id of changeset into rev number.
18 user-level id can be tag, changeset, rev number, or negative rev
19 number relative to number of revs (-1 is tip, etc).'''
20 if not val:
21 return defval
22 try:
23 num = int(val)
24 if str(num) != val:
25 raise ValueError
26 if num < 0:
27 num += repo.changelog.count()
28 if num < 0:
29 num = 0
30 elif num >= repo.changelog.count():
31 raise ValueError
32 except ValueError:
33 try:
34 num = repo.changelog.rev(repo.lookup(val))
35 except KeyError:
36 raise util.Abort(_('invalid revision identifier %s') % val)
37 return num
38
39 def revpair(ui, repo, revs):
16 def revpair(ui, repo, revs):
40 '''return pair of nodes, given list of revisions. second item can
17 '''return pair of nodes, given list of revisions. second item can
41 be None, meaning use working dir.'''
18 be None, meaning use working dir.'''
19
20 def revfix(repo, val, defval):
21 if not val and val != 0:
22 val = defval
23 return repo.lookup(val)
24
42 if not revs:
25 if not revs:
43 return repo.dirstate.parents()[0], None
26 return repo.dirstate.parents()[0], None
44 end = None
27 end = None
45 if len(revs) == 1:
28 if len(revs) == 1:
46 start = revs[0]
29 if revrangesep in revs[0]:
47 if revrangesep in start:
30 start, end = revs[0].split(revrangesep, 1)
48 start, end = start.split(revrangesep, 1)
49 start = revfix(repo, start, 0)
31 start = revfix(repo, start, 0)
50 end = revfix(repo, end, repo.changelog.count() - 1)
32 end = revfix(repo, end, repo.changelog.count() - 1)
51 else:
33 else:
52 start = revfix(repo, start, None)
34 start = revfix(repo, revs[0], None)
53 elif len(revs) == 2:
35 elif len(revs) == 2:
54 if revrangesep in revs[0] or revrangesep in revs[1]:
36 if revrangesep in revs[0] or revrangesep in revs[1]:
55 raise util.Abort(_('too many revisions specified'))
37 raise util.Abort(_('too many revisions specified'))
56 start = revfix(repo, revs[0], None)
38 start = revfix(repo, revs[0], None)
57 end = revfix(repo, revs[1], None)
39 end = revfix(repo, revs[1], None)
58 else:
40 else:
59 raise util.Abort(_('too many revisions specified'))
41 raise util.Abort(_('too many revisions specified'))
60 if end is not None: end = repo.lookup(str(end))
42 return start, end
61 return repo.lookup(str(start)), end
62
43
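The nested revfix helper above tests 'not val and val != 0' rather than just 'not val': an empty string or None should fall back to the default, but revision 0, although falsy, is a valid value and must be kept. The check in isolation, with a hypothetical helper name:

def pick_default(val, defval):
    # '' and None fall back to the default, but a literal 0 is preserved
    if not val and val != 0:
        val = defval
    return val

assert pick_default('', 0) == 0        # empty start of a range -> rev 0
assert pick_default(None, 'tip') == 'tip'
assert pick_default(0, 'tip') == 0     # rev 0 is falsy but must survive
assert pick_default('5', 0) == '5'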
63 def revrange(ui, repo, revs):
44 def revrange(ui, repo, revs):
64 """Yield revision as strings from a list of revision specifications."""
45 """Yield revision as strings from a list of revision specifications."""
65 seen = {}
46
47 def revfix(repo, val, defval):
48 if not val and val != 0:
49 return defval
50 return repo.changelog.rev(repo.lookup(val))
51
52 seen, l = {}, []
66 for spec in revs:
53 for spec in revs:
67 if revrangesep in spec:
54 if revrangesep in spec:
68 start, end = spec.split(revrangesep, 1)
55 start, end = spec.split(revrangesep, 1)
69 start = revfix(repo, start, 0)
56 start = revfix(repo, start, 0)
70 end = revfix(repo, end, repo.changelog.count() - 1)
57 end = revfix(repo, end, repo.changelog.count() - 1)
71 step = start > end and -1 or 1
58 step = start > end and -1 or 1
72 for rev in xrange(start, end+step, step):
59 for rev in xrange(start, end+step, step):
73 if rev in seen:
60 if rev in seen:
74 continue
61 continue
75 seen[rev] = 1
62 seen[rev] = 1
76 yield str(rev)
63 l.append(rev)
77 else:
64 else:
78 rev = revfix(repo, spec, None)
65 rev = revfix(repo, spec, None)
79 if rev in seen:
66 if rev in seen:
80 continue
67 continue
81 seen[rev] = 1
68 seen[rev] = 1
82 yield str(rev)
69 l.append(rev)
70
71 return l
83
72
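revrange() above expands specs such as '5:2' into individual revisions, walking backwards when the range is reversed and dropping duplicates while preserving order. An illustrative standalone version over plain integers (the real code resolves symbolic names through repo.lookup first):

def expand_revs(specs, maxrev, sep=':'):
    seen, out = {}, []
    for spec in specs:
        if sep in spec:
            start, end = spec.split(sep, 1)
            start = int(start) if start else 0
            end = int(end) if end else maxrev
            step = 1 if start <= end else -1     # ranges may run backwards
            revs = range(start, end + step, step)
        else:
            revs = [int(spec)]
        for rev in revs:
            if rev not in seen:                  # keep first occurrence only
                seen[rev] = 1
                out.append(rev)
    return out

# '5:2' walks backwards; the duplicate '3' is dropped
assert expand_revs(['5:2', '3'], maxrev=10) == [5, 4, 3, 2]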
84 def make_filename(repo, pat, node,
73 def make_filename(repo, pat, node,
85 total=None, seqno=None, revwidth=None, pathname=None):
74 total=None, seqno=None, revwidth=None, pathname=None):
86 node_expander = {
75 node_expander = {
87 'H': lambda: hex(node),
76 'H': lambda: hex(node),
88 'R': lambda: str(repo.changelog.rev(node)),
77 'R': lambda: str(repo.changelog.rev(node)),
89 'h': lambda: short(node),
78 'h': lambda: short(node),
90 }
79 }
91 expander = {
80 expander = {
92 '%': lambda: '%',
81 '%': lambda: '%',
93 'b': lambda: os.path.basename(repo.root),
82 'b': lambda: os.path.basename(repo.root),
94 }
83 }
95
84
96 try:
85 try:
97 if node:
86 if node:
98 expander.update(node_expander)
87 expander.update(node_expander)
99 if node and revwidth is not None:
88 if node and revwidth is not None:
100 expander['r'] = (lambda:
89 expander['r'] = (lambda:
101 str(repo.changelog.rev(node)).zfill(revwidth))
90 str(repo.changelog.rev(node)).zfill(revwidth))
102 if total is not None:
91 if total is not None:
103 expander['N'] = lambda: str(total)
92 expander['N'] = lambda: str(total)
104 if seqno is not None:
93 if seqno is not None:
105 expander['n'] = lambda: str(seqno)
94 expander['n'] = lambda: str(seqno)
106 if total is not None and seqno is not None:
95 if total is not None and seqno is not None:
107 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
96 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
108 if pathname is not None:
97 if pathname is not None:
109 expander['s'] = lambda: os.path.basename(pathname)
98 expander['s'] = lambda: os.path.basename(pathname)
110 expander['d'] = lambda: os.path.dirname(pathname) or '.'
99 expander['d'] = lambda: os.path.dirname(pathname) or '.'
111 expander['p'] = lambda: pathname
100 expander['p'] = lambda: pathname
112
101
113 newname = []
102 newname = []
114 patlen = len(pat)
103 patlen = len(pat)
115 i = 0
104 i = 0
116 while i < patlen:
105 while i < patlen:
117 c = pat[i]
106 c = pat[i]
118 if c == '%':
107 if c == '%':
119 i += 1
108 i += 1
120 c = pat[i]
109 c = pat[i]
121 c = expander[c]()
110 c = expander[c]()
122 newname.append(c)
111 newname.append(c)
123 i += 1
112 i += 1
124 return ''.join(newname)
113 return ''.join(newname)
125 except KeyError, inst:
114 except KeyError, inst:
126 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
115 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
127 inst.args[0])
116 inst.args[0])
128
117
129 def make_file(repo, pat, node=None,
118 def make_file(repo, pat, node=None,
130 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
119 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
131 if not pat or pat == '-':
120 if not pat or pat == '-':
132 return 'w' in mode and sys.stdout or sys.stdin
121 return 'w' in mode and sys.stdout or sys.stdin
133 if hasattr(pat, 'write') and 'w' in mode:
122 if hasattr(pat, 'write') and 'w' in mode:
134 return pat
123 return pat
135 if hasattr(pat, 'read') and 'r' in mode:
124 if hasattr(pat, 'read') and 'r' in mode:
136 return pat
125 return pat
137 return open(make_filename(repo, pat, node, total, seqno, revwidth,
126 return open(make_filename(repo, pat, node, total, seqno, revwidth,
138 pathname),
127 pathname),
139 mode)
128 mode)
140
129
141 def matchpats(repo, pats=[], opts={}, head=''):
130 def matchpats(repo, pats=[], opts={}, head=''):
142 cwd = repo.getcwd()
131 cwd = repo.getcwd()
143 if not pats and cwd:
132 if not pats and cwd:
144 opts['include'] = [os.path.join(cwd, i)
133 opts['include'] = [os.path.join(cwd, i)
145 for i in opts.get('include', [])]
134 for i in opts.get('include', [])]
146 opts['exclude'] = [os.path.join(cwd, x)
135 opts['exclude'] = [os.path.join(cwd, x)
147 for x in opts.get('exclude', [])]
136 for x in opts.get('exclude', [])]
148 cwd = ''
137 cwd = ''
149 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
138 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
150 opts.get('exclude'), head)
139 opts.get('exclude'), head)
151
140
152 def makewalk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
141 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
153 files, matchfn, anypats = matchpats(repo, pats, opts, head)
142 files, matchfn, anypats = matchpats(repo, pats, opts, head)
154 exact = dict(zip(files, files))
143 exact = dict.fromkeys(files)
155 def walk():
144 for src, fn in repo.walk(node=node, files=files, match=matchfn,
156 for src, fn in repo.walk(node=node, files=files, match=matchfn,
145 badmatch=badmatch):
157 badmatch=badmatch):
146 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
158 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
159 return files, matchfn, walk()
160
161 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
162 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
163 for r in results:
164 yield r
165
147
166 def findrenames(repo, added=None, removed=None, threshold=0.5):
148 def findrenames(repo, added=None, removed=None, threshold=0.5):
167 if added is None or removed is None:
149 if added is None or removed is None:
168 added, removed = repo.status()[1:3]
150 added, removed = repo.status()[1:3]
169 changes = repo.changelog.read(repo.dirstate.parents()[0])
151 changes = repo.changelog.read(repo.dirstate.parents()[0])
170 mf = repo.manifest.read(changes[0])
152 mf = repo.manifest.read(changes[0])
171 for a in added:
153 for a in added:
172 aa = repo.wread(a)
154 aa = repo.wread(a)
173 bestscore, bestname = None, None
155 bestscore, bestname = None, None
174 for r in removed:
156 for r in removed:
175 rr = repo.file(r).read(mf[r])
157 rr = repo.file(r).read(mf[r])
176 delta = mdiff.textdiff(aa, rr)
158 delta = mdiff.textdiff(aa, rr)
177 if len(delta) < len(aa):
159 if len(delta) < len(aa):
178 myscore = 1.0 - (float(len(delta)) / len(aa))
160 myscore = 1.0 - (float(len(delta)) / len(aa))
179 if bestscore is None or myscore > bestscore:
161 if bestscore is None or myscore > bestscore:
180 bestscore, bestname = myscore, r
162 bestscore, bestname = myscore, r
181 if bestname and bestscore >= threshold:
163 if bestname and bestscore >= threshold:
182 yield bestname, a, bestscore
164 yield bestname, a, bestscore
183
165
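findrenames() above scores a rename candidate as 1.0 minus the relative size of the delta between the removed and added file, using mdiff.textdiff for the delta. A rough stand-in built on difflib, only to make the scoring idea concrete; the real delta format and resulting scores differ:

import difflib

def similarity(old_text, new_text):
    # crude stand-in for mdiff.textdiff: how much of new_text already
    # appears in old_text
    matcher = difflib.SequenceMatcher(None, old_text, new_text)
    matched = sum(size for _i, _j, size in matcher.get_matching_blocks())
    if not new_text:
        return 0.0
    return float(matched) / len(new_text)

old = "def f():\n    return 1\n"
new = "def f():\n    return 2\n"
print(round(similarity(old, new), 2))   # close to 1.0 -> likely a rename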
184 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
166 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
185 similarity=None):
167 similarity=None):
186 if dry_run is None:
168 if dry_run is None:
187 dry_run = opts.get('dry_run')
169 dry_run = opts.get('dry_run')
188 if similarity is None:
170 if similarity is None:
189 similarity = float(opts.get('similarity') or 0)
171 similarity = float(opts.get('similarity') or 0)
190 add, remove = [], []
172 add, remove = [], []
191 mapping = {}
173 mapping = {}
192 for src, abs, rel, exact in walk(repo, pats, opts):
174 for src, abs, rel, exact in walk(repo, pats, opts):
193 if src == 'f' and repo.dirstate.state(abs) == '?':
175 if src == 'f' and repo.dirstate.state(abs) == '?':
194 add.append(abs)
176 add.append(abs)
195 mapping[abs] = rel, exact
177 mapping[abs] = rel, exact
196 if repo.ui.verbose or not exact:
178 if repo.ui.verbose or not exact:
197 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
179 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
198 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
180 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
199 remove.append(abs)
181 remove.append(abs)
200 mapping[abs] = rel, exact
182 mapping[abs] = rel, exact
201 if repo.ui.verbose or not exact:
183 if repo.ui.verbose or not exact:
202 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
184 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
203 if not dry_run:
185 if not dry_run:
204 repo.add(add, wlock=wlock)
186 repo.add(add, wlock=wlock)
205 repo.remove(remove, wlock=wlock)
187 repo.remove(remove, wlock=wlock)
206 if similarity > 0:
188 if similarity > 0:
207 for old, new, score in findrenames(repo, add, remove, similarity):
189 for old, new, score in findrenames(repo, add, remove, similarity):
208 oldrel, oldexact = mapping[old]
190 oldrel, oldexact = mapping[old]
209 newrel, newexact = mapping[new]
191 newrel, newexact = mapping[new]
210 if repo.ui.verbose or not oldexact or not newexact:
192 if repo.ui.verbose or not oldexact or not newexact:
211 repo.ui.status(_('recording removal of %s as rename to %s '
193 repo.ui.status(_('recording removal of %s as rename to %s '
212 '(%d%% similar)\n') %
194 '(%d%% similar)\n') %
213 (oldrel, newrel, score * 100))
195 (oldrel, newrel, score * 100))
214 if not dry_run:
196 if not dry_run:
215 repo.copy(old, new, wlock=wlock)
197 repo.copy(old, new, wlock=wlock)
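Before rename detection, addremove() above applies two rules while walking the working directory: an unknown file that exists on disk (dirstate state '?') is scheduled for add, and a tracked file that has disappeared (state other than 'r', path missing) is scheduled for remove. A compact sketch of just that classification, with hypothetical inputs:

def classify(entries):
    '''entries: (path, dirstate_state, exists_on_disk) tuples; states as in
    dirstate: '?' unknown, 'r' marked removed, 'n' tracked/normal.'''
    add, remove = [], []
    for path, state, exists in entries:
        if exists and state == '?':
            add.append(path)            # unknown file present on disk -> add
        if not exists and state != 'r':
            remove.append(path)         # tracked file missing on disk -> remove
    return add, remove

add, remove = classify([('new.py', '?', True),
                        ('gone.py', 'n', False),
                        ('kept.py', 'n', True)])
assert add == ['new.py'] and remove == ['gone.py']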
@@ -1,3557 +1,3534 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted = repo.status()[:4]
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def walkchangerevs(ui, repo, pats, opts):
53 def walkchangerevs(ui, repo, pats, change, opts):
54 '''Iterate over files and the revs they changed in.
54 '''Iterate over files and the revs they changed in.
55
55
56 Callers most commonly need to iterate backwards over the history
56 Callers most commonly need to iterate backwards over the history
57 they are interested in. Doing so has awful (quadratic-looking)
57 they are interested in. Doing so has awful (quadratic-looking)
58 performance, so we use iterators in a "windowed" way.
58 performance, so we use iterators in a "windowed" way.
59
59
60 We walk a window of revisions in the desired order. Within the
60 We walk a window of revisions in the desired order. Within the
61 window, we first walk forwards to gather data, then in the desired
61 window, we first walk forwards to gather data, then in the desired
62 order (usually backwards) to display it.
62 order (usually backwards) to display it.
63
63
64 This function returns an (iterator, getchange, matchfn) tuple. The
64 This function returns an (iterator, matchfn) tuple. The iterator
65 getchange function returns the changelog entry for a numeric
65 yields 3-tuples. They will be of one of the following forms:
66 revision. The iterator yields 3-tuples. They will be of one of
67 the following forms:
68
66
69 "window", incrementing, lastrev: stepping through a window,
67 "window", incrementing, lastrev: stepping through a window,
70 positive if walking forwards through revs, last rev in the
68 positive if walking forwards through revs, last rev in the
71 sequence iterated over - use to reset state for the current window
69 sequence iterated over - use to reset state for the current window
72
70
73 "add", rev, fns: out-of-order traversal of the given file names
71 "add", rev, fns: out-of-order traversal of the given file names
74 fns, which changed during revision rev - use to gather data for
72 fns, which changed during revision rev - use to gather data for
75 possible display
73 possible display
76
74
77 "iter", rev, None: in-order traversal of the revs earlier iterated
75 "iter", rev, None: in-order traversal of the revs earlier iterated
78 over with "add" - use to display data'''
76 over with "add" - use to display data'''
79
77
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
78 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 if start < end:
79 if start < end:
82 while start < end:
80 while start < end:
83 yield start, min(windowsize, end-start)
81 yield start, min(windowsize, end-start)
84 start += windowsize
82 start += windowsize
85 if windowsize < sizelimit:
83 if windowsize < sizelimit:
86 windowsize *= 2
84 windowsize *= 2
87 else:
85 else:
88 while start > end:
86 while start > end:
89 yield start, min(windowsize, start-end-1)
87 yield start, min(windowsize, start-end-1)
90 start -= windowsize
88 start -= windowsize
91 if windowsize < sizelimit:
89 if windowsize < sizelimit:
92 windowsize *= 2
90 windowsize *= 2
93
91
94
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
92 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 follow = opts.get('follow') or opts.get('follow_first')
93 follow = opts.get('follow') or opts.get('follow_first')
97
94
98 if repo.changelog.count() == 0:
95 if repo.changelog.count() == 0:
99 return [], False, matchfn
96 return [], matchfn
100
97
101 if follow:
98 if follow:
102 defrange = '%s:0' % repo.changectx().rev()
99 defrange = '%s:0' % repo.changectx().rev()
103 else:
100 else:
104 defrange = 'tip:0'
101 defrange = 'tip:0'
105 revs = map(int, cmdutil.revrange(ui, repo, opts['rev'] or [defrange]))
102 revs = cmdutil.revrange(ui, repo, opts['rev'] or [defrange])
106 wanted = {}
103 wanted = {}
107 slowpath = anypats
104 slowpath = anypats
108 fncache = {}
105 fncache = {}
109
106
110 chcache = {}
111 def getchange(rev):
112 ch = chcache.get(rev)
113 if ch is None:
114 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
115 return ch
116
117 if not slowpath and not files:
107 if not slowpath and not files:
118 # No files, no patterns. Display all revs.
108 # No files, no patterns. Display all revs.
119 wanted = dict(zip(revs, revs))
109 wanted = dict.fromkeys(revs)
120 copies = []
110 copies = []
121 if not slowpath:
111 if not slowpath:
122 # Only files, no patterns. Check the history of each file.
112 # Only files, no patterns. Check the history of each file.
123 def filerevgen(filelog, node):
113 def filerevgen(filelog, node):
124 cl_count = repo.changelog.count()
114 cl_count = repo.changelog.count()
125 if node is None:
115 if node is None:
126 last = filelog.count() - 1
116 last = filelog.count() - 1
127 else:
117 else:
128 last = filelog.rev(node)
118 last = filelog.rev(node)
129 for i, window in increasing_windows(last, -1):
119 for i, window in increasing_windows(last, -1):
130 revs = []
120 revs = []
131 for j in xrange(i - window, i + 1):
121 for j in xrange(i - window, i + 1):
132 n = filelog.node(j)
122 n = filelog.node(j)
133 revs.append((filelog.linkrev(n),
123 revs.append((filelog.linkrev(n),
134 follow and filelog.renamed(n)))
124 follow and filelog.renamed(n)))
135 revs.reverse()
125 revs.reverse()
136 for rev in revs:
126 for rev in revs:
137 # only yield rev for which we have the changelog, it can
127 # only yield rev for which we have the changelog, it can
138 # happen while doing "hg log" during a pull or commit
128 # happen while doing "hg log" during a pull or commit
139 if rev[0] < cl_count:
129 if rev[0] < cl_count:
140 yield rev
130 yield rev
141 def iterfiles():
131 def iterfiles():
142 for filename in files:
132 for filename in files:
143 yield filename, None
133 yield filename, None
144 for filename_node in copies:
134 for filename_node in copies:
145 yield filename_node
135 yield filename_node
146 minrev, maxrev = min(revs), max(revs)
136 minrev, maxrev = min(revs), max(revs)
147 for file_, node in iterfiles():
137 for file_, node in iterfiles():
148 filelog = repo.file(file_)
138 filelog = repo.file(file_)
149 # A zero count may be a directory or deleted file, so
139 # A zero count may be a directory or deleted file, so
150 # try to find matching entries on the slow path.
140 # try to find matching entries on the slow path.
151 if filelog.count() == 0:
141 if filelog.count() == 0:
152 slowpath = True
142 slowpath = True
153 break
143 break
154 for rev, copied in filerevgen(filelog, node):
144 for rev, copied in filerevgen(filelog, node):
155 if rev <= maxrev:
145 if rev <= maxrev:
156 if rev < minrev:
146 if rev < minrev:
157 break
147 break
158 fncache.setdefault(rev, [])
148 fncache.setdefault(rev, [])
159 fncache[rev].append(file_)
149 fncache[rev].append(file_)
160 wanted[rev] = 1
150 wanted[rev] = 1
161 if follow and copied:
151 if follow and copied:
162 copies.append(copied)
152 copies.append(copied)
163 if slowpath:
153 if slowpath:
164 if follow:
154 if follow:
165 raise util.Abort(_('can only follow copies/renames for explicit '
155 raise util.Abort(_('can only follow copies/renames for explicit '
166 'file names'))
156 'file names'))
167
157
168 # The slow path checks files modified in every changeset.
158 # The slow path checks files modified in every changeset.
169 def changerevgen():
159 def changerevgen():
170 for i, window in increasing_windows(repo.changelog.count()-1, -1):
160 for i, window in increasing_windows(repo.changelog.count()-1, -1):
171 for j in xrange(i - window, i + 1):
161 for j in xrange(i - window, i + 1):
172 yield j, getchange(j)[3]
162 yield j, change(j)[3]
173
163
174 for rev, changefiles in changerevgen():
164 for rev, changefiles in changerevgen():
175 matches = filter(matchfn, changefiles)
165 matches = filter(matchfn, changefiles)
176 if matches:
166 if matches:
177 fncache[rev] = matches
167 fncache[rev] = matches
178 wanted[rev] = 1
168 wanted[rev] = 1
179
169
180 class followfilter:
170 class followfilter:
181 def __init__(self, onlyfirst=False):
171 def __init__(self, onlyfirst=False):
182 self.startrev = -1
172 self.startrev = -1
183 self.roots = []
173 self.roots = []
184 self.onlyfirst = onlyfirst
174 self.onlyfirst = onlyfirst
185
175
186 def match(self, rev):
176 def match(self, rev):
187 def realparents(rev):
177 def realparents(rev):
188 if self.onlyfirst:
178 if self.onlyfirst:
189 return repo.changelog.parentrevs(rev)[0:1]
179 return repo.changelog.parentrevs(rev)[0:1]
190 else:
180 else:
191 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
181 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
192
182
193 if self.startrev == -1:
183 if self.startrev == -1:
194 self.startrev = rev
184 self.startrev = rev
195 return True
185 return True
196
186
197 if rev > self.startrev:
187 if rev > self.startrev:
198 # forward: all descendants
188 # forward: all descendants
199 if not self.roots:
189 if not self.roots:
200 self.roots.append(self.startrev)
190 self.roots.append(self.startrev)
201 for parent in realparents(rev):
191 for parent in realparents(rev):
202 if parent in self.roots:
192 if parent in self.roots:
203 self.roots.append(rev)
193 self.roots.append(rev)
204 return True
194 return True
205 else:
195 else:
206 # backwards: all parents
196 # backwards: all parents
207 if not self.roots:
197 if not self.roots:
208 self.roots.extend(realparents(self.startrev))
198 self.roots.extend(realparents(self.startrev))
209 if rev in self.roots:
199 if rev in self.roots:
210 self.roots.remove(rev)
200 self.roots.remove(rev)
211 self.roots.extend(realparents(rev))
201 self.roots.extend(realparents(rev))
212 return True
202 return True
213
203
214 return False
204 return False
215
205
216 # it might be worthwhile to do this in the iterator if the rev range
206 # it might be worthwhile to do this in the iterator if the rev range
217 # is descending and the prune args are all within that range
207 # is descending and the prune args are all within that range
218 for rev in opts.get('prune', ()):
208 for rev in opts.get('prune', ()):
219 rev = repo.changelog.rev(repo.lookup(rev))
209 rev = repo.changelog.rev(repo.lookup(rev))
220 ff = followfilter()
210 ff = followfilter()
221 stop = min(revs[0], revs[-1])
211 stop = min(revs[0], revs[-1])
222 for x in xrange(rev, stop-1, -1):
212 for x in xrange(rev, stop-1, -1):
223 if ff.match(x) and wanted.has_key(x):
213 if ff.match(x) and x in wanted:
224 del wanted[x]
214 del wanted[x]
225
215
226 def iterate():
216 def iterate():
227 if follow and not files:
217 if follow and not files:
228 ff = followfilter(onlyfirst=opts.get('follow_first'))
218 ff = followfilter(onlyfirst=opts.get('follow_first'))
229 def want(rev):
219 def want(rev):
230 if ff.match(rev) and rev in wanted:
220 if ff.match(rev) and rev in wanted:
231 return True
221 return True
232 return False
222 return False
233 else:
223 else:
234 def want(rev):
224 def want(rev):
235 return rev in wanted
225 return rev in wanted
236
226
237 for i, window in increasing_windows(0, len(revs)):
227 for i, window in increasing_windows(0, len(revs)):
238 yield 'window', revs[0] < revs[-1], revs[-1]
228 yield 'window', revs[0] < revs[-1], revs[-1]
239 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
229 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
240 srevs = list(nrevs)
230 srevs = list(nrevs)
241 srevs.sort()
231 srevs.sort()
242 for rev in srevs:
232 for rev in srevs:
243 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
233 fns = fncache.get(rev) or filter(matchfn, change(rev)[3])
244 yield 'add', rev, fns
234 yield 'add', rev, fns
245 for rev in nrevs:
235 for rev in nrevs:
246 yield 'iter', rev, None
236 yield 'iter', rev, None
247 return iterate(), getchange, matchfn
237 return iterate(), matchfn
248
238
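The walkchangerevs() docstring above describes a three-event protocol: gather data when an ('add', rev, fns) tuple arrives, display on ('iter', rev, None), and reset per-window state on 'window'. An illustrative consumer loop in that style (the matchfn half of the returned pair is ignored here, and the display line is made up):

def consume(changeiter):
    pending = {}                      # data gathered out of order
    for kind, rev, fns in changeiter:
        if kind == 'window':
            pending.clear()           # new window: reset per-window state
        elif kind == 'add':
            pending[rev] = fns        # gather data for possible display
        elif kind == 'iter':
            fns = pending.pop(rev, [])
            print('rev %d touched %d matching files' % (rev, len(fns)))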
249 def write_bundle(cg, filename=None, compress=True):
239 def write_bundle(cg, filename=None, compress=True):
250 """Write a bundle file and return its filename.
240 """Write a bundle file and return its filename.
251
241
252 Existing files will not be overwritten.
242 Existing files will not be overwritten.
253 If no filename is specified, a temporary file is created.
243 If no filename is specified, a temporary file is created.
254 bz2 compression can be turned off.
244 bz2 compression can be turned off.
255 The bundle file will be deleted in case of errors.
245 The bundle file will be deleted in case of errors.
256 """
246 """
257 class nocompress(object):
247 class nocompress(object):
258 def compress(self, x):
248 def compress(self, x):
259 return x
249 return x
260 def flush(self):
250 def flush(self):
261 return ""
251 return ""
262
252
263 fh = None
253 fh = None
264 cleanup = None
254 cleanup = None
265 try:
255 try:
266 if filename:
256 if filename:
267 if os.path.exists(filename):
257 if os.path.exists(filename):
268 raise util.Abort(_("file '%s' already exists") % filename)
258 raise util.Abort(_("file '%s' already exists") % filename)
269 fh = open(filename, "wb")
259 fh = open(filename, "wb")
270 else:
260 else:
271 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
261 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
272 fh = os.fdopen(fd, "wb")
262 fh = os.fdopen(fd, "wb")
273 cleanup = filename
263 cleanup = filename
274
264
275 if compress:
265 if compress:
276 fh.write("HG10")
266 fh.write("HG10")
277 z = bz2.BZ2Compressor(9)
267 z = bz2.BZ2Compressor(9)
278 else:
268 else:
279 fh.write("HG10UN")
269 fh.write("HG10UN")
280 z = nocompress()
270 z = nocompress()
281 # parse the changegroup data, otherwise we will block
271 # parse the changegroup data, otherwise we will block
282 # in case of sshrepo because we don't know the end of the stream
272 # in case of sshrepo because we don't know the end of the stream
283
273
284 # an empty chunkiter is the end of the changegroup
274 # an empty chunkiter is the end of the changegroup
285 empty = False
275 empty = False
286 while not empty:
276 while not empty:
287 empty = True
277 empty = True
288 for chunk in changegroup.chunkiter(cg):
278 for chunk in changegroup.chunkiter(cg):
289 empty = False
279 empty = False
290 fh.write(z.compress(changegroup.genchunk(chunk)))
280 fh.write(z.compress(changegroup.genchunk(chunk)))
291 fh.write(z.compress(changegroup.closechunk()))
281 fh.write(z.compress(changegroup.closechunk()))
292 fh.write(z.flush())
282 fh.write(z.flush())
293 cleanup = None
283 cleanup = None
294 return filename
284 return filename
295 finally:
285 finally:
296 if fh is not None:
286 if fh is not None:
297 fh.close()
287 fh.close()
298 if cleanup is not None:
288 if cleanup is not None:
299 os.unlink(cleanup)
289 os.unlink(cleanup)
300
290
301 def trimuser(ui, name, rev, revcache):
302 """trim the name of the user who committed a change"""
303 user = revcache.get(rev)
304 if user is None:
305 user = revcache[rev] = ui.shortuser(name)
306 return user
307
308 class changeset_printer(object):
291 class changeset_printer(object):
309 '''show changeset information when templating not requested.'''
292 '''show changeset information when templating not requested.'''
310
293
311 def __init__(self, ui, repo):
294 def __init__(self, ui, repo):
312 self.ui = ui
295 self.ui = ui
313 self.repo = repo
296 self.repo = repo
314
297
315 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
298 def show(self, rev=0, changenode=None, brinfo=None, copies=None):
316 '''show a single changeset or file revision'''
299 '''show a single changeset or file revision'''
317 log = self.repo.changelog
300 log = self.repo.changelog
318 if changenode is None:
301 if changenode is None:
319 changenode = log.node(rev)
302 changenode = log.node(rev)
320 elif not rev:
303 elif not rev:
321 rev = log.rev(changenode)
304 rev = log.rev(changenode)
322
305
323 if self.ui.quiet:
306 if self.ui.quiet:
324 self.ui.write("%d:%s\n" % (rev, short(changenode)))
307 self.ui.write("%d:%s\n" % (rev, short(changenode)))
325 return
308 return
326
309
327 changes = log.read(changenode)
310 changes = log.read(changenode)
328 date = util.datestr(changes[2])
311 date = util.datestr(changes[2])
329 extra = changes[5]
312 extra = changes[5]
330 branch = extra.get("branch")
313 branch = extra.get("branch")
331
314
332 hexfunc = self.ui.debugflag and hex or short
315 hexfunc = self.ui.debugflag and hex or short
333
316
334 parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
317 parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
335 if self.ui.debugflag or p != nullid]
318 if self.ui.debugflag or p != nullid]
336 if (not self.ui.debugflag and len(parents) == 1 and
319 if (not self.ui.debugflag and len(parents) == 1 and
337 parents[0][0] == rev-1):
320 parents[0][0] == rev-1):
338 parents = []
321 parents = []
339
322
340 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
323 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
341
324
342 if branch:
325 if branch:
343 self.ui.status(_("branch: %s\n") % branch)
326 self.ui.status(_("branch: %s\n") % branch)
344 for tag in self.repo.nodetags(changenode):
327 for tag in self.repo.nodetags(changenode):
345 self.ui.status(_("tag: %s\n") % tag)
328 self.ui.status(_("tag: %s\n") % tag)
346 for parent in parents:
329 for parent in parents:
347 self.ui.write(_("parent: %d:%s\n") % parent)
330 self.ui.write(_("parent: %d:%s\n") % parent)
348
331
349 if brinfo and changenode in brinfo:
332 if brinfo and changenode in brinfo:
350 br = brinfo[changenode]
333 br = brinfo[changenode]
351 self.ui.write(_("branch: %s\n") % " ".join(br))
334 self.ui.write(_("branch: %s\n") % " ".join(br))
352
335
353 self.ui.debug(_("manifest: %d:%s\n") %
336 self.ui.debug(_("manifest: %d:%s\n") %
354 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
337 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
355 self.ui.status(_("user: %s\n") % changes[1])
338 self.ui.status(_("user: %s\n") % changes[1])
356 self.ui.status(_("date: %s\n") % date)
339 self.ui.status(_("date: %s\n") % date)
357
340
358 if self.ui.debugflag:
341 if self.ui.debugflag:
359 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
342 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
360 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
343 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
361 files):
344 files):
362 if value:
345 if value:
363 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
346 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
364 elif changes[3]:
347 elif changes[3]:
365 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
348 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
366 if copies:
349 if copies:
367 copies = ['%s (%s)' % c for c in copies]
350 copies = ['%s (%s)' % c for c in copies]
368 self.ui.note(_("copies: %s\n") % ' '.join(copies))
351 self.ui.note(_("copies: %s\n") % ' '.join(copies))
369
352
370 if extra and self.ui.debugflag:
353 if extra and self.ui.debugflag:
371 extraitems = extra.items()
354 extraitems = extra.items()
372 extraitems.sort()
355 extraitems.sort()
373 for key, value in extraitems:
356 for key, value in extraitems:
374 self.ui.debug(_("extra: %s=%s\n")
357 self.ui.debug(_("extra: %s=%s\n")
375 % (key, value.encode('string_escape')))
358 % (key, value.encode('string_escape')))
376
359
377 description = changes[4].strip()
360 description = changes[4].strip()
378 if description:
361 if description:
379 if self.ui.verbose:
362 if self.ui.verbose:
380 self.ui.status(_("description:\n"))
363 self.ui.status(_("description:\n"))
381 self.ui.status(description)
364 self.ui.status(description)
382 self.ui.status("\n\n")
365 self.ui.status("\n\n")
383 else:
366 else:
384 self.ui.status(_("summary: %s\n") %
367 self.ui.status(_("summary: %s\n") %
385 description.splitlines()[0])
368 description.splitlines()[0])
386 self.ui.status("\n")
369 self.ui.status("\n")
387
370
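# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The parent-suppression rule used in show() above, in isolation: outside
# --debug, a single parent that is simply the previous revision is not
# worth printing, so it is dropped. Names here are hypothetical.
def _visible_parents(parents, rev, debug=False):
    # parents is a list of (rev, hash) pairs, as built in show()
    if not debug and len(parents) == 1 and parents[0][0] == rev - 1:
        return []
    return parents

# _visible_parents([(9, 'abc123')], 10) -> []
# _visible_parents([(7, 'abc123')], 10) -> [(7, 'abc123')]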
388 def show_changeset(ui, repo, opts):
371 def show_changeset(ui, repo, opts):
389 """show one changeset using template or regular display.
372 """show one changeset using template or regular display.
390
373
391 Display format will be the first non-empty hit of:
374 Display format will be the first non-empty hit of:
392 1. option 'template'
375 1. option 'template'
393 2. option 'style'
376 2. option 'style'
394 3. [ui] setting 'logtemplate'
377 3. [ui] setting 'logtemplate'
395 4. [ui] setting 'style'
378 4. [ui] setting 'style'
396 If all of these values are either unset or the empty string,
379 If all of these values are either unset or the empty string,
397 regular display via changeset_printer() is done.
380 regular display via changeset_printer() is done.
398 """
381 """
399 # options
382 # options
400 tmpl = opts.get('template')
383 tmpl = opts.get('template')
401 mapfile = None
384 mapfile = None
402 if tmpl:
385 if tmpl:
403 tmpl = templater.parsestring(tmpl, quoted=False)
386 tmpl = templater.parsestring(tmpl, quoted=False)
404 else:
387 else:
405 mapfile = opts.get('style')
388 mapfile = opts.get('style')
406 # ui settings
389 # ui settings
407 if not mapfile:
390 if not mapfile:
408 tmpl = ui.config('ui', 'logtemplate')
391 tmpl = ui.config('ui', 'logtemplate')
409 if tmpl:
392 if tmpl:
410 tmpl = templater.parsestring(tmpl)
393 tmpl = templater.parsestring(tmpl)
411 else:
394 else:
412 mapfile = ui.config('ui', 'style')
395 mapfile = ui.config('ui', 'style')
413
396
414 if tmpl or mapfile:
397 if tmpl or mapfile:
415 if mapfile:
398 if mapfile:
416 if not os.path.split(mapfile)[0]:
399 if not os.path.split(mapfile)[0]:
417 mapname = (templater.templatepath('map-cmdline.' + mapfile)
400 mapname = (templater.templatepath('map-cmdline.' + mapfile)
418 or templater.templatepath(mapfile))
401 or templater.templatepath(mapfile))
419 if mapname: mapfile = mapname
402 if mapname: mapfile = mapname
420 try:
403 try:
421 t = templater.changeset_templater(ui, repo, mapfile)
404 t = templater.changeset_templater(ui, repo, mapfile)
422 except SyntaxError, inst:
405 except SyntaxError, inst:
423 raise util.Abort(inst.args[0])
406 raise util.Abort(inst.args[0])
424 if tmpl: t.use_template(tmpl)
407 if tmpl: t.use_template(tmpl)
425 return t
408 return t
426 return changeset_printer(ui, repo)
409 return changeset_printer(ui, repo)
427
410
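# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The precedence order documented in show_changeset() above, reduced to a
# standalone helper; the function name and arguments are hypothetical.
def _pick_display(template_opt, style_opt, logtemplate_cfg, style_cfg):
    # 1. --template   2. --style   3. ui.logtemplate   4. ui.style
    for kind, value in (('template', template_opt), ('style', style_opt),
                        ('template', logtemplate_cfg), ('style', style_cfg)):
        if value:
            return kind, value
    return 'default', None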
428 def setremoteconfig(ui, opts):
411 def setremoteconfig(ui, opts):
429 "copy remote options to ui tree"
412 "copy remote options to ui tree"
430 if opts.get('ssh'):
413 if opts.get('ssh'):
431 ui.setconfig("ui", "ssh", opts['ssh'])
414 ui.setconfig("ui", "ssh", opts['ssh'])
432 if opts.get('remotecmd'):
415 if opts.get('remotecmd'):
433 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
416 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
434
417
435 def show_version(ui):
418 def show_version(ui):
436 """output version and copyright information"""
419 """output version and copyright information"""
437 ui.write(_("Mercurial Distributed SCM (version %s)\n")
420 ui.write(_("Mercurial Distributed SCM (version %s)\n")
438 % version.get_version())
421 % version.get_version())
439 ui.status(_(
422 ui.status(_(
440 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
423 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
441 "This is free software; see the source for copying conditions. "
424 "This is free software; see the source for copying conditions. "
442 "There is NO\nwarranty; "
425 "There is NO\nwarranty; "
443 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
426 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
444 ))
427 ))
445
428
446 def help_(ui, name=None, with_version=False):
429 def help_(ui, name=None, with_version=False):
447 """show help for a command, extension, or list of commands
430 """show help for a command, extension, or list of commands
448
431
449 With no arguments, print a list of commands and short help.
432 With no arguments, print a list of commands and short help.
450
433
451 Given a command name, print help for that command.
434 Given a command name, print help for that command.
452
435
453 Given an extension name, print help for that extension, and the
436 Given an extension name, print help for that extension, and the
454 commands it provides."""
437 commands it provides."""
455 option_lists = []
438 option_lists = []
456
439
457 def helpcmd(name):
440 def helpcmd(name):
458 if with_version:
441 if with_version:
459 show_version(ui)
442 show_version(ui)
460 ui.write('\n')
443 ui.write('\n')
461 aliases, i = findcmd(ui, name)
444 aliases, i = findcmd(ui, name)
462 # synopsis
445 # synopsis
463 ui.write("%s\n\n" % i[2])
446 ui.write("%s\n\n" % i[2])
464
447
465 # description
448 # description
466 doc = i[0].__doc__
449 doc = i[0].__doc__
467 if not doc:
450 if not doc:
468 doc = _("(No help text available)")
451 doc = _("(No help text available)")
469 if ui.quiet:
452 if ui.quiet:
470 doc = doc.splitlines(0)[0]
453 doc = doc.splitlines(0)[0]
471 ui.write("%s\n" % doc.rstrip())
454 ui.write("%s\n" % doc.rstrip())
472
455
473 if not ui.quiet:
456 if not ui.quiet:
474 # aliases
457 # aliases
475 if len(aliases) > 1:
458 if len(aliases) > 1:
476 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
459 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
477
460
478 # options
461 # options
479 if i[1]:
462 if i[1]:
480 option_lists.append(("options", i[1]))
463 option_lists.append(("options", i[1]))
481
464
482 def helplist(select=None):
465 def helplist(select=None):
483 h = {}
466 h = {}
484 cmds = {}
467 cmds = {}
485 for c, e in table.items():
468 for c, e in table.items():
486 f = c.split("|", 1)[0]
469 f = c.split("|", 1)[0]
487 if select and not select(f):
470 if select and not select(f):
488 continue
471 continue
489 if name == "shortlist" and not f.startswith("^"):
472 if name == "shortlist" and not f.startswith("^"):
490 continue
473 continue
491 f = f.lstrip("^")
474 f = f.lstrip("^")
492 if not ui.debugflag and f.startswith("debug"):
475 if not ui.debugflag and f.startswith("debug"):
493 continue
476 continue
494 doc = e[0].__doc__
477 doc = e[0].__doc__
495 if not doc:
478 if not doc:
496 doc = _("(No help text available)")
479 doc = _("(No help text available)")
497 h[f] = doc.splitlines(0)[0].rstrip()
480 h[f] = doc.splitlines(0)[0].rstrip()
498 cmds[f] = c.lstrip("^")
481 cmds[f] = c.lstrip("^")
499
482
500 fns = h.keys()
483 fns = h.keys()
501 fns.sort()
484 fns.sort()
502 m = max(map(len, fns))
485 m = max(map(len, fns))
503 for f in fns:
486 for f in fns:
504 if ui.verbose:
487 if ui.verbose:
505 commands = cmds[f].replace("|",", ")
488 commands = cmds[f].replace("|",", ")
506 ui.write(" %s:\n %s\n"%(commands, h[f]))
489 ui.write(" %s:\n %s\n"%(commands, h[f]))
507 else:
490 else:
508 ui.write(' %-*s %s\n' % (m, f, h[f]))
491 ui.write(' %-*s %s\n' % (m, f, h[f]))
509
492
510 def helpext(name):
493 def helpext(name):
511 try:
494 try:
512 mod = findext(name)
495 mod = findext(name)
513 except KeyError:
496 except KeyError:
514 raise UnknownCommand(name)
497 raise UnknownCommand(name)
515
498
516 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
499 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
517 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
500 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
518 for d in doc[1:]:
501 for d in doc[1:]:
519 ui.write(d, '\n')
502 ui.write(d, '\n')
520
503
521 ui.status('\n')
504 ui.status('\n')
522 if ui.verbose:
505 if ui.verbose:
523 ui.status(_('list of commands:\n\n'))
506 ui.status(_('list of commands:\n\n'))
524 else:
507 else:
525 ui.status(_('list of commands (use "hg help -v %s" '
508 ui.status(_('list of commands (use "hg help -v %s" '
526 'to show aliases and global options):\n\n') % name)
509 'to show aliases and global options):\n\n') % name)
527
510
528 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
511 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
529 helplist(modcmds.has_key)
512 helplist(modcmds.has_key)
530
513
531 if name and name != 'shortlist':
514 if name and name != 'shortlist':
532 try:
515 try:
533 helpcmd(name)
516 helpcmd(name)
534 except UnknownCommand:
517 except UnknownCommand:
535 helpext(name)
518 helpext(name)
536
519
537 else:
520 else:
538 # program name
521 # program name
539 if ui.verbose or with_version:
522 if ui.verbose or with_version:
540 show_version(ui)
523 show_version(ui)
541 else:
524 else:
542 ui.status(_("Mercurial Distributed SCM\n"))
525 ui.status(_("Mercurial Distributed SCM\n"))
543 ui.status('\n')
526 ui.status('\n')
544
527
545 # list of commands
528 # list of commands
546 if name == "shortlist":
529 if name == "shortlist":
547 ui.status(_('basic commands (use "hg help" '
530 ui.status(_('basic commands (use "hg help" '
548 'for the full list or option "-v" for details):\n\n'))
531 'for the full list or option "-v" for details):\n\n'))
549 elif ui.verbose:
532 elif ui.verbose:
550 ui.status(_('list of commands:\n\n'))
533 ui.status(_('list of commands:\n\n'))
551 else:
534 else:
552 ui.status(_('list of commands (use "hg help -v" '
535 ui.status(_('list of commands (use "hg help -v" '
553 'to show aliases and global options):\n\n'))
536 'to show aliases and global options):\n\n'))
554
537
555 helplist()
538 helplist()
556
539
557 # global options
540 # global options
558 if ui.verbose:
541 if ui.verbose:
559 option_lists.append(("global options", globalopts))
542 option_lists.append(("global options", globalopts))
560
543
561 # list all option lists
544 # list all option lists
562 opt_output = []
545 opt_output = []
563 for title, options in option_lists:
546 for title, options in option_lists:
564 opt_output.append(("\n%s:\n" % title, None))
547 opt_output.append(("\n%s:\n" % title, None))
565 for shortopt, longopt, default, desc in options:
548 for shortopt, longopt, default, desc in options:
566 if "DEPRECATED" in desc and not ui.verbose: continue
549 if "DEPRECATED" in desc and not ui.verbose: continue
567 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
550 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
568 longopt and " --%s" % longopt),
551 longopt and " --%s" % longopt),
569 "%s%s" % (desc,
552 "%s%s" % (desc,
570 default
553 default
571 and _(" (default: %s)") % default
554 and _(" (default: %s)") % default
572 or "")))
555 or "")))
573
556
574 if opt_output:
557 if opt_output:
575 opts_len = max([len(line[0]) for line in opt_output if line[1]])
558 opts_len = max([len(line[0]) for line in opt_output if line[1]])
576 for first, second in opt_output:
559 for first, second in opt_output:
577 if second:
560 if second:
578 ui.write(" %-*s %s\n" % (opts_len, first, second))
561 ui.write(" %-*s %s\n" % (opts_len, first, second))
579 else:
562 else:
580 ui.write("%s\n" % first)
563 ui.write("%s\n" % first)
581
564
582 # Commands start here, listed alphabetically
565 # Commands start here, listed alphabetically
583
566
584 def add(ui, repo, *pats, **opts):
567 def add(ui, repo, *pats, **opts):
585 """add the specified files on the next commit
568 """add the specified files on the next commit
586
569
587 Schedule files to be version controlled and added to the repository.
570 Schedule files to be version controlled and added to the repository.
588
571
589 The files will be added to the repository at the next commit.
572 The files will be added to the repository at the next commit.
590
573
591 If no names are given, add all files in the repository.
574 If no names are given, add all files in the repository.
592 """
575 """
593
576
594 names = []
577 names = []
595 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
578 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
596 if exact:
579 if exact:
597 if ui.verbose:
580 if ui.verbose:
598 ui.status(_('adding %s\n') % rel)
581 ui.status(_('adding %s\n') % rel)
599 names.append(abs)
582 names.append(abs)
600 elif repo.dirstate.state(abs) == '?':
583 elif repo.dirstate.state(abs) == '?':
601 ui.status(_('adding %s\n') % rel)
584 ui.status(_('adding %s\n') % rel)
602 names.append(abs)
585 names.append(abs)
603 if not opts.get('dry_run'):
586 if not opts.get('dry_run'):
604 repo.add(names)
587 repo.add(names)
605
588
606 def addremove(ui, repo, *pats, **opts):
589 def addremove(ui, repo, *pats, **opts):
607 """add all new files, delete all missing files
590 """add all new files, delete all missing files
608
591
609 Add all new files and remove all missing files from the repository.
592 Add all new files and remove all missing files from the repository.
610
593
611 New files are ignored if they match any of the patterns in .hgignore. As
594 New files are ignored if they match any of the patterns in .hgignore. As
612 with add, these changes take effect at the next commit.
595 with add, these changes take effect at the next commit.
613
596
614 Use the -s option to detect renamed files. With a parameter > 0,
597 Use the -s option to detect renamed files. With a parameter > 0,
615 this compares every removed file with every added file and records
598 this compares every removed file with every added file and records
616 those similar enough as renames. This option takes a percentage
599 those similar enough as renames. This option takes a percentage
617 between 0 (disabled) and 100 (files must be identical) as its
600 between 0 (disabled) and 100 (files must be identical) as its
618 parameter. Detecting renamed files this way can be expensive.
601 parameter. Detecting renamed files this way can be expensive.
619 """
602 """
620 sim = float(opts.get('similarity') or 0)
603 sim = float(opts.get('similarity') or 0)
621 if sim < 0 or sim > 100:
604 if sim < 0 or sim > 100:
622 raise util.Abort(_('similarity must be between 0 and 100'))
605 raise util.Abort(_('similarity must be between 0 and 100'))
623 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
606 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
624
607
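# --- illustrative sketch (editor's addition, not part of commands.py) ---
# How the -s/--similarity percentage described above maps onto the 0.0-1.0
# fraction handed to cmdutil.addremove(); values outside 0..100 abort.
def _similarity_fraction(opt_value):
    sim = float(opt_value or 0)
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

assert _similarity_fraction('75') == 0.75
assert _similarity_fraction(None) == 0.0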
625 def annotate(ui, repo, *pats, **opts):
608 def annotate(ui, repo, *pats, **opts):
626 """show changeset information per file line
609 """show changeset information per file line
627
610
628 List changes in files, showing the revision id responsible for each line.
611 List changes in files, showing the revision id responsible for each line.
629
612
630 This command is useful for discovering who made a change or when a change took
613 This command is useful for discovering who made a change or when a change took
631 place.
614 place.
632
615
633 Without the -a option, annotate will avoid processing files it
616 Without the -a option, annotate will avoid processing files it
634 detects as binary. With -a, annotate will generate an annotation
617 detects as binary. With -a, annotate will generate an annotation
635 anyway, probably with undesirable results.
618 anyway, probably with undesirable results.
636 """
619 """
637 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
620 getdate = util.cachefunc(lambda x: util.datestr(x.date()))
638
621
639 if not pats:
622 if not pats:
640 raise util.Abort(_('at least one file name or pattern required'))
623 raise util.Abort(_('at least one file name or pattern required'))
641
624
642 opmap = [['user', lambda x: ui.shortuser(x.user())],
625 opmap = [['user', lambda x: ui.shortuser(x.user())],
643 ['number', lambda x: str(x.rev())],
626 ['number', lambda x: str(x.rev())],
644 ['changeset', lambda x: short(x.node())],
627 ['changeset', lambda x: short(x.node())],
645 ['date', getdate], ['follow', lambda x: x.path()]]
628 ['date', getdate], ['follow', lambda x: x.path()]]
646 if (not opts['user'] and not opts['changeset'] and not opts['date']
629 if (not opts['user'] and not opts['changeset'] and not opts['date']
647 and not opts['follow']):
630 and not opts['follow']):
648 opts['number'] = 1
631 opts['number'] = 1
649
632
650 ctx = repo.changectx(opts['rev'])
633 ctx = repo.changectx(opts['rev'])
651
634
652 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
635 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
653 node=ctx.node()):
636 node=ctx.node()):
654 fctx = ctx.filectx(abs)
637 fctx = ctx.filectx(abs)
655 if not opts['text'] and util.binary(fctx.data()):
638 if not opts['text'] and util.binary(fctx.data()):
656 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
639 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
657 continue
640 continue
658
641
659 lines = fctx.annotate(follow=opts.get('follow'))
642 lines = fctx.annotate(follow=opts.get('follow'))
660 pieces = []
643 pieces = []
661
644
662 for o, f in opmap:
645 for o, f in opmap:
663 if opts[o]:
646 if opts[o]:
664 l = [f(n) for n, dummy in lines]
647 l = [f(n) for n, dummy in lines]
665 if l:
648 if l:
666 m = max(map(len, l))
649 m = max(map(len, l))
667 pieces.append(["%*s" % (m, x) for x in l])
650 pieces.append(["%*s" % (m, x) for x in l])
668
651
669 if pieces:
652 if pieces:
670 for p, l in zip(zip(*pieces), lines):
653 for p, l in zip(zip(*pieces), lines):
671 ui.write("%s: %s" % (" ".join(p), l[1]))
654 ui.write("%s: %s" % (" ".join(p), l[1]))
672
655
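# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The column-alignment idea used in annotate() above: each requested field
# is right-padded to the width of its longest value, then the per-line
# pieces are zipped back together.
def _pad_columns(columns):
    padded = []
    for col in columns:
        width = max(len(v) for v in col)
        padded.append(['%*s' % (width, v) for v in col])
    return [' '.join(parts) for parts in zip(*padded)]

# _pad_columns([['12', '3'], ['mpm', 'bos']]) -> ['12 mpm', ' 3 bos']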
673 def archive(ui, repo, dest, **opts):
656 def archive(ui, repo, dest, **opts):
674 '''create unversioned archive of a repository revision
657 '''create unversioned archive of a repository revision
675
658
676 By default, the revision used is the parent of the working
659 By default, the revision used is the parent of the working
677 directory; use "-r" to specify a different revision.
660 directory; use "-r" to specify a different revision.
678
661
679 To specify the type of archive to create, use "-t". Valid
662 To specify the type of archive to create, use "-t". Valid
680 types are:
663 types are:
681
664
682 "files" (default): a directory full of files
665 "files" (default): a directory full of files
683 "tar": tar archive, uncompressed
666 "tar": tar archive, uncompressed
684 "tbz2": tar archive, compressed using bzip2
667 "tbz2": tar archive, compressed using bzip2
685 "tgz": tar archive, compressed using gzip
668 "tgz": tar archive, compressed using gzip
686 "uzip": zip archive, uncompressed
669 "uzip": zip archive, uncompressed
687 "zip": zip archive, compressed using deflate
670 "zip": zip archive, compressed using deflate
688
671
689 The exact name of the destination archive or directory is given
672 The exact name of the destination archive or directory is given
690 using a format string; see "hg help export" for details.
673 using a format string; see "hg help export" for details.
691
674
692 Each member added to an archive file has a directory prefix
675 Each member added to an archive file has a directory prefix
693 prepended. Use "-p" to specify a format string for the prefix.
676 prepended. Use "-p" to specify a format string for the prefix.
694 The default is the basename of the archive, with suffixes removed.
677 The default is the basename of the archive, with suffixes removed.
695 '''
678 '''
696
679
697 node = repo.changectx(opts['rev']).node()
680 node = repo.changectx(opts['rev']).node()
698 dest = cmdutil.make_filename(repo, dest, node)
681 dest = cmdutil.make_filename(repo, dest, node)
699 if os.path.realpath(dest) == repo.root:
682 if os.path.realpath(dest) == repo.root:
700 raise util.Abort(_('repository root cannot be destination'))
683 raise util.Abort(_('repository root cannot be destination'))
701 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
684 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
702 kind = opts.get('type') or 'files'
685 kind = opts.get('type') or 'files'
703 prefix = opts['prefix']
686 prefix = opts['prefix']
704 if dest == '-':
687 if dest == '-':
705 if kind == 'files':
688 if kind == 'files':
706 raise util.Abort(_('cannot archive plain files to stdout'))
689 raise util.Abort(_('cannot archive plain files to stdout'))
707 dest = sys.stdout
690 dest = sys.stdout
708 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
691 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
709 prefix = cmdutil.make_filename(repo, prefix, node)
692 prefix = cmdutil.make_filename(repo, prefix, node)
710 archival.archive(repo, dest, node, kind, not opts['no_decode'],
693 archival.archive(repo, dest, node, kind, not opts['no_decode'],
711 matchfn, prefix)
694 matchfn, prefix)
712
695
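# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The fallback prefix used above when archiving to stdout: the basename of
# the repository root plus '-%h', where '%h' later expands to the short hash.
import os

def _stdout_prefix(repo_root):
    return os.path.basename(repo_root) + '-%h'

assert _stdout_prefix('/home/me/hg') == 'hg-%h'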
713 def backout(ui, repo, rev, **opts):
696 def backout(ui, repo, rev, **opts):
714 '''reverse effect of earlier changeset
697 '''reverse effect of earlier changeset
715
698
716 Commit the backed out changes as a new changeset. The new
699 Commit the backed out changes as a new changeset. The new
717 changeset is a child of the backed out changeset.
700 changeset is a child of the backed out changeset.
718
701
719 If you back out a changeset other than the tip, a new head is
702 If you back out a changeset other than the tip, a new head is
720 created. This head is the parent of the working directory. If
703 created. This head is the parent of the working directory. If
721 you back out an old changeset, your working directory will appear
704 you back out an old changeset, your working directory will appear
722 old after the backout. You should merge the backout changeset
705 old after the backout. You should merge the backout changeset
723 with another head.
706 with another head.
724
707
725 The --merge option remembers the parent of the working directory
708 The --merge option remembers the parent of the working directory
726 before starting the backout, then merges the new head with that
709 before starting the backout, then merges the new head with that
727 changeset afterwards. This saves you from doing the merge by
710 changeset afterwards. This saves you from doing the merge by
728 hand. The result of this merge is not committed, as for a normal
711 hand. The result of this merge is not committed, as for a normal
729 merge.'''
712 merge.'''
730
713
731 bail_if_changed(repo)
714 bail_if_changed(repo)
732 op1, op2 = repo.dirstate.parents()
715 op1, op2 = repo.dirstate.parents()
733 if op2 != nullid:
716 if op2 != nullid:
734 raise util.Abort(_('outstanding uncommitted merge'))
717 raise util.Abort(_('outstanding uncommitted merge'))
735 node = repo.lookup(rev)
718 node = repo.lookup(rev)
736 p1, p2 = repo.changelog.parents(node)
719 p1, p2 = repo.changelog.parents(node)
737 if p1 == nullid:
720 if p1 == nullid:
738 raise util.Abort(_('cannot back out a change with no parents'))
721 raise util.Abort(_('cannot back out a change with no parents'))
739 if p2 != nullid:
722 if p2 != nullid:
740 if not opts['parent']:
723 if not opts['parent']:
741 raise util.Abort(_('cannot back out a merge changeset without '
724 raise util.Abort(_('cannot back out a merge changeset without '
742 '--parent'))
725 '--parent'))
743 p = repo.lookup(opts['parent'])
726 p = repo.lookup(opts['parent'])
744 if p not in (p1, p2):
727 if p not in (p1, p2):
745 raise util.Abort(_('%s is not a parent of %s') %
728 raise util.Abort(_('%s is not a parent of %s') %
746 (short(p), short(node)))
729 (short(p), short(node)))
747 parent = p
730 parent = p
748 else:
731 else:
749 if opts['parent']:
732 if opts['parent']:
750 raise util.Abort(_('cannot use --parent on non-merge changeset'))
733 raise util.Abort(_('cannot use --parent on non-merge changeset'))
751 parent = p1
734 parent = p1
752 hg.clean(repo, node, show_stats=False)
735 hg.clean(repo, node, show_stats=False)
753 revert_opts = opts.copy()
736 revert_opts = opts.copy()
754 revert_opts['all'] = True
737 revert_opts['all'] = True
755 revert_opts['rev'] = hex(parent)
738 revert_opts['rev'] = hex(parent)
756 revert(ui, repo, **revert_opts)
739 revert(ui, repo, **revert_opts)
757 commit_opts = opts.copy()
740 commit_opts = opts.copy()
758 commit_opts['addremove'] = False
741 commit_opts['addremove'] = False
759 if not commit_opts['message'] and not commit_opts['logfile']:
742 if not commit_opts['message'] and not commit_opts['logfile']:
760 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
743 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
761 commit_opts['force_editor'] = True
744 commit_opts['force_editor'] = True
762 commit(ui, repo, **commit_opts)
745 commit(ui, repo, **commit_opts)
763 def nice(node):
746 def nice(node):
764 return '%d:%s' % (repo.changelog.rev(node), short(node))
747 return '%d:%s' % (repo.changelog.rev(node), short(node))
765 ui.status(_('changeset %s backs out changeset %s\n') %
748 ui.status(_('changeset %s backs out changeset %s\n') %
766 (nice(repo.changelog.tip()), nice(node)))
749 (nice(repo.changelog.tip()), nice(node)))
767 if op1 != node:
750 if op1 != node:
768 if opts['merge']:
751 if opts['merge']:
769 ui.status(_('merging with changeset %s\n') % nice(op1))
752 ui.status(_('merging with changeset %s\n') % nice(op1))
770 n = _lookup(repo, hex(op1))
753 n = _lookup(repo, hex(op1))
771 hg.merge(repo, n)
754 hg.merge(repo, n)
772 else:
755 else:
773 ui.status(_('the backout changeset is a new head - '
756 ui.status(_('the backout changeset is a new head - '
774 'do not forget to merge\n'))
757 'do not forget to merge\n'))
775 ui.status(_('(use "backout --merge" '
758 ui.status(_('(use "backout --merge" '
776 'if you want to auto-merge)\n'))
759 'if you want to auto-merge)\n'))
777
760
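# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The --parent rule enforced in backout() above: a merge changeset needs an
# explicit parent to revert to, and it must be one of its two parents; a
# non-merge changeset must not be given one. None stands in for nullid here.
def _backout_parent(p1, p2, requested, null=None):
    if p2 != null:
        if requested is None:
            raise ValueError('cannot back out a merge changeset without --parent')
        if requested not in (p1, p2):
            raise ValueError('requested revision is not a parent')
        return requested
    if requested is not None:
        raise ValueError('cannot use --parent on non-merge changeset')
    return p1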
778 def branch(ui, repo, label=None):
761 def branch(ui, repo, label=None):
779 """set or show the current branch name
762 """set or show the current branch name
780
763
781 With <name>, set the current branch name. Otherwise, show the
764 With <name>, set the current branch name. Otherwise, show the
782 current branch name.
765 current branch name.
783 """
766 """
784
767
785 if label is not None:
768 if label is not None:
786 repo.opener("branch", "w").write(label)
769 repo.opener("branch", "w").write(label)
787 else:
770 else:
788 b = repo.workingctx().branch()
771 b = repo.workingctx().branch()
789 if b:
772 if b:
790 ui.write("%s\n" % b)
773 ui.write("%s\n" % b)
791
774
792 def branches(ui, repo):
775 def branches(ui, repo):
793 """list repository named branches
776 """list repository named branches
794
777
795 List the repository's named branches.
778 List the repository's named branches.
796 """
779 """
797 b = repo.branchtags()
780 b = repo.branchtags()
798 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
781 l = [(-repo.changelog.rev(n), n, t) for t,n in b.items()]
799 l.sort()
782 l.sort()
800 for r, n, t in l:
783 for r, n, t in l:
801 hexfunc = ui.debugflag and hex or short
784 hexfunc = ui.debugflag and hex or short
802 if ui.quiet:
785 if ui.quiet:
803 ui.write("%s\n" % t)
786 ui.write("%s\n" % t)
804 else:
787 else:
805 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
788 ui.write("%-30s %s:%s\n" % (t, -r, hexfunc(n)))
806
789
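# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The sort trick used in branches() above: negating the revision number in
# the first tuple slot makes a plain ascending sort list the most recently
# committed branch heads first.
heads = [(5, 'n5', 'default'), (3, 'n3', 'stable'), (9, 'n9', 'hotfix')]
ordered = [(-rev, node, tag) for rev, node, tag in heads]
ordered.sort()
assert [t for _r, _n, t in ordered] == ['hotfix', 'default', 'stable']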
807 def bundle(ui, repo, fname, dest=None, **opts):
790 def bundle(ui, repo, fname, dest=None, **opts):
808 """create a changegroup file
791 """create a changegroup file
809
792
810 Generate a compressed changegroup file collecting changesets not
793 Generate a compressed changegroup file collecting changesets not
811 found in the other repository.
794 found in the other repository.
812
795
813 If no destination repository is specified, the destination is assumed
796 If no destination repository is specified, the destination is assumed
814 to have all the nodes specified by one or more --base parameters.
797 to have all the nodes specified by one or more --base parameters.
815
798
816 The bundle file can then be transferred using conventional means and
799 The bundle file can then be transferred using conventional means and
817 applied to another repository with the unbundle or pull command.
800 applied to another repository with the unbundle or pull command.
818 This is useful when direct push and pull are not available or when
801 This is useful when direct push and pull are not available or when
819 exporting an entire repository is undesirable.
802 exporting an entire repository is undesirable.
820
803
821 Applying bundles preserves all changeset contents including
804 Applying bundles preserves all changeset contents including
822 permissions, copy/rename information, and revision history.
805 permissions, copy/rename information, and revision history.
823 """
806 """
824 revs = opts.get('rev') or None
807 revs = opts.get('rev') or None
825 if revs:
808 if revs:
826 revs = [repo.lookup(rev) for rev in revs]
809 revs = [repo.lookup(rev) for rev in revs]
827 base = opts.get('base')
810 base = opts.get('base')
828 if base:
811 if base:
829 if dest:
812 if dest:
830 raise util.Abort(_("--base is incompatible with specifying "
813 raise util.Abort(_("--base is incompatible with specifying "
831 "a destination"))
814 "a destination"))
832 base = [repo.lookup(rev) for rev in base]
815 base = [repo.lookup(rev) for rev in base]
833 # create the right base
816 # create the right base
834 # XXX: nodesbetween / changegroup* should be "fixed" instead
817 # XXX: nodesbetween / changegroup* should be "fixed" instead
835 o = []
818 o = []
836 has_set = sets.Set(base)
819 has_set = sets.Set(base)
837 for n in base:
820 for n in base:
838 has_set.update(repo.changelog.reachable(n))
821 has_set.update(repo.changelog.reachable(n))
839 if revs:
822 if revs:
840 visit = list(revs)
823 visit = list(revs)
841 else:
824 else:
842 visit = repo.changelog.heads()
825 visit = repo.changelog.heads()
843 seen = sets.Set(visit)
826 seen = sets.Set(visit)
844 while visit:
827 while visit:
845 n = visit.pop(0)
828 n = visit.pop(0)
846 parents = [p for p in repo.changelog.parents(n)
829 parents = [p for p in repo.changelog.parents(n)
847 if p != nullid and p not in has_set]
830 if p != nullid and p not in has_set]
848 if len(parents) == 0:
831 if len(parents) == 0:
849 o.insert(0, n)
832 o.insert(0, n)
850 else:
833 else:
851 for p in parents:
834 for p in parents:
852 if p not in seen:
835 if p not in seen:
853 seen.add(p)
836 seen.add(p)
854 visit.append(p)
837 visit.append(p)
855 else:
838 else:
856 setremoteconfig(ui, opts)
839 setremoteconfig(ui, opts)
857 dest = ui.expandpath(dest or 'default-push', dest or 'default')
840 dest = ui.expandpath(dest or 'default-push', dest or 'default')
858 other = hg.repository(ui, dest)
841 other = hg.repository(ui, dest)
859 o = repo.findoutgoing(other, force=opts['force'])
842 o = repo.findoutgoing(other, force=opts['force'])
860
843
861 if revs:
844 if revs:
862 cg = repo.changegroupsubset(o, revs, 'bundle')
845 cg = repo.changegroupsubset(o, revs, 'bundle')
863 else:
846 else:
864 cg = repo.changegroup(o, 'bundle')
847 cg = repo.changegroup(o, 'bundle')
865 write_bundle(cg, fname)
848 write_bundle(cg, fname)
866
849
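# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The --base walk coded above, in isolation: starting from the requested
# heads, follow parents until everything left is reachable from a --base
# node; nodes whose remaining parents are all "already had" become the
# changegroup roots, collected oldest-first. None stands in for nullid.
def _outgoing_roots(parents_of, start, has_set):
    roots, seen, visit = [], set(start), list(start)
    while visit:
        n = visit.pop(0)
        ps = [p for p in parents_of(n) if p is not None and p not in has_set]
        if not ps:
            roots.insert(0, n)
        else:
            for p in ps:
                if p not in seen:
                    seen.add(p)
                    visit.append(p)
    return roots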
867 def cat(ui, repo, file1, *pats, **opts):
850 def cat(ui, repo, file1, *pats, **opts):
868 """output the latest or given revisions of files
851 """output the latest or given revisions of files
869
852
870 Print the specified files as they were at the given revision.
853 Print the specified files as they were at the given revision.
871 If no revision is given, the parent of the working directory is used, or tip
854 If no revision is given, the parent of the working directory is used, or tip
872 if no revision is checked out.
855 if no revision is checked out.
873
856
874 Output may be to a file, in which case the name of the file is
857 Output may be to a file, in which case the name of the file is
875 given using a format string. The formatting rules are the same as
858 given using a format string. The formatting rules are the same as
876 for the export command, with the following additions:
859 for the export command, with the following additions:
877
860
878 %s basename of file being printed
861 %s basename of file being printed
879 %d dirname of file being printed, or '.' if in repo root
862 %d dirname of file being printed, or '.' if in repo root
880 %p root-relative path name of file being printed
863 %p root-relative path name of file being printed
881 """
864 """
882 ctx = repo.changectx(opts['rev'])
865 ctx = repo.changectx(opts['rev'])
883 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
866 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
884 ctx.node()):
867 ctx.node()):
885 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
868 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
886 fp.write(ctx.filectx(abs).data())
869 fp.write(ctx.filectx(abs).data())
887
870
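# --- illustrative sketch (editor's addition, not the real cmdutil.make_file) ---
# How the three extra format keys documented in cat() above could be expanded
# for one file being printed; the helper name is hypothetical.
import os

def _expand_output_name(fmt, repo_rel_path):
    mapping = {'s': os.path.basename(repo_rel_path),        # %s  basename
               'd': os.path.dirname(repo_rel_path) or '.',  # %d  dirname
               'p': repo_rel_path}                          # %p  relative path
    out = []
    i = 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt) and fmt[i + 1] in mapping:
            out.append(mapping[fmt[i + 1]])
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

assert _expand_output_name('%d/%s.orig', 'mercurial/commands.py') == \
       'mercurial/commands.py.orig'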
888 def clone(ui, source, dest=None, **opts):
871 def clone(ui, source, dest=None, **opts):
889 """make a copy of an existing repository
872 """make a copy of an existing repository
890
873
891 Create a copy of an existing repository in a new directory.
874 Create a copy of an existing repository in a new directory.
892
875
893 If no destination directory name is specified, it defaults to the
876 If no destination directory name is specified, it defaults to the
894 basename of the source.
877 basename of the source.
895
878
896 The location of the source is added to the new repository's
879 The location of the source is added to the new repository's
897 .hg/hgrc file, as the default to be used for future pulls.
880 .hg/hgrc file, as the default to be used for future pulls.
898
881
899 For efficiency, hardlinks are used for cloning whenever the source
882 For efficiency, hardlinks are used for cloning whenever the source
900 and destination are on the same filesystem (note this applies only
883 and destination are on the same filesystem (note this applies only
901 to the repository data, not to the checked out files). Some
884 to the repository data, not to the checked out files). Some
902 filesystems, such as AFS, implement hardlinking incorrectly, but
885 filesystems, such as AFS, implement hardlinking incorrectly, but
903 do not report errors. In these cases, use the --pull option to
886 do not report errors. In these cases, use the --pull option to
904 avoid hardlinking.
887 avoid hardlinking.
905
888
906 You can safely clone repositories and checked out files using full
889 You can safely clone repositories and checked out files using full
907 hardlinks with
890 hardlinks with
908
891
909 $ cp -al REPO REPOCLONE
892 $ cp -al REPO REPOCLONE
910
893
911 which is the fastest way to clone. However, the operation is not
894 which is the fastest way to clone. However, the operation is not
912 atomic (making sure REPO is not modified during the operation is
895 atomic (making sure REPO is not modified during the operation is
913 up to you) and you have to make sure your editor breaks hardlinks
896 up to you) and you have to make sure your editor breaks hardlinks
914 (Emacs and most Linux Kernel tools do so).
897 (Emacs and most Linux Kernel tools do so).
915
898
916 If you use the -r option to clone up to a specific revision, no
899 If you use the -r option to clone up to a specific revision, no
917 subsequent revisions will be present in the cloned repository.
900 subsequent revisions will be present in the cloned repository.
918 This option implies --pull, even on local repositories.
901 This option implies --pull, even on local repositories.
919
902
920 See pull for valid source format details.
903 See pull for valid source format details.
921
904
922 It is possible to specify an ssh:// URL as the destination, but no
905 It is possible to specify an ssh:// URL as the destination, but no
923 .hg/hgrc will be created on the remote side. Look at the help text
906 .hg/hgrc will be created on the remote side. Look at the help text
924 for the pull command for important details about ssh:// URLs.
907 for the pull command for important details about ssh:// URLs.
925 """
908 """
926 setremoteconfig(ui, opts)
909 setremoteconfig(ui, opts)
927 hg.clone(ui, ui.expandpath(source), dest,
910 hg.clone(ui, ui.expandpath(source), dest,
928 pull=opts['pull'],
911 pull=opts['pull'],
929 stream=opts['uncompressed'],
912 stream=opts['uncompressed'],
930 rev=opts['rev'],
913 rev=opts['rev'],
931 update=not opts['noupdate'])
914 update=not opts['noupdate'])
932
915
933 def commit(ui, repo, *pats, **opts):
916 def commit(ui, repo, *pats, **opts):
934 """commit the specified files or all outstanding changes
917 """commit the specified files or all outstanding changes
935
918
936 Commit changes to the given files into the repository.
919 Commit changes to the given files into the repository.
937
920
938 If a list of files is omitted, all changes reported by "hg status"
921 If a list of files is omitted, all changes reported by "hg status"
939 will be committed.
922 will be committed.
940
923
941 If no commit message is specified, the editor configured in your hgrc
924 If no commit message is specified, the editor configured in your hgrc
942 or in the EDITOR environment variable is started so you can enter a message.
925 or in the EDITOR environment variable is started so you can enter a message.
943 """
926 """
944 message = logmessage(opts)
927 message = logmessage(opts)
945
928
946 if opts['addremove']:
929 if opts['addremove']:
947 cmdutil.addremove(repo, pats, opts)
930 cmdutil.addremove(repo, pats, opts)
948 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
931 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
949 if pats:
932 if pats:
950 modified, added, removed = repo.status(files=fns, match=match)[:3]
933 modified, added, removed = repo.status(files=fns, match=match)[:3]
951 files = modified + added + removed
934 files = modified + added + removed
952 else:
935 else:
953 files = []
936 files = []
954 try:
937 try:
955 repo.commit(files, message, opts['user'], opts['date'], match,
938 repo.commit(files, message, opts['user'], opts['date'], match,
956 force_editor=opts.get('force_editor'))
939 force_editor=opts.get('force_editor'))
957 except ValueError, inst:
940 except ValueError, inst:
958 raise util.Abort(str(inst))
941 raise util.Abort(str(inst))
959
942
960 def docopy(ui, repo, pats, opts, wlock):
943 def docopy(ui, repo, pats, opts, wlock):
961 # called with the repo lock held
944 # called with the repo lock held
962 cwd = repo.getcwd()
945 cwd = repo.getcwd()
963 errors = 0
946 errors = 0
964 copied = []
947 copied = []
965 targets = {}
948 targets = {}
966
949
967 def okaytocopy(abs, rel, exact):
950 def okaytocopy(abs, rel, exact):
968 reasons = {'?': _('is not managed'),
951 reasons = {'?': _('is not managed'),
969 'a': _('has been marked for add'),
952 'a': _('has been marked for add'),
970 'r': _('has been marked for remove')}
953 'r': _('has been marked for remove')}
971 state = repo.dirstate.state(abs)
954 state = repo.dirstate.state(abs)
972 reason = reasons.get(state)
955 reason = reasons.get(state)
973 if reason:
956 if reason:
974 if state == 'a':
957 if state == 'a':
975 origsrc = repo.dirstate.copied(abs)
958 origsrc = repo.dirstate.copied(abs)
976 if origsrc is not None:
959 if origsrc is not None:
977 return origsrc
960 return origsrc
978 if exact:
961 if exact:
979 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
962 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
980 else:
963 else:
981 return abs
964 return abs
982
965
983 def copy(origsrc, abssrc, relsrc, target, exact):
966 def copy(origsrc, abssrc, relsrc, target, exact):
984 abstarget = util.canonpath(repo.root, cwd, target)
967 abstarget = util.canonpath(repo.root, cwd, target)
985 reltarget = util.pathto(cwd, abstarget)
968 reltarget = util.pathto(cwd, abstarget)
986 prevsrc = targets.get(abstarget)
969 prevsrc = targets.get(abstarget)
987 if prevsrc is not None:
970 if prevsrc is not None:
988 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
971 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
989 (reltarget, abssrc, prevsrc))
972 (reltarget, abssrc, prevsrc))
990 return
973 return
991 if (not opts['after'] and os.path.exists(reltarget) or
974 if (not opts['after'] and os.path.exists(reltarget) or
992 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
975 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
993 if not opts['force']:
976 if not opts['force']:
994 ui.warn(_('%s: not overwriting - file exists\n') %
977 ui.warn(_('%s: not overwriting - file exists\n') %
995 reltarget)
978 reltarget)
996 return
979 return
997 if not opts['after'] and not opts.get('dry_run'):
980 if not opts['after'] and not opts.get('dry_run'):
998 os.unlink(reltarget)
981 os.unlink(reltarget)
999 if opts['after']:
982 if opts['after']:
1000 if not os.path.exists(reltarget):
983 if not os.path.exists(reltarget):
1001 return
984 return
1002 else:
985 else:
1003 targetdir = os.path.dirname(reltarget) or '.'
986 targetdir = os.path.dirname(reltarget) or '.'
1004 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
987 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1005 os.makedirs(targetdir)
988 os.makedirs(targetdir)
1006 try:
989 try:
1007 restore = repo.dirstate.state(abstarget) == 'r'
990 restore = repo.dirstate.state(abstarget) == 'r'
1008 if restore and not opts.get('dry_run'):
991 if restore and not opts.get('dry_run'):
1009 repo.undelete([abstarget], wlock)
992 repo.undelete([abstarget], wlock)
1010 try:
993 try:
1011 if not opts.get('dry_run'):
994 if not opts.get('dry_run'):
1012 shutil.copyfile(relsrc, reltarget)
995 shutil.copyfile(relsrc, reltarget)
1013 shutil.copymode(relsrc, reltarget)
996 shutil.copymode(relsrc, reltarget)
1014 restore = False
997 restore = False
1015 finally:
998 finally:
1016 if restore:
999 if restore:
1017 repo.remove([abstarget], wlock)
1000 repo.remove([abstarget], wlock)
1018 except shutil.Error, inst:
1001 except shutil.Error, inst:
1019 raise util.Abort(str(inst))
1002 raise util.Abort(str(inst))
1020 except IOError, inst:
1003 except IOError, inst:
1021 if inst.errno == errno.ENOENT:
1004 if inst.errno == errno.ENOENT:
1022 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1005 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1023 else:
1006 else:
1024 ui.warn(_('%s: cannot copy - %s\n') %
1007 ui.warn(_('%s: cannot copy - %s\n') %
1025 (relsrc, inst.strerror))
1008 (relsrc, inst.strerror))
1026 errors += 1
1009 errors += 1
1027 return
1010 return
1028 if ui.verbose or not exact:
1011 if ui.verbose or not exact:
1029 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1012 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1030 targets[abstarget] = abssrc
1013 targets[abstarget] = abssrc
1031 if abstarget != origsrc and not opts.get('dry_run'):
1014 if abstarget != origsrc and not opts.get('dry_run'):
1032 repo.copy(origsrc, abstarget, wlock)
1015 repo.copy(origsrc, abstarget, wlock)
1033 copied.append((abssrc, relsrc, exact))
1016 copied.append((abssrc, relsrc, exact))
1034
1017
1035 def targetpathfn(pat, dest, srcs):
1018 def targetpathfn(pat, dest, srcs):
1036 if os.path.isdir(pat):
1019 if os.path.isdir(pat):
1037 abspfx = util.canonpath(repo.root, cwd, pat)
1020 abspfx = util.canonpath(repo.root, cwd, pat)
1038 if destdirexists:
1021 if destdirexists:
1039 striplen = len(os.path.split(abspfx)[0])
1022 striplen = len(os.path.split(abspfx)[0])
1040 else:
1023 else:
1041 striplen = len(abspfx)
1024 striplen = len(abspfx)
1042 if striplen:
1025 if striplen:
1043 striplen += len(os.sep)
1026 striplen += len(os.sep)
1044 res = lambda p: os.path.join(dest, p[striplen:])
1027 res = lambda p: os.path.join(dest, p[striplen:])
1045 elif destdirexists:
1028 elif destdirexists:
1046 res = lambda p: os.path.join(dest, os.path.basename(p))
1029 res = lambda p: os.path.join(dest, os.path.basename(p))
1047 else:
1030 else:
1048 res = lambda p: dest
1031 res = lambda p: dest
1049 return res
1032 return res
1050
1033
1051 def targetpathafterfn(pat, dest, srcs):
1034 def targetpathafterfn(pat, dest, srcs):
1052 if util.patkind(pat, None)[0]:
1035 if util.patkind(pat, None)[0]:
1053 # a mercurial pattern
1036 # a mercurial pattern
1054 res = lambda p: os.path.join(dest, os.path.basename(p))
1037 res = lambda p: os.path.join(dest, os.path.basename(p))
1055 else:
1038 else:
1056 abspfx = util.canonpath(repo.root, cwd, pat)
1039 abspfx = util.canonpath(repo.root, cwd, pat)
1057 if len(abspfx) < len(srcs[0][0]):
1040 if len(abspfx) < len(srcs[0][0]):
1058 # A directory. Either the target path contains the last
1041 # A directory. Either the target path contains the last
1059 # component of the source path or it does not.
1042 # component of the source path or it does not.
1060 def evalpath(striplen):
1043 def evalpath(striplen):
1061 score = 0
1044 score = 0
1062 for s in srcs:
1045 for s in srcs:
1063 t = os.path.join(dest, s[0][striplen:])
1046 t = os.path.join(dest, s[0][striplen:])
1064 if os.path.exists(t):
1047 if os.path.exists(t):
1065 score += 1
1048 score += 1
1066 return score
1049 return score
1067
1050
1068 striplen = len(abspfx)
1051 striplen = len(abspfx)
1069 if striplen:
1052 if striplen:
1070 striplen += len(os.sep)
1053 striplen += len(os.sep)
1071 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1054 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1072 score = evalpath(striplen)
1055 score = evalpath(striplen)
1073 striplen1 = len(os.path.split(abspfx)[0])
1056 striplen1 = len(os.path.split(abspfx)[0])
1074 if striplen1:
1057 if striplen1:
1075 striplen1 += len(os.sep)
1058 striplen1 += len(os.sep)
1076 if evalpath(striplen1) > score:
1059 if evalpath(striplen1) > score:
1077 striplen = striplen1
1060 striplen = striplen1
1078 res = lambda p: os.path.join(dest, p[striplen:])
1061 res = lambda p: os.path.join(dest, p[striplen:])
1079 else:
1062 else:
1080 # a file
1063 # a file
1081 if destdirexists:
1064 if destdirexists:
1082 res = lambda p: os.path.join(dest, os.path.basename(p))
1065 res = lambda p: os.path.join(dest, os.path.basename(p))
1083 else:
1066 else:
1084 res = lambda p: dest
1067 res = lambda p: dest
1085 return res
1068 return res
1086
1069
1087
1070
1088 pats = list(pats)
1071 pats = list(pats)
1089 if not pats:
1072 if not pats:
1090 raise util.Abort(_('no source or destination specified'))
1073 raise util.Abort(_('no source or destination specified'))
1091 if len(pats) == 1:
1074 if len(pats) == 1:
1092 raise util.Abort(_('no destination specified'))
1075 raise util.Abort(_('no destination specified'))
1093 dest = pats.pop()
1076 dest = pats.pop()
1094 destdirexists = os.path.isdir(dest)
1077 destdirexists = os.path.isdir(dest)
1095 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1078 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1096 raise util.Abort(_('with multiple sources, destination must be an '
1079 raise util.Abort(_('with multiple sources, destination must be an '
1097 'existing directory'))
1080 'existing directory'))
1098 if opts['after']:
1081 if opts['after']:
1099 tfn = targetpathafterfn
1082 tfn = targetpathafterfn
1100 else:
1083 else:
1101 tfn = targetpathfn
1084 tfn = targetpathfn
1102 copylist = []
1085 copylist = []
1103 for pat in pats:
1086 for pat in pats:
1104 srcs = []
1087 srcs = []
1105 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1088 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1106 origsrc = okaytocopy(abssrc, relsrc, exact)
1089 origsrc = okaytocopy(abssrc, relsrc, exact)
1107 if origsrc:
1090 if origsrc:
1108 srcs.append((origsrc, abssrc, relsrc, exact))
1091 srcs.append((origsrc, abssrc, relsrc, exact))
1109 if not srcs:
1092 if not srcs:
1110 continue
1093 continue
1111 copylist.append((tfn(pat, dest, srcs), srcs))
1094 copylist.append((tfn(pat, dest, srcs), srcs))
1112 if not copylist:
1095 if not copylist:
1113 raise util.Abort(_('no files to copy'))
1096 raise util.Abort(_('no files to copy'))
1114
1097
1115 for targetpath, srcs in copylist:
1098 for targetpath, srcs in copylist:
1116 for origsrc, abssrc, relsrc, exact in srcs:
1099 for origsrc, abssrc, relsrc, exact in srcs:
1117 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1100 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1118
1101
1119 if errors:
1102 if errors:
1120 ui.warn(_('(consider using --after)\n'))
1103 ui.warn(_('(consider using --after)\n'))
1121 return errors, copied
1104 return errors, copied
1122
1105
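# --- illustrative sketch (editor's addition, not part of commands.py) ---
# The scoring heuristic from targetpathafterfn() in docopy() above: between
# candidate strip lengths, prefer the one under which more of the would-be
# targets already exist on disk (ties keep the first candidate).
import os

def _choose_striplen(candidates, dest, srcs):
    def score(striplen):
        return sum(1 for s in srcs
                   if os.path.exists(os.path.join(dest, s[striplen:])))
    best = candidates[0]
    for cand in candidates[1:]:
        if score(cand) > score(best):
            best = cand
    return best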
1123 def copy(ui, repo, *pats, **opts):
1106 def copy(ui, repo, *pats, **opts):
1124 """mark files as copied for the next commit
1107 """mark files as copied for the next commit
1125
1108
1126 Mark dest as having copies of source files. If dest is a
1109 Mark dest as having copies of source files. If dest is a
1127 directory, copies are put in that directory. If dest is a file,
1110 directory, copies are put in that directory. If dest is a file,
1128 there can only be one source.
1111 there can only be one source.
1129
1112
1130 By default, this command copies the contents of files as they
1113 By default, this command copies the contents of files as they
1131 stand in the working directory. If invoked with --after, the
1114 stand in the working directory. If invoked with --after, the
1132 operation is recorded, but no copying is performed.
1115 operation is recorded, but no copying is performed.
1133
1116
1134 This command takes effect in the next commit.
1117 This command takes effect in the next commit.
1135
1118
1136 NOTE: This command should be treated as experimental. While it
1119 NOTE: This command should be treated as experimental. While it
1137 should properly record copied files, this information is not yet
1120 should properly record copied files, this information is not yet
1138 fully used by merge, nor fully reported by log.
1121 fully used by merge, nor fully reported by log.
1139 """
1122 """
1140 wlock = repo.wlock(0)
1123 wlock = repo.wlock(0)
1141 errs, copied = docopy(ui, repo, pats, opts, wlock)
1124 errs, copied = docopy(ui, repo, pats, opts, wlock)
1142 return errs
1125 return errs
1143
1126
1144 def debugancestor(ui, index, rev1, rev2):
1127 def debugancestor(ui, index, rev1, rev2):
1145 """find the ancestor revision of two revisions in a given index"""
1128 """find the ancestor revision of two revisions in a given index"""
1146 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1129 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1147 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1130 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1148 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1131 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1149
1132
1150 def debugcomplete(ui, cmd='', **opts):
1133 def debugcomplete(ui, cmd='', **opts):
1151 """returns the completion list associated with the given command"""
1134 """returns the completion list associated with the given command"""
1152
1135
1153 if opts['options']:
1136 if opts['options']:
1154 options = []
1137 options = []
1155 otables = [globalopts]
1138 otables = [globalopts]
1156 if cmd:
1139 if cmd:
1157 aliases, entry = findcmd(ui, cmd)
1140 aliases, entry = findcmd(ui, cmd)
1158 otables.append(entry[1])
1141 otables.append(entry[1])
1159 for t in otables:
1142 for t in otables:
1160 for o in t:
1143 for o in t:
1161 if o[0]:
1144 if o[0]:
1162 options.append('-%s' % o[0])
1145 options.append('-%s' % o[0])
1163 options.append('--%s' % o[1])
1146 options.append('--%s' % o[1])
1164 ui.write("%s\n" % "\n".join(options))
1147 ui.write("%s\n" % "\n".join(options))
1165 return
1148 return
1166
1149
1167 clist = findpossible(ui, cmd).keys()
1150 clist = findpossible(ui, cmd).keys()
1168 clist.sort()
1151 clist.sort()
1169 ui.write("%s\n" % "\n".join(clist))
1152 ui.write("%s\n" % "\n".join(clist))
1170
1153
1171 def debugrebuildstate(ui, repo, rev=None):
1154 def debugrebuildstate(ui, repo, rev=None):
1172 """rebuild the dirstate as it would look like for the given revision"""
1155 """rebuild the dirstate as it would look like for the given revision"""
1173 if not rev:
1156 if not rev:
1174 rev = repo.changelog.tip()
1157 rev = repo.changelog.tip()
1175 else:
1158 else:
1176 rev = repo.lookup(rev)
1159 rev = repo.lookup(rev)
1177 change = repo.changelog.read(rev)
1160 change = repo.changelog.read(rev)
1178 n = change[0]
1161 n = change[0]
1179 files = repo.manifest.read(n)
1162 files = repo.manifest.read(n)
1180 wlock = repo.wlock()
1163 wlock = repo.wlock()
1181 repo.dirstate.rebuild(rev, files)
1164 repo.dirstate.rebuild(rev, files)
1182
1165
1183 def debugcheckstate(ui, repo):
1166 def debugcheckstate(ui, repo):
1184 """validate the correctness of the current dirstate"""
1167 """validate the correctness of the current dirstate"""
1185 parent1, parent2 = repo.dirstate.parents()
1168 parent1, parent2 = repo.dirstate.parents()
1186 repo.dirstate.read()
1169 repo.dirstate.read()
1187 dc = repo.dirstate.map
1170 dc = repo.dirstate.map
1188 keys = dc.keys()
1171 keys = dc.keys()
1189 keys.sort()
1172 keys.sort()
1190 m1n = repo.changelog.read(parent1)[0]
1173 m1n = repo.changelog.read(parent1)[0]
1191 m2n = repo.changelog.read(parent2)[0]
1174 m2n = repo.changelog.read(parent2)[0]
1192 m1 = repo.manifest.read(m1n)
1175 m1 = repo.manifest.read(m1n)
1193 m2 = repo.manifest.read(m2n)
1176 m2 = repo.manifest.read(m2n)
1194 errors = 0
1177 errors = 0
1195 for f in dc:
1178 for f in dc:
1196 state = repo.dirstate.state(f)
1179 state = repo.dirstate.state(f)
1197 if state in "nr" and f not in m1:
1180 if state in "nr" and f not in m1:
1198 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1181 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1199 errors += 1
1182 errors += 1
1200 if state in "a" and f in m1:
1183 if state in "a" and f in m1:
1201 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1184 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1202 errors += 1
1185 errors += 1
1203 if state in "m" and f not in m1 and f not in m2:
1186 if state in "m" and f not in m1 and f not in m2:
1204 ui.warn(_("%s in state %s, but not in either manifest\n") %
1187 ui.warn(_("%s in state %s, but not in either manifest\n") %
1205 (f, state))
1188 (f, state))
1206 errors += 1
1189 errors += 1
1207 for f in m1:
1190 for f in m1:
1208 state = repo.dirstate.state(f)
1191 state = repo.dirstate.state(f)
1209 if state not in "nrm":
1192 if state not in "nrm":
1210 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1193 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1211 errors += 1
1194 errors += 1
1212 if errors:
1195 if errors:
1213 error = _(".hg/dirstate inconsistent with current parent's manifest")
1196 error = _(".hg/dirstate inconsistent with current parent's manifest")
1214 raise util.Abort(error)
1197 raise util.Abort(error)
1215
1198
1216 def showconfig(ui, repo, *values, **opts):
1199 def showconfig(ui, repo, *values, **opts):
1217 """show combined config settings from all hgrc files
1200 """show combined config settings from all hgrc files
1218
1201
1219 With no args, print names and values of all config items.
1202 With no args, print names and values of all config items.
1220
1203
1221 With one arg of the form section.name, print just the value of
1204 With one arg of the form section.name, print just the value of
1222 that config item.
1205 that config item.
1223
1206
1224 With multiple args, print names and values of all config items
1207 With multiple args, print names and values of all config items
1225 with matching section names."""
1208 with matching section names."""
1226
1209
1227 untrusted = bool(opts.get('untrusted'))
1210 untrusted = bool(opts.get('untrusted'))
1228 if values:
1211 if values:
1229 if len([v for v in values if '.' in v]) > 1:
1212 if len([v for v in values if '.' in v]) > 1:
1230 raise util.Abort(_('only one config item permitted'))
1213 raise util.Abort(_('only one config item permitted'))
1231 for section, name, value in ui.walkconfig(untrusted=untrusted):
1214 for section, name, value in ui.walkconfig(untrusted=untrusted):
1232 sectname = section + '.' + name
1215 sectname = section + '.' + name
1233 if values:
1216 if values:
1234 for v in values:
1217 for v in values:
1235 if v == section:
1218 if v == section:
1236 ui.write('%s=%s\n' % (sectname, value))
1219 ui.write('%s=%s\n' % (sectname, value))
1237 elif v == sectname:
1220 elif v == sectname:
1238 ui.write(value, '\n')
1221 ui.write(value, '\n')
1239 else:
1222 else:
1240 ui.write('%s=%s\n' % (sectname, value))
1223 ui.write('%s=%s\n' % (sectname, value))
1241
1224
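# Illustrative sketch (not part of the original module): how the filtering in
# showconfig above treats its arguments.  A bare section name such as "paths"
# prints every matching item as "section.name=value", while a fully qualified
# "section.name" prints only that item's value.  The helper and the settings
# dict below are hypothetical stand-ins for ui.walkconfig().
def _showconfig_sketch(settings, values):
    out = []
    names = settings.keys()
    names.sort()
    for sectname in names:
        value = settings[sectname]
        section = sectname.split('.', 1)[0]
        if not values:
            out.append('%s=%s' % (sectname, value))
            continue
        for v in values:
            if v == section:
                out.append('%s=%s' % (sectname, value))
            elif v == sectname:
                out.append(value)
    return out

# _showconfig_sketch({'paths.default': '/repo', 'ui.username': 'me'}, ['paths'])
# would return ['paths.default=/repo'].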
1242 def debugsetparents(ui, repo, rev1, rev2=None):
1225 def debugsetparents(ui, repo, rev1, rev2=None):
1243 """manually set the parents of the current working directory
1226 """manually set the parents of the current working directory
1244
1227
1245 This is useful for writing repository conversion tools, but should
1228 This is useful for writing repository conversion tools, but should
1246 be used with care.
1229 be used with care.
1247 """
1230 """
1248
1231
1249 if not rev2:
1232 if not rev2:
1250 rev2 = hex(nullid)
1233 rev2 = hex(nullid)
1251
1234
1252 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1235 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1253
1236
1254 def debugstate(ui, repo):
1237 def debugstate(ui, repo):
1255 """show the contents of the current dirstate"""
1238 """show the contents of the current dirstate"""
1256 repo.dirstate.read()
1239 repo.dirstate.read()
1257 dc = repo.dirstate.map
1240 dc = repo.dirstate.map
1258 keys = dc.keys()
1241 keys = dc.keys()
1259 keys.sort()
1242 keys.sort()
1260 for file_ in keys:
1243 for file_ in keys:
1261 ui.write("%c %3o %10d %s %s\n"
1244 ui.write("%c %3o %10d %s %s\n"
1262 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1245 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1263 time.strftime("%x %X",
1246 time.strftime("%x %X",
1264 time.localtime(dc[file_][3])), file_))
1247 time.localtime(dc[file_][3])), file_))
1265 for f in repo.dirstate.copies():
1248 for f in repo.dirstate.copies():
1266 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1249 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
1267
1250
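# Editor's note (not in the original source): each dirstate entry printed
# above is the tuple (state, mode, size, mtime), so a line looks roughly like
#   n 644       1024 05/30/06 12:00:00 some/file.py
# i.e. the state character, the permission bits in octal, the recorded size,
# the recorded modification time and the file name; copies are listed last.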
1268 def debugdata(ui, file_, rev):
1251 def debugdata(ui, file_, rev):
1269 """dump the contents of an data file revision"""
1252 """dump the contents of an data file revision"""
1270 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1253 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1271 file_[:-2] + ".i", file_, 0)
1254 file_[:-2] + ".i", file_, 0)
1272 try:
1255 try:
1273 ui.write(r.revision(r.lookup(rev)))
1256 ui.write(r.revision(r.lookup(rev)))
1274 except KeyError:
1257 except KeyError:
1275 raise util.Abort(_('invalid revision identifier %s') % rev)
1258 raise util.Abort(_('invalid revision identifier %s') % rev)
1276
1259
1277 def debugindex(ui, file_):
1260 def debugindex(ui, file_):
1278 """dump the contents of an index file"""
1261 """dump the contents of an index file"""
1279 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1262 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1280 ui.write(" rev offset length base linkrev" +
1263 ui.write(" rev offset length base linkrev" +
1281 " nodeid p1 p2\n")
1264 " nodeid p1 p2\n")
1282 for i in xrange(r.count()):
1265 for i in xrange(r.count()):
1283 node = r.node(i)
1266 node = r.node(i)
1284 pp = r.parents(node)
1267 pp = r.parents(node)
1285 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1268 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1286 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1269 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1287 short(node), short(pp[0]), short(pp[1])))
1270 short(node), short(pp[0]), short(pp[1])))
1288
1271
1289 def debugindexdot(ui, file_):
1272 def debugindexdot(ui, file_):
1290 """dump an index DAG as a .dot file"""
1273 """dump an index DAG as a .dot file"""
1291 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1292 ui.write("digraph G {\n")
1275 ui.write("digraph G {\n")
1293 for i in xrange(r.count()):
1276 for i in xrange(r.count()):
1294 node = r.node(i)
1277 node = r.node(i)
1295 pp = r.parents(node)
1278 pp = r.parents(node)
1296 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1279 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1297 if pp[1] != nullid:
1280 if pp[1] != nullid:
1298 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1281 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1299 ui.write("}\n")
1282 ui.write("}\n")
1300
1283
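# Editor's note (not in the original source): for a small linear revlog the
# emitted graph looks roughly like
#   digraph G {
#       -1 -> 0
#       0 -> 1
#       1 -> 2
#   }
# where -1 stands for the null revision; a second edge per node is only added
# for merge revisions.  The output is plain Graphviz dot source and can be fed
# to the dot tool for rendering.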
1301 def debugrename(ui, repo, file, rev=None):
1284 def debugrename(ui, repo, file, rev=None):
1302 """dump rename information"""
1285 """dump rename information"""
1303 r = repo.file(relpath(repo, [file])[0])
1286 r = repo.file(relpath(repo, [file])[0])
1304 if rev:
1287 if rev:
1305 try:
1288 try:
1306 # assume all revision numbers are for changesets
1289 # assume all revision numbers are for changesets
1307 n = repo.lookup(rev)
1290 n = repo.lookup(rev)
1308 change = repo.changelog.read(n)
1291 change = repo.changelog.read(n)
1309 m = repo.manifest.read(change[0])
1292 m = repo.manifest.read(change[0])
1310 n = m[relpath(repo, [file])[0]]
1293 n = m[relpath(repo, [file])[0]]
1311 except (hg.RepoError, KeyError):
1294 except (hg.RepoError, KeyError):
1312 n = r.lookup(rev)
1295 n = r.lookup(rev)
1313 else:
1296 else:
1314 n = r.tip()
1297 n = r.tip()
1315 m = r.renamed(n)
1298 m = r.renamed(n)
1316 if m:
1299 if m:
1317 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1300 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1318 else:
1301 else:
1319 ui.write(_("not renamed\n"))
1302 ui.write(_("not renamed\n"))
1320
1303
1321 def debugwalk(ui, repo, *pats, **opts):
1304 def debugwalk(ui, repo, *pats, **opts):
1322 """show how files match on given patterns"""
1305 """show how files match on given patterns"""
1323 items = list(cmdutil.walk(repo, pats, opts))
1306 items = list(cmdutil.walk(repo, pats, opts))
1324 if not items:
1307 if not items:
1325 return
1308 return
1326 fmt = '%%s %%-%ds %%-%ds %%s' % (
1309 fmt = '%%s %%-%ds %%-%ds %%s' % (
1327 max([len(abs) for (src, abs, rel, exact) in items]),
1310 max([len(abs) for (src, abs, rel, exact) in items]),
1328 max([len(rel) for (src, abs, rel, exact) in items]))
1311 max([len(rel) for (src, abs, rel, exact) in items]))
1329 for src, abs, rel, exact in items:
1312 for src, abs, rel, exact in items:
1330 line = fmt % (src, abs, rel, exact and 'exact' or '')
1313 line = fmt % (src, abs, rel, exact and 'exact' or '')
1331 ui.write("%s\n" % line.rstrip())
1314 ui.write("%s\n" % line.rstrip())
1332
1315
1333 def diff(ui, repo, *pats, **opts):
1316 def diff(ui, repo, *pats, **opts):
1334 """diff repository (or selected files)
1317 """diff repository (or selected files)
1335
1318
1336 Show differences between revisions for the specified files.
1319 Show differences between revisions for the specified files.
1337
1320
1338 Differences between files are shown using the unified diff format.
1321 Differences between files are shown using the unified diff format.
1339
1322
1340 When two revision arguments are given, then changes are shown
1323 When two revision arguments are given, then changes are shown
1341 between those revisions. If only one revision is specified then
1324 between those revisions. If only one revision is specified then
1342 that revision is compared to the working directory, and, when no
1325 that revision is compared to the working directory, and, when no
1343 revisions are specified, the working directory files are compared
1326 revisions are specified, the working directory files are compared
1344 to its parent.
1327 to its parent.
1345
1328
1346 Without the -a option, diff will avoid generating diffs of files
1329 Without the -a option, diff will avoid generating diffs of files
1347 it detects as binary. With -a, diff will generate a diff anyway,
1330 it detects as binary. With -a, diff will generate a diff anyway,
1348 probably with undesirable results.
1331 probably with undesirable results.
1349 """
1332 """
1350 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1333 node1, node2 = cmdutil.revpair(ui, repo, opts['rev'])
1351
1334
1352 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1335 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1353
1336
1354 patch.diff(repo, node1, node2, fns, match=matchfn,
1337 patch.diff(repo, node1, node2, fns, match=matchfn,
1355 opts=patch.diffopts(ui, opts))
1338 opts=patch.diffopts(ui, opts))
1356
1339
1357 def export(ui, repo, *changesets, **opts):
1340 def export(ui, repo, *changesets, **opts):
1358 """dump the header and diffs for one or more changesets
1341 """dump the header and diffs for one or more changesets
1359
1342
1360 Print the changeset header and diffs for one or more revisions.
1343 Print the changeset header and diffs for one or more revisions.
1361
1344
1362 The information shown in the changeset header is: author,
1345 The information shown in the changeset header is: author,
1363 changeset hash, parent and commit comment.
1346 changeset hash, parent and commit comment.
1364
1347
1365 Output may be to a file, in which case the name of the file is
1348 Output may be to a file, in which case the name of the file is
1366 given using a format string. The formatting rules are as follows:
1349 given using a format string. The formatting rules are as follows:
1367
1350
1368 %% literal "%" character
1351 %% literal "%" character
1369 %H changeset hash (40 bytes of hexadecimal)
1352 %H changeset hash (40 bytes of hexadecimal)
1370 %N number of patches being generated
1353 %N number of patches being generated
1371 %R changeset revision number
1354 %R changeset revision number
1372 %b basename of the exporting repository
1355 %b basename of the exporting repository
1373 %h short-form changeset hash (12 bytes of hexadecimal)
1356 %h short-form changeset hash (12 bytes of hexadecimal)
1374 %n zero-padded sequence number, starting at 1
1357 %n zero-padded sequence number, starting at 1
1375 %r zero-padded changeset revision number
1358 %r zero-padded changeset revision number
1376
1359
1377 Without the -a option, export will avoid generating diffs of files
1360 Without the -a option, export will avoid generating diffs of files
1378 it detects as binary. With -a, export will generate a diff anyway,
1361 it detects as binary. With -a, export will generate a diff anyway,
1379 probably with undesirable results.
1362 probably with undesirable results.
1380
1363
1381 With the --switch-parent option, the diff will be against the second
1364 With the --switch-parent option, the diff will be against the second
1382 parent. It can be useful to review a merge.
1365 parent. It can be useful to review a merge.
1383 """
1366 """
1384 if not changesets:
1367 if not changesets:
1385 raise util.Abort(_("export requires at least one changeset"))
1368 raise util.Abort(_("export requires at least one changeset"))
1386 revs = list(cmdutil.revrange(ui, repo, changesets))
1369 revs = cmdutil.revrange(ui, repo, changesets)
1387 if len(revs) > 1:
1370 if len(revs) > 1:
1388 ui.note(_('exporting patches:\n'))
1371 ui.note(_('exporting patches:\n'))
1389 else:
1372 else:
1390 ui.note(_('exporting patch:\n'))
1373 ui.note(_('exporting patch:\n'))
1391 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1374 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1392 switch_parent=opts['switch_parent'],
1375 switch_parent=opts['switch_parent'],
1393 opts=patch.diffopts(ui, opts))
1376 opts=patch.diffopts(ui, opts))
1394
1377
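# Illustrative sketch (not part of the original module): how the filename
# format keys documented above could be expanded.  The real expansion is done
# inside patch.export(); the helper below is a hypothetical, simplified
# re-implementation for a plain template string, and the exact zero padding
# of %n and %r is an assumption.
def _export_template_sketch(template, repo_basename, node_hex, seqno, total, rev):
    subs = {
        '%': '%',
        'H': node_hex,            # full changeset hash
        'N': str(total),          # number of patches being generated
        'R': str(rev),            # changeset revision number
        'b': repo_basename,       # basename of the exporting repository
        'h': node_hex[:12],       # short-form changeset hash
        'n': '%02d' % seqno,      # zero-padded sequence number (assumed width)
        'r': str(rev),            # revision number (padding omitted here)
    }
    out = []
    i = 0
    while i < len(template):
        c = template[i]
        if c == '%' and i + 1 < len(template):
            out.append(subs.get(template[i + 1], '%' + template[i + 1]))
            i += 2
        else:
            out.append(c)
            i += 1
    return ''.join(out)

# _export_template_sketch('%b-%n.patch', 'myrepo', 'ab' * 20, 1, 2, 42)
# would return 'myrepo-01.patch'.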
1395 def grep(ui, repo, pattern, *pats, **opts):
1378 def grep(ui, repo, pattern, *pats, **opts):
1396 """search for a pattern in specified files and revisions
1379 """search for a pattern in specified files and revisions
1397
1380
1398 Search revisions of files for a regular expression.
1381 Search revisions of files for a regular expression.
1399
1382
1400 This command behaves differently than Unix grep. It only accepts
1383 This command behaves differently than Unix grep. It only accepts
1401 Python/Perl regexps. It searches repository history, not the
1384 Python/Perl regexps. It searches repository history, not the
1402 working directory. It always prints the revision number in which
1385 working directory. It always prints the revision number in which
1403 a match appears.
1386 a match appears.
1404
1387
1405 By default, grep only prints output for the first revision of a
1388 By default, grep only prints output for the first revision of a
1406 file in which it finds a match. To get it to print every revision
1389 file in which it finds a match. To get it to print every revision
1407 that contains a change in match status ("-" for a match that
1390 that contains a change in match status ("-" for a match that
1408 becomes a non-match, or "+" for a non-match that becomes a match),
1391 becomes a non-match, or "+" for a non-match that becomes a match),
1409 use the --all flag.
1392 use the --all flag.
1410 """
1393 """
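# Output format note (editor's gloss, not in the original source): each match
# is printed as colon-separated fields -- file name, revision, optional line
# number (-n), optional +/- change marker (--all), optional user (-u) and the
# matching line itself -- so a line with --all looks something like
#   somefile.py:12:+:a line that started matching at revision 12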
1411 reflags = 0
1394 reflags = 0
1412 if opts['ignore_case']:
1395 if opts['ignore_case']:
1413 reflags |= re.I
1396 reflags |= re.I
1414 regexp = re.compile(pattern, reflags)
1397 regexp = re.compile(pattern, reflags)
1415 sep, eol = ':', '\n'
1398 sep, eol = ':', '\n'
1416 if opts['print0']:
1399 if opts['print0']:
1417 sep = eol = '\0'
1400 sep = eol = '\0'
1418
1401
1419 fcache = {}
1402 fcache = {}
1420 def getfile(fn):
1403 def getfile(fn):
1421 if fn not in fcache:
1404 if fn not in fcache:
1422 fcache[fn] = repo.file(fn)
1405 fcache[fn] = repo.file(fn)
1423 return fcache[fn]
1406 return fcache[fn]
1424
1407
1425 def matchlines(body):
1408 def matchlines(body):
1426 begin = 0
1409 begin = 0
1427 linenum = 0
1410 linenum = 0
1428 while True:
1411 while True:
1429 match = regexp.search(body, begin)
1412 match = regexp.search(body, begin)
1430 if not match:
1413 if not match:
1431 break
1414 break
1432 mstart, mend = match.span()
1415 mstart, mend = match.span()
1433 linenum += body.count('\n', begin, mstart) + 1
1416 linenum += body.count('\n', begin, mstart) + 1
1434 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1417 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1435 lend = body.find('\n', mend)
1418 lend = body.find('\n', mend)
1436 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1419 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1437 begin = lend + 1
1420 begin = lend + 1
1438
1421
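# Example (editor's gloss): with a regexp matching "foo" and
# body = "foo\nbar foo\n", matchlines yields
#   (1, 0, 3, 'foo') and (2, 4, 7, 'bar foo'),
# i.e. (line number, match start column, match end column, full line text).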
1439 class linestate(object):
1422 class linestate(object):
1440 def __init__(self, line, linenum, colstart, colend):
1423 def __init__(self, line, linenum, colstart, colend):
1441 self.line = line
1424 self.line = line
1442 self.linenum = linenum
1425 self.linenum = linenum
1443 self.colstart = colstart
1426 self.colstart = colstart
1444 self.colend = colend
1427 self.colend = colend
1445
1428
1446 def __eq__(self, other):
1429 def __eq__(self, other):
1447 return self.line == other.line
1430 return self.line == other.line
1448
1431
1449 matches = {}
1432 matches = {}
1450 copies = {}
1433 copies = {}
1451 def grepbody(fn, rev, body):
1434 def grepbody(fn, rev, body):
1452 matches[rev].setdefault(fn, [])
1435 matches[rev].setdefault(fn, [])
1453 m = matches[rev][fn]
1436 m = matches[rev][fn]
1454 for lnum, cstart, cend, line in matchlines(body):
1437 for lnum, cstart, cend, line in matchlines(body):
1455 s = linestate(line, lnum, cstart, cend)
1438 s = linestate(line, lnum, cstart, cend)
1456 m.append(s)
1439 m.append(s)
1457
1440
1458 def difflinestates(a, b):
1441 def difflinestates(a, b):
1459 sm = difflib.SequenceMatcher(None, a, b)
1442 sm = difflib.SequenceMatcher(None, a, b)
1460 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1443 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1461 if tag == 'insert':
1444 if tag == 'insert':
1462 for i in xrange(blo, bhi):
1445 for i in xrange(blo, bhi):
1463 yield ('+', b[i])
1446 yield ('+', b[i])
1464 elif tag == 'delete':
1447 elif tag == 'delete':
1465 for i in xrange(alo, ahi):
1448 for i in xrange(alo, ahi):
1466 yield ('-', a[i])
1449 yield ('-', a[i])
1467 elif tag == 'replace':
1450 elif tag == 'replace':
1468 for i in xrange(alo, ahi):
1451 for i in xrange(alo, ahi):
1469 yield ('-', a[i])
1452 yield ('-', a[i])
1470 for i in xrange(blo, bhi):
1453 for i in xrange(blo, bhi):
1471 yield ('+', b[i])
1454 yield ('+', b[i])
1472
1455
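# Example (editor's gloss): difflinestates(['a', 'b'], ['a', 'c']) yields
# ('-', 'b') then ('+', 'c'); states present only in the first sequence come
# out as removals and states only in the second as additions, which is what
# drives the +/- markers printed by display() below.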
1473 prev = {}
1456 prev = {}
1474 ucache = {}
1475 def display(fn, rev, states, prevstates):
1457 def display(fn, rev, states, prevstates):
1476 counts = {'-': 0, '+': 0}
1458 counts = {'-': 0, '+': 0}
1477 filerevmatches = {}
1459 filerevmatches = {}
1478 if incrementing or not opts['all']:
1460 if incrementing or not opts['all']:
1479 a, b = prevstates, states
1461 a, b, r = prevstates, states, rev
1480 else:
1462 else:
1481 a, b = states, prevstates
1463 a, b, r = states, prevstates, prev.get(fn, -1)
1482 for change, l in difflinestates(a, b):
1464 for change, l in difflinestates(a, b):
1483 if incrementing or not opts['all']:
1484 r = rev
1485 else:
1486 r = prev[fn]
1487 cols = [fn, str(r)]
1465 cols = [fn, str(r)]
1488 if opts['line_number']:
1466 if opts['line_number']:
1489 cols.append(str(l.linenum))
1467 cols.append(str(l.linenum))
1490 if opts['all']:
1468 if opts['all']:
1491 cols.append(change)
1469 cols.append(change)
1492 if opts['user']:
1470 if opts['user']:
1493 cols.append(trimuser(ui, getchange(r)[1], rev,
1471 cols.append(ui.shortuser(getchange(r)[1]))
1494 ucache))
1495 if opts['files_with_matches']:
1472 if opts['files_with_matches']:
1496 c = (fn, rev)
1473 c = (fn, r)
1497 if c in filerevmatches:
1474 if c in filerevmatches:
1498 continue
1475 continue
1499 filerevmatches[c] = 1
1476 filerevmatches[c] = 1
1500 else:
1477 else:
1501 cols.append(l.line)
1478 cols.append(l.line)
1502 ui.write(sep.join(cols), eol)
1479 ui.write(sep.join(cols), eol)
1503 counts[change] += 1
1480 counts[change] += 1
1504 return counts['+'], counts['-']
1481 return counts['+'], counts['-']
1505
1482
1506 fstate = {}
1483 fstate = {}
1507 skip = {}
1484 skip = {}
1508 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1485 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1486 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1509 count = 0
1487 count = 0
1510 incrementing = False
1488 incrementing = False
1511 follow = opts.get('follow')
1489 follow = opts.get('follow')
1512 for st, rev, fns in changeiter:
1490 for st, rev, fns in changeiter:
1513 if st == 'window':
1491 if st == 'window':
1514 incrementing = rev
1492 incrementing = rev
1515 matches.clear()
1493 matches.clear()
1516 elif st == 'add':
1494 elif st == 'add':
1517 change = repo.changelog.read(repo.lookup(str(rev)))
1495 mf = repo.changectx(rev).manifest()
1518 mf = repo.manifest.read(change[0])
1519 matches[rev] = {}
1496 matches[rev] = {}
1520 for fn in fns:
1497 for fn in fns:
1521 if fn in skip:
1498 if fn in skip:
1522 continue
1499 continue
1523 fstate.setdefault(fn, {})
1500 fstate.setdefault(fn, {})
1524 try:
1501 try:
1525 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1502 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1526 if follow:
1503 if follow:
1527 copied = getfile(fn).renamed(mf[fn])
1504 copied = getfile(fn).renamed(mf[fn])
1528 if copied:
1505 if copied:
1529 copies.setdefault(rev, {})[fn] = copied[0]
1506 copies.setdefault(rev, {})[fn] = copied[0]
1530 except KeyError:
1507 except KeyError:
1531 pass
1508 pass
1532 elif st == 'iter':
1509 elif st == 'iter':
1533 states = matches[rev].items()
1510 states = matches[rev].items()
1534 states.sort()
1511 states.sort()
1535 for fn, m in states:
1512 for fn, m in states:
1536 copy = copies.get(rev, {}).get(fn)
1513 copy = copies.get(rev, {}).get(fn)
1537 if fn in skip:
1514 if fn in skip:
1538 if copy:
1515 if copy:
1539 skip[copy] = True
1516 skip[copy] = True
1540 continue
1517 continue
1541 if incrementing or not opts['all'] or fstate[fn]:
1518 if incrementing or not opts['all'] or fstate[fn]:
1542 pos, neg = display(fn, rev, m, fstate[fn])
1519 pos, neg = display(fn, rev, m, fstate[fn])
1543 count += pos + neg
1520 count += pos + neg
1544 if pos and not opts['all']:
1521 if pos and not opts['all']:
1545 skip[fn] = True
1522 skip[fn] = True
1546 if copy:
1523 if copy:
1547 skip[copy] = True
1524 skip[copy] = True
1548 fstate[fn] = m
1525 fstate[fn] = m
1549 if copy:
1526 if copy:
1550 fstate[copy] = m
1527 fstate[copy] = m
1551 prev[fn] = rev
1528 prev[fn] = rev
1552
1529
1553 if not incrementing:
1530 if not incrementing:
1554 fstate = fstate.items()
1531 fstate = fstate.items()
1555 fstate.sort()
1532 fstate.sort()
1556 for fn, state in fstate:
1533 for fn, state in fstate:
1557 if fn in skip:
1534 if fn in skip:
1558 continue
1535 continue
1559 if fn not in copies.get(prev[fn], {}):
1536 if fn not in copies.get(prev[fn], {}):
1560 display(fn, rev, {}, state)
1537 display(fn, rev, {}, state)
1561 return (count == 0 and 1) or 0
1538 return (count == 0 and 1) or 0
1562
1539
1563 def heads(ui, repo, **opts):
1540 def heads(ui, repo, **opts):
1564 """show current repository heads
1541 """show current repository heads
1565
1542
1566 Show all repository head changesets.
1543 Show all repository head changesets.
1567
1544
1568 Repository "heads" are changesets that don't have children
1545 Repository "heads" are changesets that don't have children
1569 changesets. They are where development generally takes place and
1546 changesets. They are where development generally takes place and
1570 are the usual targets for update and merge operations.
1547 are the usual targets for update and merge operations.
1571 """
1548 """
1572 if opts['rev']:
1549 if opts['rev']:
1573 heads = repo.heads(repo.lookup(opts['rev']))
1550 heads = repo.heads(repo.lookup(opts['rev']))
1574 else:
1551 else:
1575 heads = repo.heads()
1552 heads = repo.heads()
1576 br = None
1553 br = None
1577 if opts['branches']:
1554 if opts['branches']:
1578 ui.warn(_("the --branches option is deprecated, "
1555 ui.warn(_("the --branches option is deprecated, "
1579 "please use 'hg branches' instead\n"))
1556 "please use 'hg branches' instead\n"))
1580 br = repo.branchlookup(heads)
1557 br = repo.branchlookup(heads)
1581 displayer = show_changeset(ui, repo, opts)
1558 displayer = show_changeset(ui, repo, opts)
1582 for n in heads:
1559 for n in heads:
1583 displayer.show(changenode=n, brinfo=br)
1560 displayer.show(changenode=n, brinfo=br)
1584
1561
1585 def identify(ui, repo):
1562 def identify(ui, repo):
1586 """print information about the working copy
1563 """print information about the working copy
1587
1564
1588 Print a short summary of the current state of the repo.
1565 Print a short summary of the current state of the repo.
1589
1566
1590 This summary identifies the repository state using one or two parent
1567 This summary identifies the repository state using one or two parent
1591 hash identifiers, followed by a "+" if there are uncommitted changes
1568 hash identifiers, followed by a "+" if there are uncommitted changes
1592 in the working directory, followed by a list of tags for this revision.
1569 in the working directory, followed by a list of tags for this revision.
1593 """
1570 """
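# Example output (editor's gloss): something like "d6e9f573ff41+ tip" --
# the short parent hash, a "+" because the working directory has uncommitted
# changes, then the tags of that parent.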
1594 parents = [p for p in repo.dirstate.parents() if p != nullid]
1571 parents = [p for p in repo.dirstate.parents() if p != nullid]
1595 if not parents:
1572 if not parents:
1596 ui.write(_("unknown\n"))
1573 ui.write(_("unknown\n"))
1597 return
1574 return
1598
1575
1599 hexfunc = ui.debugflag and hex or short
1576 hexfunc = ui.debugflag and hex or short
1600 modified, added, removed, deleted = repo.status()[:4]
1577 modified, added, removed, deleted = repo.status()[:4]
1601 output = ["%s%s" %
1578 output = ["%s%s" %
1602 ('+'.join([hexfunc(parent) for parent in parents]),
1579 ('+'.join([hexfunc(parent) for parent in parents]),
1603 (modified or added or removed or deleted) and "+" or "")]
1580 (modified or added or removed or deleted) and "+" or "")]
1604
1581
1605 if not ui.quiet:
1582 if not ui.quiet:
1606
1583
1607 branch = repo.workingctx().branch()
1584 branch = repo.workingctx().branch()
1608 if branch:
1585 if branch:
1609 output.append("(%s)" % branch)
1586 output.append("(%s)" % branch)
1610
1587
1611 # multiple tags for a single parent separated by '/'
1588 # multiple tags for a single parent separated by '/'
1612 parenttags = ['/'.join(tags)
1589 parenttags = ['/'.join(tags)
1613 for tags in map(repo.nodetags, parents) if tags]
1590 for tags in map(repo.nodetags, parents) if tags]
1614 # tags for multiple parents separated by ' + '
1591 # tags for multiple parents separated by ' + '
1615 if parenttags:
1592 if parenttags:
1616 output.append(' + '.join(parenttags))
1593 output.append(' + '.join(parenttags))
1617
1594
1618 ui.write("%s\n" % ' '.join(output))
1595 ui.write("%s\n" % ' '.join(output))
1619
1596
1620 def import_(ui, repo, patch1, *patches, **opts):
1597 def import_(ui, repo, patch1, *patches, **opts):
1621 """import an ordered set of patches
1598 """import an ordered set of patches
1622
1599
1623 Import a list of patches and commit them individually.
1600 Import a list of patches and commit them individually.
1624
1601
1625 If there are outstanding changes in the working directory, import
1602 If there are outstanding changes in the working directory, import
1626 will abort unless given the -f flag.
1603 will abort unless given the -f flag.
1627
1604
1628 You can import a patch straight from a mail message. Even patches
1605 You can import a patch straight from a mail message. Even patches
1629 as attachments work (body part must be type text/plain or
1606 as attachments work (body part must be type text/plain or
1630 text/x-patch to be used). From and Subject headers of email
1607 text/x-patch to be used). From and Subject headers of email
1631 message are used as default committer and commit message. All
1608 message are used as default committer and commit message. All
1632 text/plain body parts before first diff are added to commit
1609 text/plain body parts before first diff are added to commit
1633 message.
1610 message.
1634
1611
1635 If imported patch was generated by hg export, user and description
1612 If imported patch was generated by hg export, user and description
1636 from patch override values from message headers and body. Values
1613 from patch override values from message headers and body. Values
1637 given on command line with -m and -u override these.
1614 given on command line with -m and -u override these.
1638
1615
1639 To read a patch from standard input, use patch name "-".
1616 To read a patch from standard input, use patch name "-".
1640 """
1617 """
1641 patches = (patch1,) + patches
1618 patches = (patch1,) + patches
1642
1619
1643 if not opts['force']:
1620 if not opts['force']:
1644 bail_if_changed(repo)
1621 bail_if_changed(repo)
1645
1622
1646 d = opts["base"]
1623 d = opts["base"]
1647 strip = opts["strip"]
1624 strip = opts["strip"]
1648
1625
1649 wlock = repo.wlock()
1626 wlock = repo.wlock()
1650 lock = repo.lock()
1627 lock = repo.lock()
1651
1628
1652 for p in patches:
1629 for p in patches:
1653 pf = os.path.join(d, p)
1630 pf = os.path.join(d, p)
1654
1631
1655 if pf == '-':
1632 if pf == '-':
1656 ui.status(_("applying patch from stdin\n"))
1633 ui.status(_("applying patch from stdin\n"))
1657 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1634 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1658 else:
1635 else:
1659 ui.status(_("applying %s\n") % p)
1636 ui.status(_("applying %s\n") % p)
1660 tmpname, message, user, date = patch.extract(ui, file(pf))
1637 tmpname, message, user, date = patch.extract(ui, file(pf))
1661
1638
1662 if tmpname is None:
1639 if tmpname is None:
1663 raise util.Abort(_('no diffs found'))
1640 raise util.Abort(_('no diffs found'))
1664
1641
1665 try:
1642 try:
1666 if opts['message']:
1643 if opts['message']:
1667 # pickup the cmdline msg
1644 # pickup the cmdline msg
1668 message = opts['message']
1645 message = opts['message']
1669 elif message:
1646 elif message:
1670 # pickup the patch msg
1647 # pickup the patch msg
1671 message = message.strip()
1648 message = message.strip()
1672 else:
1649 else:
1673 # launch the editor
1650 # launch the editor
1674 message = None
1651 message = None
1675 ui.debug(_('message:\n%s\n') % message)
1652 ui.debug(_('message:\n%s\n') % message)
1676
1653
1677 files = {}
1654 files = {}
1678 try:
1655 try:
1679 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1656 fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1680 files=files)
1657 files=files)
1681 finally:
1658 finally:
1682 files = patch.updatedir(ui, repo, files, wlock=wlock)
1659 files = patch.updatedir(ui, repo, files, wlock=wlock)
1683 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1660 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1684 finally:
1661 finally:
1685 os.unlink(tmpname)
1662 os.unlink(tmpname)
1686
1663
1687 def incoming(ui, repo, source="default", **opts):
1664 def incoming(ui, repo, source="default", **opts):
1688 """show new changesets found in source
1665 """show new changesets found in source
1689
1666
1690 Show new changesets found in the specified path/URL or the default
1667 Show new changesets found in the specified path/URL or the default
1691 pull location. These are the changesets that would be pulled if a pull
1668 pull location. These are the changesets that would be pulled if a pull
1692 was requested.
1669 was requested.
1693
1670
1694 For remote repositories, using --bundle avoids downloading the changesets
1671 For remote repositories, using --bundle avoids downloading the changesets
1695 twice if the incoming is followed by a pull.
1672 twice if the incoming is followed by a pull.
1696
1673
1697 See pull for valid source format details.
1674 See pull for valid source format details.
1698 """
1675 """
1699 source = ui.expandpath(source)
1676 source = ui.expandpath(source)
1700 setremoteconfig(ui, opts)
1677 setremoteconfig(ui, opts)
1701
1678
1702 other = hg.repository(ui, source)
1679 other = hg.repository(ui, source)
1703 incoming = repo.findincoming(other, force=opts["force"])
1680 incoming = repo.findincoming(other, force=opts["force"])
1704 if not incoming:
1681 if not incoming:
1705 ui.status(_("no changes found\n"))
1682 ui.status(_("no changes found\n"))
1706 return
1683 return
1707
1684
1708 cleanup = None
1685 cleanup = None
1709 try:
1686 try:
1710 fname = opts["bundle"]
1687 fname = opts["bundle"]
1711 if fname or not other.local():
1688 if fname or not other.local():
1712 # create a bundle (uncompressed if other repo is not local)
1689 # create a bundle (uncompressed if other repo is not local)
1713 cg = other.changegroup(incoming, "incoming")
1690 cg = other.changegroup(incoming, "incoming")
1714 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1691 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1715 # keep written bundle?
1692 # keep written bundle?
1716 if opts["bundle"]:
1693 if opts["bundle"]:
1717 cleanup = None
1694 cleanup = None
1718 if not other.local():
1695 if not other.local():
1719 # use the created uncompressed bundlerepo
1696 # use the created uncompressed bundlerepo
1720 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1697 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1721
1698
1722 revs = None
1699 revs = None
1723 if opts['rev']:
1700 if opts['rev']:
1724 revs = [other.lookup(rev) for rev in opts['rev']]
1701 revs = [other.lookup(rev) for rev in opts['rev']]
1725 o = other.changelog.nodesbetween(incoming, revs)[0]
1702 o = other.changelog.nodesbetween(incoming, revs)[0]
1726 if opts['newest_first']:
1703 if opts['newest_first']:
1727 o.reverse()
1704 o.reverse()
1728 displayer = show_changeset(ui, other, opts)
1705 displayer = show_changeset(ui, other, opts)
1729 for n in o:
1706 for n in o:
1730 parents = [p for p in other.changelog.parents(n) if p != nullid]
1707 parents = [p for p in other.changelog.parents(n) if p != nullid]
1731 if opts['no_merges'] and len(parents) == 2:
1708 if opts['no_merges'] and len(parents) == 2:
1732 continue
1709 continue
1733 displayer.show(changenode=n)
1710 displayer.show(changenode=n)
1734 if opts['patch']:
1711 if opts['patch']:
1735 prev = (parents and parents[0]) or nullid
1712 prev = (parents and parents[0]) or nullid
1736 patch.diff(other, prev, n, fp=repo.ui)
1713 patch.diff(other, prev, n, fp=repo.ui)
1737 ui.write("\n")
1714 ui.write("\n")
1738 finally:
1715 finally:
1739 if hasattr(other, 'close'):
1716 if hasattr(other, 'close'):
1740 other.close()
1717 other.close()
1741 if cleanup:
1718 if cleanup:
1742 os.unlink(cleanup)
1719 os.unlink(cleanup)
1743
1720
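# Usage note (editor's gloss, not in the original source): a typical way to
# use --bundle is to save the preview and pull from the saved file, so the
# changesets are only transferred once, e.g.
#   hg incoming --bundle incoming.hg http://example.com/repo
#   hg pull incoming.hg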
1744 def init(ui, dest=".", **opts):
1721 def init(ui, dest=".", **opts):
1745 """create a new repository in the given directory
1722 """create a new repository in the given directory
1746
1723
1747 Initialize a new repository in the given directory. If the given
1724 Initialize a new repository in the given directory. If the given
1748 directory does not exist, it is created.
1725 directory does not exist, it is created.
1749
1726
1750 If no directory is given, the current directory is used.
1727 If no directory is given, the current directory is used.
1751
1728
1752 It is possible to specify an ssh:// URL as the destination.
1729 It is possible to specify an ssh:// URL as the destination.
1753 Look at the help text for the pull command for important details
1730 Look at the help text for the pull command for important details
1754 about ssh:// URLs.
1731 about ssh:// URLs.
1755 """
1732 """
1756 setremoteconfig(ui, opts)
1733 setremoteconfig(ui, opts)
1757 hg.repository(ui, dest, create=1)
1734 hg.repository(ui, dest, create=1)
1758
1735
1759 def locate(ui, repo, *pats, **opts):
1736 def locate(ui, repo, *pats, **opts):
1760 """locate files matching specific patterns
1737 """locate files matching specific patterns
1761
1738
1762 Print all files under Mercurial control whose names match the
1739 Print all files under Mercurial control whose names match the
1763 given patterns.
1740 given patterns.
1764
1741
1765 This command searches the current directory and its
1742 This command searches the current directory and its
1766 subdirectories. To search an entire repository, move to the root
1743 subdirectories. To search an entire repository, move to the root
1767 of the repository.
1744 of the repository.
1768
1745
1769 If no patterns are given to match, this command prints all file
1746 If no patterns are given to match, this command prints all file
1770 names.
1747 names.
1771
1748
1772 If you want to feed the output of this command into the "xargs"
1749 If you want to feed the output of this command into the "xargs"
1773 command, use the "-0" option to both this command and "xargs".
1750 command, use the "-0" option to both this command and "xargs".
1774 This will avoid the problem of "xargs" treating single filenames
1751 This will avoid the problem of "xargs" treating single filenames
1775 that contain white space as multiple filenames.
1752 that contain white space as multiple filenames.
1776 """
1753 """
1777 end = opts['print0'] and '\0' or '\n'
1754 end = opts['print0'] and '\0' or '\n'
1778 rev = opts['rev']
1755 rev = opts['rev']
1779 if rev:
1756 if rev:
1780 node = repo.lookup(rev)
1757 node = repo.lookup(rev)
1781 else:
1758 else:
1782 node = None
1759 node = None
1783
1760
1784 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1761 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1785 head='(?:.*/|)'):
1762 head='(?:.*/|)'):
1786 if not node and repo.dirstate.state(abs) == '?':
1763 if not node and repo.dirstate.state(abs) == '?':
1787 continue
1764 continue
1788 if opts['fullpath']:
1765 if opts['fullpath']:
1789 ui.write(os.path.join(repo.root, abs), end)
1766 ui.write(os.path.join(repo.root, abs), end)
1790 else:
1767 else:
1791 ui.write(((pats and rel) or abs), end)
1768 ui.write(((pats and rel) or abs), end)
1792
1769
1793 def log(ui, repo, *pats, **opts):
1770 def log(ui, repo, *pats, **opts):
1794 """show revision history of entire repository or files
1771 """show revision history of entire repository or files
1795
1772
1796 Print the revision history of the specified files or the entire
1773 Print the revision history of the specified files or the entire
1797 project.
1774 project.
1798
1775
1799 File history is shown without following rename or copy history of
1776 File history is shown without following rename or copy history of
1800 files. Use -f/--follow with a file name to follow history across
1777 files. Use -f/--follow with a file name to follow history across
1801 renames and copies. --follow without a file name will only show
1778 renames and copies. --follow without a file name will only show
1802 ancestors or descendants of the starting revision. --follow-first
1779 ancestors or descendants of the starting revision. --follow-first
1803 only follows the first parent of merge revisions.
1780 only follows the first parent of merge revisions.
1804
1781
1805 If no revision range is specified, the default is tip:0 unless
1782 If no revision range is specified, the default is tip:0 unless
1806 --follow is set, in which case the working directory parent is
1783 --follow is set, in which case the working directory parent is
1807 used as the starting revision.
1784 used as the starting revision.
1808
1785
1809 By default this command outputs: changeset id and hash, tags,
1786 By default this command outputs: changeset id and hash, tags,
1810 non-trivial parents, user, date and time, and a summary for each
1787 non-trivial parents, user, date and time, and a summary for each
1811 commit. When the -v/--verbose switch is used, the list of changed
1788 commit. When the -v/--verbose switch is used, the list of changed
1812 files and full commit message is shown.
1789 files and full commit message is shown.
1813 """
1790 """
1814 class dui(object):
1791 class dui(object):
1815 # Implement and delegate some ui protocol. Save hunks of
1792 # Implement and delegate some ui protocol. Save hunks of
1816 # output for later display in the desired order.
1793 # output for later display in the desired order.
1817 def __init__(self, ui):
1794 def __init__(self, ui):
1818 self.ui = ui
1795 self.ui = ui
1819 self.hunk = {}
1796 self.hunk = {}
1820 self.header = {}
1797 self.header = {}
1821 def bump(self, rev):
1798 def bump(self, rev):
1822 self.rev = rev
1799 self.rev = rev
1823 self.hunk[rev] = []
1800 self.hunk[rev] = []
1824 self.header[rev] = []
1801 self.header[rev] = []
1825 def note(self, *args):
1802 def note(self, *args):
1826 if self.verbose:
1803 if self.verbose:
1827 self.write(*args)
1804 self.write(*args)
1828 def status(self, *args):
1805 def status(self, *args):
1829 if not self.quiet:
1806 if not self.quiet:
1830 self.write(*args)
1807 self.write(*args)
1831 def write(self, *args):
1808 def write(self, *args):
1832 self.hunk[self.rev].append(args)
1809 self.hunk[self.rev].append(args)
1833 def write_header(self, *args):
1810 def write_header(self, *args):
1834 self.header[self.rev].append(args)
1811 self.header[self.rev].append(args)
1835 def debug(self, *args):
1812 def debug(self, *args):
1836 if self.debugflag:
1813 if self.debugflag:
1837 self.write(*args)
1814 self.write(*args)
1838 def __getattr__(self, key):
1815 def __getattr__(self, key):
1839 return getattr(self.ui, key)
1816 return getattr(self.ui, key)
1840
1817
1841 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1818 getchange = util.cachefunc(lambda r:repo.changectx(r).changeset())
1819 changeiter, matchfn = walkchangerevs(ui, repo, pats, getchange, opts)
1842
1820
1843 if opts['branches']:
1821 if opts['branches']:
1844 ui.warn(_("the --branches option is deprecated, "
1822 ui.warn(_("the --branches option is deprecated, "
1845 "please use 'hg branches' instead\n"))
1823 "please use 'hg branches' instead\n"))
1846
1824
1847 if opts['limit']:
1825 if opts['limit']:
1848 try:
1826 try:
1849 limit = int(opts['limit'])
1827 limit = int(opts['limit'])
1850 except ValueError:
1828 except ValueError:
1851 raise util.Abort(_('limit must be a positive integer'))
1829 raise util.Abort(_('limit must be a positive integer'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1830 if limit <= 0: raise util.Abort(_('limit must be positive'))
1853 else:
1831 else:
1854 limit = sys.maxint
1832 limit = sys.maxint
1855 count = 0
1833 count = 0
1856
1834
1857 if opts['copies'] and opts['rev']:
1835 if opts['copies'] and opts['rev']:
1858 endrev = max([int(i)
1836 endrev = max(cmdutil.revrange(ui, repo, opts['rev'])) + 1
1859 for i in cmdutil.revrange(ui, repo, opts['rev'])]) + 1
1860 else:
1837 else:
1861 endrev = repo.changelog.count()
1838 endrev = repo.changelog.count()
1862 rcache = {}
1839 rcache = {}
1863 ncache = {}
1840 ncache = {}
1864 dcache = []
1841 dcache = []
1865 def getrenamed(fn, rev, man):
1842 def getrenamed(fn, rev, man):
1866 '''looks up all renames for a file (up to endrev) the first
1843 '''looks up all renames for a file (up to endrev) the first
1867 time the file is given. It indexes on the changerev and only
1844 time the file is given. It indexes on the changerev and only
1868 parses the manifest if linkrev != changerev.
1845 parses the manifest if linkrev != changerev.
1869 Returns rename info for fn at changerev rev.'''
1846 Returns rename info for fn at changerev rev.'''
1870 if fn not in rcache:
1847 if fn not in rcache:
1871 rcache[fn] = {}
1848 rcache[fn] = {}
1872 ncache[fn] = {}
1849 ncache[fn] = {}
1873 fl = repo.file(fn)
1850 fl = repo.file(fn)
1874 for i in xrange(fl.count()):
1851 for i in xrange(fl.count()):
1875 node = fl.node(i)
1852 node = fl.node(i)
1876 lr = fl.linkrev(node)
1853 lr = fl.linkrev(node)
1877 renamed = fl.renamed(node)
1854 renamed = fl.renamed(node)
1878 rcache[fn][lr] = renamed
1855 rcache[fn][lr] = renamed
1879 if renamed:
1856 if renamed:
1880 ncache[fn][node] = renamed
1857 ncache[fn][node] = renamed
1881 if lr >= endrev:
1858 if lr >= endrev:
1882 break
1859 break
1883 if rev in rcache[fn]:
1860 if rev in rcache[fn]:
1884 return rcache[fn][rev]
1861 return rcache[fn][rev]
1885 mr = repo.manifest.rev(man)
1862 mr = repo.manifest.rev(man)
1886 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1863 if repo.manifest.parentrevs(mr) != (mr - 1, -1):
1887 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1864 return ncache[fn].get(repo.manifest.find(man, fn)[0])
1888 if not dcache or dcache[0] != man:
1865 if not dcache or dcache[0] != man:
1889 dcache[:] = [man, repo.manifest.readdelta(man)]
1866 dcache[:] = [man, repo.manifest.readdelta(man)]
1890 if fn in dcache[1]:
1867 if fn in dcache[1]:
1891 return ncache[fn].get(dcache[1][fn])
1868 return ncache[fn].get(dcache[1][fn])
1892 return None
1869 return None
1893
1870
1894 displayer = show_changeset(ui, repo, opts)
1871 displayer = show_changeset(ui, repo, opts)
1895 for st, rev, fns in changeiter:
1872 for st, rev, fns in changeiter:
1896 if st == 'window':
1873 if st == 'window':
1897 du = dui(ui)
1874 du = dui(ui)
1898 displayer.ui = du
1875 displayer.ui = du
1899 elif st == 'add':
1876 elif st == 'add':
1900 du.bump(rev)
1877 du.bump(rev)
1901 changenode = repo.changelog.node(rev)
1878 changenode = repo.changelog.node(rev)
1902 parents = [p for p in repo.changelog.parents(changenode)
1879 parents = [p for p in repo.changelog.parents(changenode)
1903 if p != nullid]
1880 if p != nullid]
1904 if opts['no_merges'] and len(parents) == 2:
1881 if opts['no_merges'] and len(parents) == 2:
1905 continue
1882 continue
1906 if opts['only_merges'] and len(parents) != 2:
1883 if opts['only_merges'] and len(parents) != 2:
1907 continue
1884 continue
1908
1885
1909 if opts['keyword']:
1886 if opts['keyword']:
1910 changes = getchange(rev)
1887 changes = getchange(rev)
1911 miss = 0
1888 miss = 0
1912 for k in [kw.lower() for kw in opts['keyword']]:
1889 for k in [kw.lower() for kw in opts['keyword']]:
1913 if not (k in changes[1].lower() or
1890 if not (k in changes[1].lower() or
1914 k in changes[4].lower() or
1891 k in changes[4].lower() or
1915 k in " ".join(changes[3][:20]).lower()):
1892 k in " ".join(changes[3][:20]).lower()):
1916 miss = 1
1893 miss = 1
1917 break
1894 break
1918 if miss:
1895 if miss:
1919 continue
1896 continue
1920
1897
1921 br = None
1898 br = None
1922 if opts['branches']:
1899 if opts['branches']:
1923 br = repo.branchlookup([repo.changelog.node(rev)])
1900 br = repo.branchlookup([repo.changelog.node(rev)])
1924
1901
1925 copies = []
1902 copies = []
1926 if opts.get('copies') and rev:
1903 if opts.get('copies') and rev:
1927 mf = getchange(rev)[0]
1904 mf = getchange(rev)[0]
1928 for fn in getchange(rev)[3]:
1905 for fn in getchange(rev)[3]:
1929 rename = getrenamed(fn, rev, mf)
1906 rename = getrenamed(fn, rev, mf)
1930 if rename:
1907 if rename:
1931 copies.append((fn, rename[0]))
1908 copies.append((fn, rename[0]))
1932 displayer.show(rev, brinfo=br, copies=copies)
1909 displayer.show(rev, brinfo=br, copies=copies)
1933 if opts['patch']:
1910 if opts['patch']:
1934 prev = (parents and parents[0]) or nullid
1911 prev = (parents and parents[0]) or nullid
1935 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1912 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1936 du.write("\n\n")
1913 du.write("\n\n")
1937 elif st == 'iter':
1914 elif st == 'iter':
1938 if count == limit: break
1915 if count == limit: break
1939 if du.header[rev]:
1916 if du.header[rev]:
1940 for args in du.header[rev]:
1917 for args in du.header[rev]:
1941 ui.write_header(*args)
1918 ui.write_header(*args)
1942 if du.hunk[rev]:
1919 if du.hunk[rev]:
1943 count += 1
1920 count += 1
1944 for args in du.hunk[rev]:
1921 for args in du.hunk[rev]:
1945 ui.write(*args)
1922 ui.write(*args)
1946
1923
1947 def manifest(ui, repo, rev=None):
1924 def manifest(ui, repo, rev=None):
1948 """output the latest or given revision of the project manifest
1925 """output the latest or given revision of the project manifest
1949
1926
1950 Print a list of version controlled files for the given revision.
1927 Print a list of version controlled files for the given revision.
1951
1928
1952 The manifest is the list of files being version controlled. If no revision
1929 The manifest is the list of files being version controlled. If no revision
1953 is given then the tip is used.
1930 is given then the tip is used.
1954 """
1931 """
1955 if rev:
1932 if rev:
1956 try:
1933 try:
1957 # assume all revision numbers are for changesets
1934 # assume all revision numbers are for changesets
1958 n = repo.lookup(rev)
1935 n = repo.lookup(rev)
1959 change = repo.changelog.read(n)
1936 change = repo.changelog.read(n)
1960 n = change[0]
1937 n = change[0]
1961 except hg.RepoError:
1938 except hg.RepoError:
1962 n = repo.manifest.lookup(rev)
1939 n = repo.manifest.lookup(rev)
1963 else:
1940 else:
1964 n = repo.manifest.tip()
1941 n = repo.manifest.tip()
1965 m = repo.manifest.read(n)
1942 m = repo.manifest.read(n)
1966 files = m.keys()
1943 files = m.keys()
1967 files.sort()
1944 files.sort()
1968
1945
1969 for f in files:
1946 for f in files:
1970 ui.write("%40s %3s %s\n" % (hex(m[f]),
1947 ui.write("%40s %3s %s\n" % (hex(m[f]),
1971 m.execf(f) and "755" or "644", f))
1948 m.execf(f) and "755" or "644", f))
1972
1949
1973 def merge(ui, repo, node=None, force=None, branch=None):
1950 def merge(ui, repo, node=None, force=None, branch=None):
1974 """Merge working directory with another revision
1951 """Merge working directory with another revision
1975
1952
1976 Merge the contents of the current working directory and the
1953 Merge the contents of the current working directory and the
1977 requested revision. Files that changed between either parent are
1954 requested revision. Files that changed between either parent are
1978 marked as changed for the next commit and a commit must be
1955 marked as changed for the next commit and a commit must be
1979 performed before any further updates are allowed.
1956 performed before any further updates are allowed.
1980
1957
1981 If no revision is specified, the working directory's parent is a
1958 If no revision is specified, the working directory's parent is a
1982 head revision, and the repository contains exactly one other head,
1959 head revision, and the repository contains exactly one other head,
1983 the other head is merged with by default. Otherwise, an explicit
1960 the other head is merged with by default. Otherwise, an explicit
1984 revision to merge with must be provided.
1961 revision to merge with must be provided.
1985 """
1962 """
1986
1963
1987 if node or branch:
1964 if node or branch:
1988 node = _lookup(repo, node, branch)
1965 node = _lookup(repo, node, branch)
1989 else:
1966 else:
1990 heads = repo.heads()
1967 heads = repo.heads()
1991 if len(heads) > 2:
1968 if len(heads) > 2:
1992 raise util.Abort(_('repo has %d heads - '
1969 raise util.Abort(_('repo has %d heads - '
1993 'please merge with an explicit rev') %
1970 'please merge with an explicit rev') %
1994 len(heads))
1971 len(heads))
1995 if len(heads) == 1:
1972 if len(heads) == 1:
1996 raise util.Abort(_('there is nothing to merge - '
1973 raise util.Abort(_('there is nothing to merge - '
1997 'use "hg update" instead'))
1974 'use "hg update" instead'))
1998 parent = repo.dirstate.parents()[0]
1975 parent = repo.dirstate.parents()[0]
1999 if parent not in heads:
1976 if parent not in heads:
2000 raise util.Abort(_('working dir not at a head rev - '
1977 raise util.Abort(_('working dir not at a head rev - '
2001 'use "hg update" or merge with an explicit rev'))
1978 'use "hg update" or merge with an explicit rev'))
2002 node = parent == heads[0] and heads[-1] or heads[0]
1979 node = parent == heads[0] and heads[-1] or heads[0]
2003 return hg.merge(repo, node, force=force)
1980 return hg.merge(repo, node, force=force)
2004
1981
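# Editor's gloss on the head selection above: with exactly two heads and the
# working directory parent being one of them, the expression
# "parent == heads[0] and heads[-1] or heads[0]" evaluates to whichever head
# is not the current parent, so "hg merge" with no argument merges with the
# other head.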
2005 def outgoing(ui, repo, dest=None, **opts):
1982 def outgoing(ui, repo, dest=None, **opts):
2006 """show changesets not found in destination
1983 """show changesets not found in destination
2007
1984
2008 Show changesets not found in the specified destination repository or
1985 Show changesets not found in the specified destination repository or
2009 the default push location. These are the changesets that would be pushed
1986 the default push location. These are the changesets that would be pushed
2010 if a push was requested.
1987 if a push was requested.
2011
1988
2012 See pull for valid destination format details.
1989 See pull for valid destination format details.
2013 """
1990 """
2014 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1991 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2015 setremoteconfig(ui, opts)
1992 setremoteconfig(ui, opts)
2016 revs = None
1993 revs = None
2017 if opts['rev']:
1994 if opts['rev']:
2018 revs = [repo.lookup(rev) for rev in opts['rev']]
1995 revs = [repo.lookup(rev) for rev in opts['rev']]
2019
1996
2020 other = hg.repository(ui, dest)
1997 other = hg.repository(ui, dest)
2021 o = repo.findoutgoing(other, force=opts['force'])
1998 o = repo.findoutgoing(other, force=opts['force'])
2022 if not o:
1999 if not o:
2023 ui.status(_("no changes found\n"))
2000 ui.status(_("no changes found\n"))
2024 return
2001 return
2025 o = repo.changelog.nodesbetween(o, revs)[0]
2002 o = repo.changelog.nodesbetween(o, revs)[0]
2026 if opts['newest_first']:
2003 if opts['newest_first']:
2027 o.reverse()
2004 o.reverse()
2028 displayer = show_changeset(ui, repo, opts)
2005 displayer = show_changeset(ui, repo, opts)
2029 for n in o:
2006 for n in o:
2030 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2007 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2031 if opts['no_merges'] and len(parents) == 2:
2008 if opts['no_merges'] and len(parents) == 2:
2032 continue
2009 continue
2033 displayer.show(changenode=n)
2010 displayer.show(changenode=n)
2034 if opts['patch']:
2011 if opts['patch']:
2035 prev = (parents and parents[0]) or nullid
2012 prev = (parents and parents[0]) or nullid
2036 patch.diff(repo, prev, n)
2013 patch.diff(repo, prev, n)
2037 ui.write("\n")
2014 ui.write("\n")
2038
2015
2039 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2016 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2040 """show the parents of the working dir or revision
2017 """show the parents of the working dir or revision
2041
2018
2042 Print the working directory's parent revisions.
2019 Print the working directory's parent revisions.
2043 """
2020 """
2044 # legacy
2021 # legacy
2045 if file_ and not rev:
2022 if file_ and not rev:
2046 try:
2023 try:
2047 rev = repo.lookup(file_)
2024 rev = repo.lookup(file_)
2048 file_ = None
2025 file_ = None
2049 except hg.RepoError:
2026 except hg.RepoError:
2050 pass
2027 pass
2051 else:
2028 else:
2052 ui.warn(_("'hg parent REV' is deprecated, "
2029 ui.warn(_("'hg parent REV' is deprecated, "
2053 "please use 'hg parents -r REV instead\n"))
2030 "please use 'hg parents -r REV instead\n"))
2054
2031
2055 if rev:
2032 if rev:
2056 if file_:
2033 if file_:
2057 ctx = repo.filectx(file_, changeid=rev)
2034 ctx = repo.filectx(file_, changeid=rev)
2058 else:
2035 else:
2059 ctx = repo.changectx(rev)
2036 ctx = repo.changectx(rev)
2060 p = [cp.node() for cp in ctx.parents()]
2037 p = [cp.node() for cp in ctx.parents()]
2061 else:
2038 else:
2062 p = repo.dirstate.parents()
2039 p = repo.dirstate.parents()
2063
2040
2064 br = None
2041 br = None
2065 if branches is not None:
2042 if branches is not None:
2066 ui.warn(_("the --branches option is deprecated, "
2043 ui.warn(_("the --branches option is deprecated, "
2067 "please use 'hg branches' instead\n"))
2044 "please use 'hg branches' instead\n"))
2068 br = repo.branchlookup(p)
2045 br = repo.branchlookup(p)
2069 displayer = show_changeset(ui, repo, opts)
2046 displayer = show_changeset(ui, repo, opts)
2070 for n in p:
2047 for n in p:
2071 if n != nullid:
2048 if n != nullid:
2072 displayer.show(changenode=n, brinfo=br)
2049 displayer.show(changenode=n, brinfo=br)
2073
2050
2074 def paths(ui, repo, search=None):
2051 def paths(ui, repo, search=None):
2075 """show definition of symbolic path names
2052 """show definition of symbolic path names
2076
2053
2077 Show definition of symbolic path name NAME. If no name is given, show
2054 Show definition of symbolic path name NAME. If no name is given, show
2078 definition of available names.
2055 definition of available names.
2079
2056
2080 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2057 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2081 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2058 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2082 """
2059 """
2083 if search:
2060 if search:
2084 for name, path in ui.configitems("paths"):
2061 for name, path in ui.configitems("paths"):
2085 if name == search:
2062 if name == search:
2086 ui.write("%s\n" % path)
2063 ui.write("%s\n" % path)
2087 return
2064 return
2088 ui.warn(_("not found!\n"))
2065 ui.warn(_("not found!\n"))
2089 return 1
2066 return 1
2090 else:
2067 else:
2091 for name, path in ui.configitems("paths"):
2068 for name, path in ui.configitems("paths"):
2092 ui.write("%s = %s\n" % (name, path))
2069 ui.write("%s = %s\n" % (name, path))
2093
2070
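# Example configuration (editor's gloss, hypothetical values):
#   [paths]
#   default = http://example.com/hg/project
#   default-push = ssh://example.com//srv/hg/project
# With this in an hgrc, "hg paths" lists both entries and "hg paths default"
# prints just the first URL.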
2094 def postincoming(ui, repo, modheads, optupdate):
2071 def postincoming(ui, repo, modheads, optupdate):
2095 if modheads == 0:
2072 if modheads == 0:
2096 return
2073 return
2097 if optupdate:
2074 if optupdate:
2098 if modheads == 1:
2075 if modheads == 1:
2099 return hg.update(repo, repo.changelog.tip()) # update
2076 return hg.update(repo, repo.changelog.tip()) # update
2100 else:
2077 else:
2101 ui.status(_("not updating, since new heads added\n"))
2078 ui.status(_("not updating, since new heads added\n"))
2102 if modheads > 1:
2079 if modheads > 1:
2103 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2080 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2104 else:
2081 else:
2105 ui.status(_("(run 'hg update' to get a working copy)\n"))
2082 ui.status(_("(run 'hg update' to get a working copy)\n"))
2106
2083
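# Editor's gloss: after changesets have been added, modheads == 0 means
# nothing came in, modheads == 1 means the working directory can be updated
# automatically when an update was requested, and modheads > 1 means new
# heads appeared, so the user is pointed at "hg heads" / "hg merge" instead
# of being updated.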
2107 def pull(ui, repo, source="default", **opts):
2084 def pull(ui, repo, source="default", **opts):
2108 """pull changes from the specified source
2085 """pull changes from the specified source
2109
2086
2110 Pull changes from a remote repository to a local one.
2087 Pull changes from a remote repository to a local one.
2111
2088
2112 This finds all changes from the repository at the specified path
2089 This finds all changes from the repository at the specified path
2113 or URL and adds them to the local repository. By default, this
2090 or URL and adds them to the local repository. By default, this
2114 does not update the copy of the project in the working directory.
2091 does not update the copy of the project in the working directory.
2115
2092
2116 Valid URLs are of the form:
2093 Valid URLs are of the form:
2117
2094
2118 local/filesystem/path (or file://local/filesystem/path)
2095 local/filesystem/path (or file://local/filesystem/path)
2119 http://[user@]host[:port]/[path]
2096 http://[user@]host[:port]/[path]
2120 https://[user@]host[:port]/[path]
2097 https://[user@]host[:port]/[path]
2121 ssh://[user@]host[:port]/[path]
2098 ssh://[user@]host[:port]/[path]
2122 static-http://host[:port]/[path]
2099 static-http://host[:port]/[path]
2123
2100
2124 Paths in the local filesystem can either point to Mercurial
2101 Paths in the local filesystem can either point to Mercurial
2125 repositories or to bundle files (as created by 'hg bundle' or
2102 repositories or to bundle files (as created by 'hg bundle' or
2126 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2103 'hg incoming --bundle'). The static-http:// protocol, albeit slow,
2127 allows access to a Mercurial repository where you simply use a web
2104 allows access to a Mercurial repository where you simply use a web
2128 server to publish the .hg directory as static content.
2105 server to publish the .hg directory as static content.
2129
2106
2130 Some notes about using SSH with Mercurial:
2107 Some notes about using SSH with Mercurial:
2131 - SSH requires an accessible shell account on the destination machine
2108 - SSH requires an accessible shell account on the destination machine
2132 and a copy of hg in the remote path, or specified with the --remotecmd option.
2109 and a copy of hg in the remote path, or specified with the --remotecmd option.
2133 - path is relative to the remote user's home directory by default.
2110 - path is relative to the remote user's home directory by default.
2134 Use an extra slash at the start of a path to specify an absolute path:
2111 Use an extra slash at the start of a path to specify an absolute path:
2135 ssh://example.com//tmp/repository
2112 ssh://example.com//tmp/repository
2136 - Mercurial doesn't use its own compression via SSH; the right thing
2113 - Mercurial doesn't use its own compression via SSH; the right thing
2137 to do is to configure it in your ~/.ssh/config, e.g.:
2114 to do is to configure it in your ~/.ssh/config, e.g.:
2138 Host *.mylocalnetwork.example.com
2115 Host *.mylocalnetwork.example.com
2139 Compression no
2116 Compression no
2140 Host *
2117 Host *
2141 Compression yes
2118 Compression yes
2142 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2119 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2143 with the --ssh command line option.
2120 with the --ssh command line option.
2144 """
2121 """
2145 source = ui.expandpath(source)
2122 source = ui.expandpath(source)
2146 setremoteconfig(ui, opts)
2123 setremoteconfig(ui, opts)
2147
2124
2148 other = hg.repository(ui, source)
2125 other = hg.repository(ui, source)
2149 ui.status(_('pulling from %s\n') % (source))
2126 ui.status(_('pulling from %s\n') % (source))
2150 revs = None
2127 revs = None
2151 if opts['rev']:
2128 if opts['rev']:
2152 if 'lookup' in other.capabilities:
2129 if 'lookup' in other.capabilities:
2153 revs = [other.lookup(rev) for rev in opts['rev']]
2130 revs = [other.lookup(rev) for rev in opts['rev']]
2154 else:
2131 else:
2155 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2132 error = _("Other repository doesn't support revision lookup, so a rev cannot be specified.")
2156 raise util.Abort(error)
2133 raise util.Abort(error)
2157 modheads = repo.pull(other, heads=revs, force=opts['force'])
2134 modheads = repo.pull(other, heads=revs, force=opts['force'])
2158 return postincoming(ui, repo, modheads, opts['update'])
2135 return postincoming(ui, repo, modheads, opts['update'])
2159
2136
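The -r/--rev handling above hinges on the remote advertising the 'lookup' capability; without it the requested revisions cannot be translated to nodes and pull aborts early. A standalone sketch of that guard (plain list and callable standing in for the peer object, hypothetical names):

# Sketch of pull()'s revision handling: only resolve -r arguments when the
# remote side can look revisions up; otherwise refuse before pulling.
def resolve_pull_revs(capabilities, requested, lookup):
    if not requested:
        return None
    if 'lookup' not in capabilities:
        raise RuntimeError("other repository doesn't support revision "
                           "lookup, so a rev cannot be specified")
    return [lookup(rev) for rev in requested]

# resolve_pull_revs(['lookup'], ['tip'], lambda rev: 'node-for-' + rev)
# -> ['node-for-tip']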
2160 def push(ui, repo, dest=None, **opts):
2137 def push(ui, repo, dest=None, **opts):
2161 """push changes to the specified destination
2138 """push changes to the specified destination
2162
2139
2163 Push changes from the local repository to the given destination.
2140 Push changes from the local repository to the given destination.
2164
2141
2165 This is the symmetrical operation for pull. It helps to move
2142 This is the symmetrical operation for pull. It helps to move
2166 changes from the current repository to a different one. If the
2143 changes from the current repository to a different one. If the
2167 destination is local this is identical to a pull in that directory
2144 destination is local this is identical to a pull in that directory
2168 from the current one.
2145 from the current one.
2169
2146
2170 By default, push will refuse to run if it detects the result would
2147 By default, push will refuse to run if it detects the result would
2171 increase the number of remote heads. This generally indicates the
2148 increase the number of remote heads. This generally indicates the
2172 client has forgotten to sync and merge before pushing.
2149 client has forgotten to sync and merge before pushing.
2173
2150
2174 Valid URLs are of the form:
2151 Valid URLs are of the form:
2175
2152
2176 local/filesystem/path (or file://local/filesystem/path)
2153 local/filesystem/path (or file://local/filesystem/path)
2177 ssh://[user@]host[:port]/[path]
2154 ssh://[user@]host[:port]/[path]
2178 http://[user@]host[:port]/[path]
2155 http://[user@]host[:port]/[path]
2179 https://[user@]host[:port]/[path]
2156 https://[user@]host[:port]/[path]
2180
2157
2181 Look at the help text for the pull command for important details
2158 Look at the help text for the pull command for important details
2182 about ssh:// URLs.
2159 about ssh:// URLs.
2183
2160
2184 Pushing to http:// and https:// URLs is only possible if this
2161 Pushing to http:// and https:// URLs is only possible if this
2185 feature is explicitly enabled on the remote Mercurial server.
2162 feature is explicitly enabled on the remote Mercurial server.
2186 """
2163 """
2187 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2164 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2188 setremoteconfig(ui, opts)
2165 setremoteconfig(ui, opts)
2189
2166
2190 other = hg.repository(ui, dest)
2167 other = hg.repository(ui, dest)
2191 ui.status('pushing to %s\n' % (dest))
2168 ui.status('pushing to %s\n' % (dest))
2192 revs = None
2169 revs = None
2193 if opts['rev']:
2170 if opts['rev']:
2194 revs = [repo.lookup(rev) for rev in opts['rev']]
2171 revs = [repo.lookup(rev) for rev in opts['rev']]
2195 r = repo.push(other, opts['force'], revs=revs)
2172 r = repo.push(other, opts['force'], revs=revs)
2196 return r == 0
2173 return r == 0
2197
2174
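Two details above are easy to miss: the destination falls back from an explicit DEST to the 'default-push' path and then to 'default', and the exit status is non-zero exactly when repo.push() reported nothing pushed (the "return r == 0" line). A rough standalone sketch of the fallback (a plain dict standing in for ui.expandpath):

# Sketch of push()'s destination fallback: an explicit DEST wins,
# otherwise try the 'default-push' path, then 'default'.
def expand_push_dest(paths, dest=None):
    if dest:
        return paths.get(dest, dest)
    for name in ('default-push', 'default'):
        if name in paths:
            return paths[name]
    return 'default-push'

# expand_push_dest({'default': 'http://example.com/repo'}) falls back to
# the plain 'default' path when no 'default-push' entry exists.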
2198 def rawcommit(ui, repo, *flist, **rc):
2175 def rawcommit(ui, repo, *flist, **rc):
2199 """raw commit interface (DEPRECATED)
2176 """raw commit interface (DEPRECATED)
2200
2177
2201 (DEPRECATED)
2178 (DEPRECATED)
2202 Lowlevel commit, for use in helper scripts.
2179 Lowlevel commit, for use in helper scripts.
2203
2180
2204 This command is not intended to be used by normal users, as it is
2181 This command is not intended to be used by normal users, as it is
2205 primarily useful for importing from other SCMs.
2182 primarily useful for importing from other SCMs.
2206
2183
2207 This command is now deprecated and will be removed in a future
2184 This command is now deprecated and will be removed in a future
2208 release, please use debugsetparents and commit instead.
2185 release, please use debugsetparents and commit instead.
2209 """
2186 """
2210
2187
2211 ui.warn(_("(the rawcommit command is deprecated)\n"))
2188 ui.warn(_("(the rawcommit command is deprecated)\n"))
2212
2189
2213 message = rc['message']
2190 message = rc['message']
2214 if not message and rc['logfile']:
2191 if not message and rc['logfile']:
2215 try:
2192 try:
2216 message = open(rc['logfile']).read()
2193 message = open(rc['logfile']).read()
2217 except IOError:
2194 except IOError:
2218 pass
2195 pass
2219 if not message and not rc['logfile']:
2196 if not message and not rc['logfile']:
2220 raise util.Abort(_("missing commit message"))
2197 raise util.Abort(_("missing commit message"))
2221
2198
2222 files = relpath(repo, list(flist))
2199 files = relpath(repo, list(flist))
2223 if rc['files']:
2200 if rc['files']:
2224 files += open(rc['files']).read().splitlines()
2201 files += open(rc['files']).read().splitlines()
2225
2202
2226 rc['parent'] = map(repo.lookup, rc['parent'])
2203 rc['parent'] = map(repo.lookup, rc['parent'])
2227
2204
2228 try:
2205 try:
2229 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2206 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2230 except ValueError, inst:
2207 except ValueError, inst:
2231 raise util.Abort(str(inst))
2208 raise util.Abort(str(inst))
2232
2209
2233 def recover(ui, repo):
2210 def recover(ui, repo):
2234 """roll back an interrupted transaction
2211 """roll back an interrupted transaction
2235
2212
2236 Recover from an interrupted commit or pull.
2213 Recover from an interrupted commit or pull.
2237
2214
2238 This command tries to fix the repository status after an interrupted
2215 This command tries to fix the repository status after an interrupted
2239 operation. It should only be necessary when Mercurial suggests it.
2216 operation. It should only be necessary when Mercurial suggests it.
2240 """
2217 """
2241 if repo.recover():
2218 if repo.recover():
2242 return hg.verify(repo)
2219 return hg.verify(repo)
2243 return 1
2220 return 1
2244
2221
2245 def remove(ui, repo, *pats, **opts):
2222 def remove(ui, repo, *pats, **opts):
2246 """remove the specified files on the next commit
2223 """remove the specified files on the next commit
2247
2224
2248 Schedule the indicated files for removal from the repository.
2225 Schedule the indicated files for removal from the repository.
2249
2226
2250 This command schedules the files to be removed at the next commit.
2227 This command schedules the files to be removed at the next commit.
2251 This only removes files from the current branch, not from the
2228 This only removes files from the current branch, not from the
2252 entire project history. If the files still exist in the working
2229 entire project history. If the files still exist in the working
2253 directory, they will be deleted from it. If invoked with --after,
2230 directory, they will be deleted from it. If invoked with --after,
2254 files that have been manually deleted are marked as removed.
2231 files that have been manually deleted are marked as removed.
2255
2232
2256 Modified files and added files are not removed by default. To
2233 Modified files and added files are not removed by default. To
2257 remove them, use the -f/--force option.
2234 remove them, use the -f/--force option.
2258 """
2235 """
2259 names = []
2236 names = []
2260 if not opts['after'] and not pats:
2237 if not opts['after'] and not pats:
2261 raise util.Abort(_('no files specified'))
2238 raise util.Abort(_('no files specified'))
2262 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2239 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2263 exact = dict.fromkeys(files)
2240 exact = dict.fromkeys(files)
2264 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2241 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2265 modified, added, removed, deleted, unknown = mardu
2242 modified, added, removed, deleted, unknown = mardu
2266 remove, forget = [], []
2243 remove, forget = [], []
2267 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2244 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2268 reason = None
2245 reason = None
2269 if abs not in deleted and opts['after']:
2246 if abs not in deleted and opts['after']:
2270 reason = _('is still present')
2247 reason = _('is still present')
2271 elif abs in modified and not opts['force']:
2248 elif abs in modified and not opts['force']:
2272 reason = _('is modified (use -f to force removal)')
2249 reason = _('is modified (use -f to force removal)')
2273 elif abs in added:
2250 elif abs in added:
2274 if opts['force']:
2251 if opts['force']:
2275 forget.append(abs)
2252 forget.append(abs)
2276 continue
2253 continue
2277 reason = _('has been marked for add (use -f to force removal)')
2254 reason = _('has been marked for add (use -f to force removal)')
2278 elif abs in unknown:
2255 elif abs in unknown:
2279 reason = _('is not managed')
2256 reason = _('is not managed')
2280 elif abs in removed:
2257 elif abs in removed:
2281 continue
2258 continue
2282 if reason:
2259 if reason:
2283 if exact:
2260 if exact:
2284 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2261 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2285 else:
2262 else:
2286 if ui.verbose or not exact:
2263 if ui.verbose or not exact:
2287 ui.status(_('removing %s\n') % rel)
2264 ui.status(_('removing %s\n') % rel)
2288 remove.append(abs)
2265 remove.append(abs)
2289 repo.forget(forget)
2266 repo.forget(forget)
2290 repo.remove(remove, unlink=not opts['after'])
2267 repo.remove(remove, unlink=not opts['after'])
2291
2268
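The walk above boils down to a per-file classification: some states veto the removal with a reason, 'added' files are only forgotten under --force, and already-removed files are skipped silently. A standalone sketch of that decision (a single state string standing in for the status dictionaries):

# Sketch of remove()'s per-file decision.  Returns (action, reason) where
# action is 'remove', 'forget' or None and reason explains a refusal.
def removal_action(state, after=False, force=False):
    if state != 'deleted' and after:
        return None, 'is still present'
    if state == 'modified' and not force:
        return None, 'is modified (use -f to force removal)'
    if state == 'added':
        if force:
            return 'forget', None
        return None, 'has been marked for add (use -f to force removal)'
    if state == 'unknown':
        return None, 'is not managed'
    if state == 'removed':
        return None, None          # already scheduled, nothing to do
    return 'remove', None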
2292 def rename(ui, repo, *pats, **opts):
2269 def rename(ui, repo, *pats, **opts):
2293 """rename files; equivalent of copy + remove
2270 """rename files; equivalent of copy + remove
2294
2271
2295 Mark dest as copies of sources; mark sources for deletion. If
2272 Mark dest as copies of sources; mark sources for deletion. If
2296 dest is a directory, copies are put in that directory. If dest is
2273 dest is a directory, copies are put in that directory. If dest is
2297 a file, there can only be one source.
2274 a file, there can only be one source.
2298
2275
2299 By default, this command copies the contents of files as they
2276 By default, this command copies the contents of files as they
2300 stand in the working directory. If invoked with --after, the
2277 stand in the working directory. If invoked with --after, the
2301 operation is recorded, but no copying is performed.
2278 operation is recorded, but no copying is performed.
2302
2279
2303 This command takes effect in the next commit.
2280 This command takes effect in the next commit.
2304
2281
2305 NOTE: This command should be treated as experimental. While it
2282 NOTE: This command should be treated as experimental. While it
2306 should properly record file renames, this information is not yet
2283 should properly record file renames, this information is not yet
2307 fully used by merge, nor fully reported by log.
2284 fully used by merge, nor fully reported by log.
2308 """
2285 """
2309 wlock = repo.wlock(0)
2286 wlock = repo.wlock(0)
2310 errs, copied = docopy(ui, repo, pats, opts, wlock)
2287 errs, copied = docopy(ui, repo, pats, opts, wlock)
2311 names = []
2288 names = []
2312 for abs, rel, exact in copied:
2289 for abs, rel, exact in copied:
2313 if ui.verbose or not exact:
2290 if ui.verbose or not exact:
2314 ui.status(_('removing %s\n') % rel)
2291 ui.status(_('removing %s\n') % rel)
2315 names.append(abs)
2292 names.append(abs)
2316 if not opts.get('dry_run'):
2293 if not opts.get('dry_run'):
2317 repo.remove(names, True, wlock)
2294 repo.remove(names, True, wlock)
2318 return errs
2295 return errs
2319
2296
2320 def revert(ui, repo, *pats, **opts):
2297 def revert(ui, repo, *pats, **opts):
2321 """revert files or dirs to their states as of some revision
2298 """revert files or dirs to their states as of some revision
2322
2299
2323 With no revision specified, revert the named files or directories
2300 With no revision specified, revert the named files or directories
2324 to the contents they had in the parent of the working directory.
2301 to the contents they had in the parent of the working directory.
2325 This restores the contents of the affected files to an unmodified
2302 This restores the contents of the affected files to an unmodified
2326 state. If the working directory has two parents, you must
2303 state. If the working directory has two parents, you must
2327 explicitly specify the revision to revert to.
2304 explicitly specify the revision to revert to.
2328
2305
2329 Modified files are saved with a .orig suffix before reverting.
2306 Modified files are saved with a .orig suffix before reverting.
2330 To disable these backups, use --no-backup.
2307 To disable these backups, use --no-backup.
2331
2308
2332 Using the -r option, revert the given files or directories to their
2309 Using the -r option, revert the given files or directories to their
2333 contents as of a specific revision. This can be helpful to "roll
2310 contents as of a specific revision. This can be helpful to "roll
2334 back" some or all of a change that should not have been committed.
2311 back" some or all of a change that should not have been committed.
2335
2312
2336 Revert modifies the working directory. It does not commit any
2313 Revert modifies the working directory. It does not commit any
2337 changes, or change the parent of the working directory. If you
2314 changes, or change the parent of the working directory. If you
2338 revert to a revision other than the parent of the working
2315 revert to a revision other than the parent of the working
2339 directory, the reverted files will thus appear modified
2316 directory, the reverted files will thus appear modified
2340 afterwards.
2317 afterwards.
2341
2318
2342 If a file has been deleted, it is recreated. If the executable
2319 If a file has been deleted, it is recreated. If the executable
2343 mode of a file was changed, it is reset.
2320 mode of a file was changed, it is reset.
2344
2321
2345 If names are given, all files matching the names are reverted.
2322 If names are given, all files matching the names are reverted.
2346
2323
2347 If no arguments are given, no files are reverted.
2324 If no arguments are given, no files are reverted.
2348 """
2325 """
2349
2326
2350 if not pats and not opts['all']:
2327 if not pats and not opts['all']:
2351 raise util.Abort(_('no files or directories specified; '
2328 raise util.Abort(_('no files or directories specified; '
2352 'use --all to revert the whole repo'))
2329 'use --all to revert the whole repo'))
2353
2330
2354 parent, p2 = repo.dirstate.parents()
2331 parent, p2 = repo.dirstate.parents()
2355 if not opts['rev'] and p2 != nullid:
2332 if not opts['rev'] and p2 != nullid:
2356 raise util.Abort(_('uncommitted merge - please provide a '
2333 raise util.Abort(_('uncommitted merge - please provide a '
2357 'specific revision'))
2334 'specific revision'))
2358 node = repo.changectx(opts['rev']).node()
2335 node = repo.changectx(opts['rev']).node()
2359 mf = repo.manifest.read(repo.changelog.read(node)[0])
2336 mf = repo.manifest.read(repo.changelog.read(node)[0])
2360 if node == parent:
2337 if node == parent:
2361 pmf = mf
2338 pmf = mf
2362 else:
2339 else:
2363 pmf = None
2340 pmf = None
2364
2341
2365 wlock = repo.wlock()
2342 wlock = repo.wlock()
2366
2343
2367 # need all matching names in dirstate and manifest of target rev,
2344 # need all matching names in dirstate and manifest of target rev,
2368 # so have to walk both. do not print errors if files exist in one
2345 # so have to walk both. do not print errors if files exist in one
2369 # but not other.
2346 # but not other.
2370
2347
2371 names = {}
2348 names = {}
2372 target_only = {}
2349 target_only = {}
2373
2350
2374 # walk dirstate.
2351 # walk dirstate.
2375
2352
2376 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2353 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2377 badmatch=mf.has_key):
2354 badmatch=mf.has_key):
2378 names[abs] = (rel, exact)
2355 names[abs] = (rel, exact)
2379 if src == 'b':
2356 if src == 'b':
2380 target_only[abs] = True
2357 target_only[abs] = True
2381
2358
2382 # walk target manifest.
2359 # walk target manifest.
2383
2360
2384 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2361 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2385 badmatch=names.has_key):
2362 badmatch=names.has_key):
2386 if abs in names: continue
2363 if abs in names: continue
2387 names[abs] = (rel, exact)
2364 names[abs] = (rel, exact)
2388 target_only[abs] = True
2365 target_only[abs] = True
2389
2366
2390 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2367 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2391 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2368 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2392
2369
2393 revert = ([], _('reverting %s\n'))
2370 revert = ([], _('reverting %s\n'))
2394 add = ([], _('adding %s\n'))
2371 add = ([], _('adding %s\n'))
2395 remove = ([], _('removing %s\n'))
2372 remove = ([], _('removing %s\n'))
2396 forget = ([], _('forgetting %s\n'))
2373 forget = ([], _('forgetting %s\n'))
2397 undelete = ([], _('undeleting %s\n'))
2374 undelete = ([], _('undeleting %s\n'))
2398 update = {}
2375 update = {}
2399
2376
2400 disptable = (
2377 disptable = (
2401 # dispatch table:
2378 # dispatch table:
2402 # file state
2379 # file state
2403 # action if in target manifest
2380 # action if in target manifest
2404 # action if not in target manifest
2381 # action if not in target manifest
2405 # make backup if in target manifest
2382 # make backup if in target manifest
2406 # make backup if not in target manifest
2383 # make backup if not in target manifest
2407 (modified, revert, remove, True, True),
2384 (modified, revert, remove, True, True),
2408 (added, revert, forget, True, False),
2385 (added, revert, forget, True, False),
2409 (removed, undelete, None, False, False),
2386 (removed, undelete, None, False, False),
2410 (deleted, revert, remove, False, False),
2387 (deleted, revert, remove, False, False),
2411 (unknown, add, None, True, False),
2388 (unknown, add, None, True, False),
2412 (target_only, add, None, False, False),
2389 (target_only, add, None, False, False),
2413 )
2390 )
2414
2391
2415 entries = names.items()
2392 entries = names.items()
2416 entries.sort()
2393 entries.sort()
2417
2394
2418 for abs, (rel, exact) in entries:
2395 for abs, (rel, exact) in entries:
2419 mfentry = mf.get(abs)
2396 mfentry = mf.get(abs)
2420 def handle(xlist, dobackup):
2397 def handle(xlist, dobackup):
2421 xlist[0].append(abs)
2398 xlist[0].append(abs)
2422 update[abs] = 1
2399 update[abs] = 1
2423 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2400 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2424 bakname = "%s.orig" % rel
2401 bakname = "%s.orig" % rel
2425 ui.note(_('saving current version of %s as %s\n') %
2402 ui.note(_('saving current version of %s as %s\n') %
2426 (rel, bakname))
2403 (rel, bakname))
2427 if not opts.get('dry_run'):
2404 if not opts.get('dry_run'):
2428 shutil.copyfile(rel, bakname)
2405 shutil.copyfile(rel, bakname)
2429 shutil.copymode(rel, bakname)
2406 shutil.copymode(rel, bakname)
2430 if ui.verbose or not exact:
2407 if ui.verbose or not exact:
2431 ui.status(xlist[1] % rel)
2408 ui.status(xlist[1] % rel)
2432 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2409 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2433 if abs not in table: continue
2410 if abs not in table: continue
2434 # file has changed in dirstate
2411 # file has changed in dirstate
2435 if mfentry:
2412 if mfentry:
2436 handle(hitlist, backuphit)
2413 handle(hitlist, backuphit)
2437 elif misslist is not None:
2414 elif misslist is not None:
2438 handle(misslist, backupmiss)
2415 handle(misslist, backupmiss)
2439 else:
2416 else:
2440 if exact: ui.warn(_('file not managed: %s\n' % rel))
2417 if exact: ui.warn(_('file not managed: %s\n' % rel))
2441 break
2418 break
2442 else:
2419 else:
2443 # file has not changed in dirstate
2420 # file has not changed in dirstate
2444 if node == parent:
2421 if node == parent:
2445 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2422 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2446 continue
2423 continue
2447 if pmf is None:
2424 if pmf is None:
2448 # only need parent manifest in this unlikely case,
2425 # only need parent manifest in this unlikely case,
2449 # so do not read by default
2426 # so do not read by default
2450 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2427 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2451 if abs in pmf:
2428 if abs in pmf:
2452 if mfentry:
2429 if mfentry:
2453 # if version of file is same in parent and target
2430 # if version of file is same in parent and target
2454 # manifests, do nothing
2431 # manifests, do nothing
2455 if pmf[abs] != mfentry:
2432 if pmf[abs] != mfentry:
2456 handle(revert, False)
2433 handle(revert, False)
2457 else:
2434 else:
2458 handle(remove, False)
2435 handle(remove, False)
2459
2436
2460 if not opts.get('dry_run'):
2437 if not opts.get('dry_run'):
2461 repo.dirstate.forget(forget[0])
2438 repo.dirstate.forget(forget[0])
2462 r = hg.revert(repo, node, update.has_key, wlock)
2439 r = hg.revert(repo, node, update.has_key, wlock)
2463 repo.dirstate.update(add[0], 'a')
2440 repo.dirstate.update(add[0], 'a')
2464 repo.dirstate.update(undelete[0], 'n')
2441 repo.dirstate.update(undelete[0], 'n')
2465 repo.dirstate.update(remove[0], 'r')
2442 repo.dirstate.update(remove[0], 'r')
2466 return r
2443 return r
2467
2444
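The dispatch table above is the heart of revert: for each dirstate status it names the action when the file exists in the target manifest, the action when it does not, and whether a .orig backup is made in each case. A standalone sketch of the same lookup (a dict keyed by status, hypothetical names):

# Sketch of revert()'s dispatch table.
#            status: (action if in target, action if missing,
#                     backup if in target, backup if missing)
REVERT_DISPATCH = {
    'modified':    ('revert',   'remove', True,  True),
    'added':       ('revert',   'forget', True,  False),
    'removed':     ('undelete', None,     False, False),
    'deleted':     ('revert',   'remove', False, False),
    'unknown':     ('add',      None,     True,  False),
    'target_only': ('add',      None,     False, False),
}

def revert_action(status, in_target_manifest):
    hit, miss, backup_hit, backup_miss = REVERT_DISPATCH[status]
    if in_target_manifest:
        return hit, backup_hit
    return miss, backup_miss

# revert_action('modified', False) -> ('remove', True): the file is gone
# from the target revision, so it is removed and a .orig backup is kept.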
2468 def rollback(ui, repo):
2445 def rollback(ui, repo):
2469 """roll back the last transaction in this repository
2446 """roll back the last transaction in this repository
2470
2447
2471 Roll back the last transaction in this repository, restoring the
2448 Roll back the last transaction in this repository, restoring the
2472 project to its state prior to the transaction.
2449 project to its state prior to the transaction.
2473
2450
2474 Transactions are used to encapsulate the effects of all commands
2451 Transactions are used to encapsulate the effects of all commands
2475 that create new changesets or propagate existing changesets into a
2452 that create new changesets or propagate existing changesets into a
2476 repository. For example, the following commands are transactional,
2453 repository. For example, the following commands are transactional,
2477 and their effects can be rolled back:
2454 and their effects can be rolled back:
2478
2455
2479 commit
2456 commit
2480 import
2457 import
2481 pull
2458 pull
2482 push (with this repository as destination)
2459 push (with this repository as destination)
2483 unbundle
2460 unbundle
2484
2461
2485 This command should be used with care. There is only one level of
2462 This command should be used with care. There is only one level of
2486 rollback, and there is no way to undo a rollback.
2463 rollback, and there is no way to undo a rollback.
2487
2464
2488 This command is not intended for use on public repositories. Once
2465 This command is not intended for use on public repositories. Once
2489 changes are visible for pull by other users, rolling a transaction
2466 changes are visible for pull by other users, rolling a transaction
2490 back locally is ineffective (someone else may already have pulled
2467 back locally is ineffective (someone else may already have pulled
2491 the changes). Furthermore, a race is possible with readers of the
2468 the changes). Furthermore, a race is possible with readers of the
2492 repository; for example an in-progress pull from the repository
2469 repository; for example an in-progress pull from the repository
2493 may fail if a rollback is performed.
2470 may fail if a rollback is performed.
2494 """
2471 """
2495 repo.rollback()
2472 repo.rollback()
2496
2473
2497 def root(ui, repo):
2474 def root(ui, repo):
2498 """print the root (top) of the current working dir
2475 """print the root (top) of the current working dir
2499
2476
2500 Print the root directory of the current repository.
2477 Print the root directory of the current repository.
2501 """
2478 """
2502 ui.write(repo.root + "\n")
2479 ui.write(repo.root + "\n")
2503
2480
2504 def serve(ui, repo, **opts):
2481 def serve(ui, repo, **opts):
2505 """export the repository via HTTP
2482 """export the repository via HTTP
2506
2483
2507 Start a local HTTP repository browser and pull server.
2484 Start a local HTTP repository browser and pull server.
2508
2485
2509 By default, the server logs accesses to stdout and errors to
2486 By default, the server logs accesses to stdout and errors to
2510 stderr. Use the "-A" and "-E" options to log to files.
2487 stderr. Use the "-A" and "-E" options to log to files.
2511 """
2488 """
2512
2489
2513 if opts["stdio"]:
2490 if opts["stdio"]:
2514 if repo is None:
2491 if repo is None:
2515 raise hg.RepoError(_("There is no Mercurial repository here"
2492 raise hg.RepoError(_("There is no Mercurial repository here"
2516 " (.hg not found)"))
2493 " (.hg not found)"))
2517 s = sshserver.sshserver(ui, repo)
2494 s = sshserver.sshserver(ui, repo)
2518 s.serve_forever()
2495 s.serve_forever()
2519
2496
2520 optlist = ("name templates style address port ipv6"
2497 optlist = ("name templates style address port ipv6"
2521 " accesslog errorlog webdir_conf")
2498 " accesslog errorlog webdir_conf")
2522 for o in optlist.split():
2499 for o in optlist.split():
2523 if opts[o]:
2500 if opts[o]:
2524 ui.setconfig("web", o, str(opts[o]))
2501 ui.setconfig("web", o, str(opts[o]))
2525
2502
2526 if repo is None and not ui.config("web", "webdir_conf"):
2503 if repo is None and not ui.config("web", "webdir_conf"):
2527 raise hg.RepoError(_("There is no Mercurial repository here"
2504 raise hg.RepoError(_("There is no Mercurial repository here"
2528 " (.hg not found)"))
2505 " (.hg not found)"))
2529
2506
2530 if opts['daemon'] and not opts['daemon_pipefds']:
2507 if opts['daemon'] and not opts['daemon_pipefds']:
2531 rfd, wfd = os.pipe()
2508 rfd, wfd = os.pipe()
2532 args = sys.argv[:]
2509 args = sys.argv[:]
2533 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2510 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2534 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2511 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2535 args[0], args)
2512 args[0], args)
2536 os.close(wfd)
2513 os.close(wfd)
2537 os.read(rfd, 1)
2514 os.read(rfd, 1)
2538 os._exit(0)
2515 os._exit(0)
2539
2516
2540 try:
2517 try:
2541 httpd = hgweb.server.create_server(ui, repo)
2518 httpd = hgweb.server.create_server(ui, repo)
2542 except socket.error, inst:
2519 except socket.error, inst:
2543 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2520 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2544
2521
2545 if ui.verbose:
2522 if ui.verbose:
2546 addr, port = httpd.socket.getsockname()
2523 addr, port = httpd.socket.getsockname()
2547 if addr == '0.0.0.0':
2524 if addr == '0.0.0.0':
2548 addr = socket.gethostname()
2525 addr = socket.gethostname()
2549 else:
2526 else:
2550 try:
2527 try:
2551 addr = socket.gethostbyaddr(addr)[0]
2528 addr = socket.gethostbyaddr(addr)[0]
2552 except socket.error:
2529 except socket.error:
2553 pass
2530 pass
2554 if port != 80:
2531 if port != 80:
2555 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2532 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2556 else:
2533 else:
2557 ui.status(_('listening at http://%s/\n') % addr)
2534 ui.status(_('listening at http://%s/\n') % addr)
2558
2535
2559 if opts['pid_file']:
2536 if opts['pid_file']:
2560 fp = open(opts['pid_file'], 'w')
2537 fp = open(opts['pid_file'], 'w')
2561 fp.write(str(os.getpid()) + '\n')
2538 fp.write(str(os.getpid()) + '\n')
2562 fp.close()
2539 fp.close()
2563
2540
2564 if opts['daemon_pipefds']:
2541 if opts['daemon_pipefds']:
2565 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2542 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2566 os.close(rfd)
2543 os.close(rfd)
2567 os.write(wfd, 'y')
2544 os.write(wfd, 'y')
2568 os.close(wfd)
2545 os.close(wfd)
2569 sys.stdout.flush()
2546 sys.stdout.flush()
2570 sys.stderr.flush()
2547 sys.stderr.flush()
2571 fd = os.open(util.nulldev, os.O_RDWR)
2548 fd = os.open(util.nulldev, os.O_RDWR)
2572 if fd != 0: os.dup2(fd, 0)
2549 if fd != 0: os.dup2(fd, 0)
2573 if fd != 1: os.dup2(fd, 1)
2550 if fd != 1: os.dup2(fd, 1)
2574 if fd != 2: os.dup2(fd, 2)
2551 if fd != 2: os.dup2(fd, 2)
2575 if fd not in (0, 1, 2): os.close(fd)
2552 if fd not in (0, 1, 2): os.close(fd)
2576
2553
2577 httpd.serve_forever()
2554 httpd.serve_forever()
2578
2555
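The --daemon path above re-executes the command with --daemon-pipefds and then blocks on a pipe: the parent only exits once the child, now detached and listening, writes a single byte back. A self-contained sketch of that readiness handshake; spawn_child is a hypothetical stand-in for the os.spawnvp re-exec, and the two functions run in different processes:

import os

# Sketch of the --daemon handshake in serve().
def start_daemon(spawn_child):
    rfd, wfd = os.pipe()
    spawn_child(rfd, wfd)       # child inherits both descriptors
    os.close(wfd)               # parent keeps only the read end
    os.read(rfd, 1)             # block until the child signals readiness
    os.close(rfd)               # the real code calls os._exit(0) here

def child_ready(rfd, wfd):
    # Called in the child once the HTTP server socket is listening.
    os.close(rfd)
    os.write(wfd, b'y')
    os.close(wfd)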
2579 def status(ui, repo, *pats, **opts):
2556 def status(ui, repo, *pats, **opts):
2580 """show changed files in the working directory
2557 """show changed files in the working directory
2581
2558
2582 Show status of files in the repository. If names are given, only
2559 Show status of files in the repository. If names are given, only
2583 files that match are shown. Files that are clean or ignored are
2560 files that match are shown. Files that are clean or ignored are
2584 not listed unless -c (clean), -i (ignored) or -A is given.
2561 not listed unless -c (clean), -i (ignored) or -A is given.
2585
2562
2586 If one revision is given, it is used as the base revision.
2563 If one revision is given, it is used as the base revision.
2587 If two revisions are given, the difference between them is shown.
2564 If two revisions are given, the difference between them is shown.
2588
2565
2589 The codes used to show the status of files are:
2566 The codes used to show the status of files are:
2590 M = modified
2567 M = modified
2591 A = added
2568 A = added
2592 R = removed
2569 R = removed
2593 C = clean
2570 C = clean
2594 ! = deleted, but still tracked
2571 ! = deleted, but still tracked
2595 ? = not tracked
2572 ? = not tracked
2596 I = ignored (not shown by default)
2573 I = ignored (not shown by default)
2597 = the previous added file was copied from here
2574 = the previous added file was copied from here
2598 """
2575 """
2599
2576
2600 all = opts['all']
2577 all = opts['all']
2601 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2578 node1, node2 = cmdutil.revpair(ui, repo, opts.get('rev'))
2602
2579
2603 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2580 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2604 cwd = (pats and repo.getcwd()) or ''
2581 cwd = (pats and repo.getcwd()) or ''
2605 modified, added, removed, deleted, unknown, ignored, clean = [
2582 modified, added, removed, deleted, unknown, ignored, clean = [
2606 [util.pathto(cwd, x) for x in n]
2583 [util.pathto(cwd, x) for x in n]
2607 for n in repo.status(node1=node1, node2=node2, files=files,
2584 for n in repo.status(node1=node1, node2=node2, files=files,
2608 match=matchfn,
2585 match=matchfn,
2609 list_ignored=all or opts['ignored'],
2586 list_ignored=all or opts['ignored'],
2610 list_clean=all or opts['clean'])]
2587 list_clean=all or opts['clean'])]
2611
2588
2612 changetypes = (('modified', 'M', modified),
2589 changetypes = (('modified', 'M', modified),
2613 ('added', 'A', added),
2590 ('added', 'A', added),
2614 ('removed', 'R', removed),
2591 ('removed', 'R', removed),
2615 ('deleted', '!', deleted),
2592 ('deleted', '!', deleted),
2616 ('unknown', '?', unknown),
2593 ('unknown', '?', unknown),
2617 ('ignored', 'I', ignored))
2594 ('ignored', 'I', ignored))
2618
2595
2619 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2596 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2620
2597
2621 end = opts['print0'] and '\0' or '\n'
2598 end = opts['print0'] and '\0' or '\n'
2622
2599
2623 for opt, char, changes in ([ct for ct in explicit_changetypes
2600 for opt, char, changes in ([ct for ct in explicit_changetypes
2624 if all or opts[ct[0]]]
2601 if all or opts[ct[0]]]
2625 or changetypes):
2602 or changetypes):
2626 if opts['no_status']:
2603 if opts['no_status']:
2627 format = "%%s%s" % end
2604 format = "%%s%s" % end
2628 else:
2605 else:
2629 format = "%s %%s%s" % (char, end)
2606 format = "%s %%s%s" % (char, end)
2630
2607
2631 for f in changes:
2608 for f in changes:
2632 ui.write(format % f)
2609 ui.write(format % f)
2633 if ((all or opts.get('copies')) and not opts.get('no_status')):
2610 if ((all or opts.get('copies')) and not opts.get('no_status')):
2634 copied = repo.dirstate.copied(f)
2611 copied = repo.dirstate.copied(f)
2635 if copied:
2612 if copied:
2636 ui.write(' %s%s' % (copied, end))
2613 ui.write(' %s%s' % (copied, end))
2637
2614
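The output loop above builds one format string per status code: the status-letter prefix is dropped under -n/--no-status, and --print0 swaps the newline terminator for a NUL. A standalone sketch of that format construction:

# Sketch of status()'s per-code format string.
def status_format(char, no_status=False, print0=False):
    end = print0 and '\0' or '\n'
    if no_status:
        return "%%s%s" % end         # just the file name
    return "%s %%s%s" % (char, end)  # "M filename\n" style

# status_format('M') == 'M %s\n'; each file name is substituted into the
# format when the loop writes it out.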
2638 def tag(ui, repo, name, rev_=None, **opts):
2615 def tag(ui, repo, name, rev_=None, **opts):
2639 """add a tag for the current tip or a given revision
2616 """add a tag for the current tip or a given revision
2640
2617
2641 Name a particular revision using <name>.
2618 Name a particular revision using <name>.
2642
2619
2643 Tags are used to name particular revisions of the repository and are
2620 Tags are used to name particular revisions of the repository and are
2644 very useful to compare different revisions, to go back to significant
2621 very useful to compare different revisions, to go back to significant
2645 earlier versions or to mark branch points as releases, etc.
2622 earlier versions or to mark branch points as releases, etc.
2646
2623
2647 If no revision is given, the parent of the working directory is used.
2624 If no revision is given, the parent of the working directory is used.
2648
2625
2649 To facilitate version control, distribution, and merging of tags,
2626 To facilitate version control, distribution, and merging of tags,
2650 they are stored as a file named ".hgtags" which is managed
2627 they are stored as a file named ".hgtags" which is managed
2651 similarly to other project files and can be hand-edited if
2628 similarly to other project files and can be hand-edited if
2652 necessary. The file '.hg/localtags' is used for local tags (not
2629 necessary. The file '.hg/localtags' is used for local tags (not
2653 shared among repositories).
2630 shared among repositories).
2654 """
2631 """
2655 if name in ['tip', '.']:
2632 if name in ['tip', '.']:
2656 raise util.Abort(_("the name '%s' is reserved") % name)
2633 raise util.Abort(_("the name '%s' is reserved") % name)
2657 if rev_ is not None:
2634 if rev_ is not None:
2658 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2635 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2659 "please use 'hg tag [-r REV] NAME' instead\n"))
2636 "please use 'hg tag [-r REV] NAME' instead\n"))
2660 if opts['rev']:
2637 if opts['rev']:
2661 raise util.Abort(_("use only one form to specify the revision"))
2638 raise util.Abort(_("use only one form to specify the revision"))
2662 if opts['rev']:
2639 if opts['rev']:
2663 rev_ = opts['rev']
2640 rev_ = opts['rev']
2664 if not rev_ and repo.dirstate.parents()[1] != nullid:
2641 if not rev_ and repo.dirstate.parents()[1] != nullid:
2665 raise util.Abort(_('uncommitted merge - please provide a '
2642 raise util.Abort(_('uncommitted merge - please provide a '
2666 'specific revision'))
2643 'specific revision'))
2667 r = repo.changectx(rev_).node()
2644 r = repo.changectx(rev_).node()
2668
2645
2669 message = opts['message']
2646 message = opts['message']
2670 if not message:
2647 if not message:
2671 message = _('Added tag %s for changeset %s') % (name, short(r))
2648 message = _('Added tag %s for changeset %s') % (name, short(r))
2672
2649
2673 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2650 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2674
2651
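Apart from writing .hgtags, tag() front-loads a few argument checks: 'tip' and '.' are reserved names, the deprecated positional REV may not be combined with -r, and tagging during an uncommitted merge without an explicit revision is refused. A standalone sketch of those checks (hypothetical helper name):

# Sketch of tag()'s argument validation.
def check_tag_args(name, positional_rev=None, opt_rev=None, merge_pending=False):
    if name in ('tip', '.'):
        raise ValueError("the name '%s' is reserved" % name)
    if positional_rev is not None and opt_rev:
        raise ValueError("use only one form to specify the revision")
    rev = opt_rev or positional_rev
    if not rev and merge_pending:
        raise ValueError("uncommitted merge - please provide a specific revision")
    return rev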
2675 def tags(ui, repo):
2652 def tags(ui, repo):
2676 """list repository tags
2653 """list repository tags
2677
2654
2678 List the repository tags.
2655 List the repository tags.
2679
2656
2680 This lists both regular and local tags.
2657 This lists both regular and local tags.
2681 """
2658 """
2682
2659
2683 l = repo.tagslist()
2660 l = repo.tagslist()
2684 l.reverse()
2661 l.reverse()
2685 hexfunc = ui.debugflag and hex or short
2662 hexfunc = ui.debugflag and hex or short
2686 for t, n in l:
2663 for t, n in l:
2687 try:
2664 try:
2688 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2665 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2689 except KeyError:
2666 except KeyError:
2690 r = " ?:?"
2667 r = " ?:?"
2691 if ui.quiet:
2668 if ui.quiet:
2692 ui.write("%s\n" % t)
2669 ui.write("%s\n" % t)
2693 else:
2670 else:
2694 ui.write("%-30s %s\n" % (t, r))
2671 ui.write("%-30s %s\n" % (t, r))
2695
2672
2696 def tip(ui, repo, **opts):
2673 def tip(ui, repo, **opts):
2697 """show the tip revision
2674 """show the tip revision
2698
2675
2699 Show the tip revision.
2676 Show the tip revision.
2700 """
2677 """
2701 n = repo.changelog.tip()
2678 n = repo.changelog.tip()
2702 br = None
2679 br = None
2703 if opts['branches']:
2680 if opts['branches']:
2704 ui.warn(_("the --branches option is deprecated, "
2681 ui.warn(_("the --branches option is deprecated, "
2705 "please use 'hg branches' instead\n"))
2682 "please use 'hg branches' instead\n"))
2706 br = repo.branchlookup([n])
2683 br = repo.branchlookup([n])
2707 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2684 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2708 if opts['patch']:
2685 if opts['patch']:
2709 patch.diff(repo, repo.changelog.parents(n)[0], n)
2686 patch.diff(repo, repo.changelog.parents(n)[0], n)
2710
2687
2711 def unbundle(ui, repo, fname, **opts):
2688 def unbundle(ui, repo, fname, **opts):
2712 """apply a changegroup file
2689 """apply a changegroup file
2713
2690
2714 Apply a compressed changegroup file generated by the bundle
2691 Apply a compressed changegroup file generated by the bundle
2715 command.
2692 command.
2716 """
2693 """
2717 f = urllib.urlopen(fname)
2694 f = urllib.urlopen(fname)
2718
2695
2719 header = f.read(6)
2696 header = f.read(6)
2720 if not header.startswith("HG"):
2697 if not header.startswith("HG"):
2721 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2698 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2722 elif not header.startswith("HG10"):
2699 elif not header.startswith("HG10"):
2723 raise util.Abort(_("%s: unknown bundle version") % fname)
2700 raise util.Abort(_("%s: unknown bundle version") % fname)
2724 elif header == "HG10BZ":
2701 elif header == "HG10BZ":
2725 def generator(f):
2702 def generator(f):
2726 zd = bz2.BZ2Decompressor()
2703 zd = bz2.BZ2Decompressor()
2727 zd.decompress("BZ")
2704 zd.decompress("BZ")
2728 for chunk in f:
2705 for chunk in f:
2729 yield zd.decompress(chunk)
2706 yield zd.decompress(chunk)
2730 elif header == "HG10UN":
2707 elif header == "HG10UN":
2731 def generator(f):
2708 def generator(f):
2732 for chunk in f:
2709 for chunk in f:
2733 yield chunk
2710 yield chunk
2734 else:
2711 else:
2735 raise util.Abort(_("%s: unknown bundle compression type")
2712 raise util.Abort(_("%s: unknown bundle compression type")
2736 % fname)
2713 % fname)
2737 gen = generator(util.filechunkiter(f, 4096))
2714 gen = generator(util.filechunkiter(f, 4096))
2738 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2715 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2739 'bundle:' + fname)
2716 'bundle:' + fname)
2740 return postincoming(ui, repo, modheads, opts['update'])
2717 return postincoming(ui, repo, modheads, opts['update'])
2741
2718
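The six-byte header read above does double duty: it identifies the file as a bundle and selects the decompression generator, and for HG10BZ the 'BZ' bytes that the bundler replaced with its own magic are fed back into the decompressor before the payload. A self-contained sketch of that selection (bytes literals and a plain iterable of chunks standing in for the urllib file object):

import bz2

# Sketch of unbundle()'s header handling and chunk generators.
def bundle_chunks(header, chunks):
    if not header.startswith(b"HG"):
        raise ValueError("not a Mercurial bundle file")
    if not header.startswith(b"HG10"):
        raise ValueError("unknown bundle version")
    if header == b"HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress(b"BZ")            # restore the magic the bundler stripped
        for chunk in chunks:
            yield zd.decompress(chunk)
    elif header == b"HG10UN":
        for chunk in chunks:            # uncompressed: pass chunks through
            yield chunk
    else:
        raise ValueError("unknown bundle compression type")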
2742 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2719 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2743 branch=None):
2720 branch=None):
2744 """update or merge working directory
2721 """update or merge working directory
2745
2722
2746 Update the working directory to the specified revision.
2723 Update the working directory to the specified revision.
2747
2724
2748 If there are no outstanding changes in the working directory and
2725 If there are no outstanding changes in the working directory and
2749 there is a linear relationship between the current version and the
2726 there is a linear relationship between the current version and the
2750 requested version, the result is the requested version.
2727 requested version, the result is the requested version.
2751
2728
2752 To merge the working directory with another revision, use the
2729 To merge the working directory with another revision, use the
2753 merge command.
2730 merge command.
2754
2731
2755 By default, update will refuse to run if doing so would require
2732 By default, update will refuse to run if doing so would require
2756 merging or discarding local changes.
2733 merging or discarding local changes.
2757 """
2734 """
2758 node = _lookup(repo, node, branch)
2735 node = _lookup(repo, node, branch)
2759 if clean:
2736 if clean:
2760 return hg.clean(repo, node)
2737 return hg.clean(repo, node)
2761 else:
2738 else:
2762 return hg.update(repo, node)
2739 return hg.update(repo, node)
2763
2740
2764 def _lookup(repo, node, branch=None):
2741 def _lookup(repo, node, branch=None):
2765 if branch:
2742 if branch:
2766 repo.ui.warn(_("the --branch option is deprecated, "
2743 repo.ui.warn(_("the --branch option is deprecated, "
2767 "please use 'hg branch' instead\n"))
2744 "please use 'hg branch' instead\n"))
2768 br = repo.branchlookup(branch=branch)
2745 br = repo.branchlookup(branch=branch)
2769 found = []
2746 found = []
2770 for x in br:
2747 for x in br:
2771 if branch in br[x]:
2748 if branch in br[x]:
2772 found.append(x)
2749 found.append(x)
2773 if len(found) > 1:
2750 if len(found) > 1:
2774 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2751 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2775 for x in found:
2752 for x in found:
2776 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2753 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2777 raise util.Abort("")
2754 raise util.Abort("")
2778 if len(found) == 1:
2755 if len(found) == 1:
2779 node = found[0]
2756 node = found[0]
2780 repo.ui.warn(_("Using head %s for branch %s\n")
2757 repo.ui.warn(_("Using head %s for branch %s\n")
2781 % (short(node), branch))
2758 % (short(node), branch))
2782 else:
2759 else:
2783 raise util.Abort(_("branch %s not found") % branch)
2760 raise util.Abort(_("branch %s not found") % branch)
2784 else:
2761 else:
2785 node = node and repo.lookup(node) or repo.changelog.tip()
2762 node = node and repo.lookup(node) or repo.changelog.tip()
2786 return node
2763 return node
2787
2764
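The deprecated --branch path above resolves a branch name by scanning repo.branchlookup() for heads that carry it, refusing ambiguous names and falling back to tip when no branch was given. A standalone sketch of that resolution (a plain dict of head -> branch names, hypothetical values):

# Sketch of _lookup()'s branch resolution.
def resolve_branch(branch_heads, branch=None, tip='tip'):
    if not branch:
        return tip
    found = [head for head, names in branch_heads.items() if branch in names]
    if len(found) > 1:
        raise LookupError("found multiple heads for %s" % branch)
    if not found:
        raise LookupError("branch %s not found" % branch)
    return found[0]

# resolve_branch({'abc1': ['stable'], 'def2': ['default']}, 'stable') -> 'abc1'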
2788 def verify(ui, repo):
2765 def verify(ui, repo):
2789 """verify the integrity of the repository
2766 """verify the integrity of the repository
2790
2767
2791 Verify the integrity of the current repository.
2768 Verify the integrity of the current repository.
2792
2769
2793 This will perform an extensive check of the repository's
2770 This will perform an extensive check of the repository's
2794 integrity, validating the hashes and checksums of each entry in
2771 integrity, validating the hashes and checksums of each entry in
2795 the changelog, manifest, and tracked files, as well as the
2772 the changelog, manifest, and tracked files, as well as the
2796 integrity of their crosslinks and indices.
2773 integrity of their crosslinks and indices.
2797 """
2774 """
2798 return hg.verify(repo)
2775 return hg.verify(repo)
2799
2776
2800 # Command options and aliases are listed here, alphabetically
2777 # Command options and aliases are listed here, alphabetically
2801
2778
2802 globalopts = [
2779 globalopts = [
2803 ('R', 'repository', '',
2780 ('R', 'repository', '',
2804 _('repository root directory or symbolic path name')),
2781 _('repository root directory or symbolic path name')),
2805 ('', 'cwd', '', _('change working directory')),
2782 ('', 'cwd', '', _('change working directory')),
2806 ('y', 'noninteractive', None,
2783 ('y', 'noninteractive', None,
2807 _('do not prompt, assume \'yes\' for any required answers')),
2784 _('do not prompt, assume \'yes\' for any required answers')),
2808 ('q', 'quiet', None, _('suppress output')),
2785 ('q', 'quiet', None, _('suppress output')),
2809 ('v', 'verbose', None, _('enable additional output')),
2786 ('v', 'verbose', None, _('enable additional output')),
2810 ('', 'config', [], _('set/override config option')),
2787 ('', 'config', [], _('set/override config option')),
2811 ('', 'debug', None, _('enable debugging output')),
2788 ('', 'debug', None, _('enable debugging output')),
2812 ('', 'debugger', None, _('start debugger')),
2789 ('', 'debugger', None, _('start debugger')),
2813 ('', 'lsprof', None, _('print improved command execution profile')),
2790 ('', 'lsprof', None, _('print improved command execution profile')),
2814 ('', 'traceback', None, _('print traceback on exception')),
2791 ('', 'traceback', None, _('print traceback on exception')),
2815 ('', 'time', None, _('time how long the command takes')),
2792 ('', 'time', None, _('time how long the command takes')),
2816 ('', 'profile', None, _('print command execution profile')),
2793 ('', 'profile', None, _('print command execution profile')),
2817 ('', 'version', None, _('output version information and exit')),
2794 ('', 'version', None, _('output version information and exit')),
2818 ('h', 'help', None, _('display help and exit')),
2795 ('h', 'help', None, _('display help and exit')),
2819 ]
2796 ]
2820
2797
2821 dryrunopts = [('n', 'dry-run', None,
2798 dryrunopts = [('n', 'dry-run', None,
2822 _('do not perform actions, just print output'))]
2799 _('do not perform actions, just print output'))]
2823
2800
2824 remoteopts = [
2801 remoteopts = [
2825 ('e', 'ssh', '', _('specify ssh command to use')),
2802 ('e', 'ssh', '', _('specify ssh command to use')),
2826 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2803 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
2827 ]
2804 ]
2828
2805
2829 walkopts = [
2806 walkopts = [
2830 ('I', 'include', [], _('include names matching the given patterns')),
2807 ('I', 'include', [], _('include names matching the given patterns')),
2831 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2808 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2832 ]
2809 ]
2833
2810
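Every entry in the command table that follows is a (function, option list, synopsis) triple, with the shared option groups above (dryrunopts, remoteopts, walkopts) simply concatenated onto a command's own options; '|' in a key separates aliases such as "copy|cp". A minimal hypothetical dispatcher over a table of the same shape, not part of the real file:

# Hypothetical mini-table: name -> (function, options, synopsis).
shared_opts = [('v', 'verbose', None, 'enable additional output')]

def greet(opts):
    return 'hello' + (opts.get('verbose') and ' (verbose)' or '')

mini_table = {
    'greet': (greet, shared_opts + [('n', 'name', '', 'who to greet')],
              'hg greet [-n NAME]'),
}

def dispatch(table, name, opts):
    func, options, synopsis = table[name]
    return func(opts)

# dispatch(mini_table, 'greet', {'verbose': True}) -> 'hello (verbose)'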
2834 table = {
2811 table = {
2835 "^add":
2812 "^add":
2836 (add,
2813 (add,
2837 walkopts + dryrunopts,
2814 walkopts + dryrunopts,
2838 _('hg add [OPTION]... [FILE]...')),
2815 _('hg add [OPTION]... [FILE]...')),
2839 "addremove":
2816 "addremove":
2840 (addremove,
2817 (addremove,
2841 [('s', 'similarity', '',
2818 [('s', 'similarity', '',
2842 _('guess renamed files by similarity (0<=s<=100)')),
2819 _('guess renamed files by similarity (0<=s<=100)')),
2843 ] + walkopts + dryrunopts,
2820 ] + walkopts + dryrunopts,
2844 _('hg addremove [OPTION]... [FILE]...')),
2821 _('hg addremove [OPTION]... [FILE]...')),
2845 "^annotate":
2822 "^annotate":
2846 (annotate,
2823 (annotate,
2847 [('r', 'rev', '', _('annotate the specified revision')),
2824 [('r', 'rev', '', _('annotate the specified revision')),
2848 ('f', 'follow', None, _('follow file copies and renames')),
2825 ('f', 'follow', None, _('follow file copies and renames')),
2849 ('a', 'text', None, _('treat all files as text')),
2826 ('a', 'text', None, _('treat all files as text')),
2850 ('u', 'user', None, _('list the author')),
2827 ('u', 'user', None, _('list the author')),
2851 ('d', 'date', None, _('list the date')),
2828 ('d', 'date', None, _('list the date')),
2852 ('n', 'number', None, _('list the revision number (default)')),
2829 ('n', 'number', None, _('list the revision number (default)')),
2853 ('c', 'changeset', None, _('list the changeset')),
2830 ('c', 'changeset', None, _('list the changeset')),
2854 ] + walkopts,
2831 ] + walkopts,
2855 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2832 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2856 "archive":
2833 "archive":
2857 (archive,
2834 (archive,
2858 [('', 'no-decode', None, _('do not pass files through decoders')),
2835 [('', 'no-decode', None, _('do not pass files through decoders')),
2859 ('p', 'prefix', '', _('directory prefix for files in archive')),
2836 ('p', 'prefix', '', _('directory prefix for files in archive')),
2860 ('r', 'rev', '', _('revision to distribute')),
2837 ('r', 'rev', '', _('revision to distribute')),
2861 ('t', 'type', '', _('type of distribution to create')),
2838 ('t', 'type', '', _('type of distribution to create')),
2862 ] + walkopts,
2839 ] + walkopts,
2863 _('hg archive [OPTION]... DEST')),
2840 _('hg archive [OPTION]... DEST')),
2864 "backout":
2841 "backout":
2865 (backout,
2842 (backout,
2866 [('', 'merge', None,
2843 [('', 'merge', None,
2867 _('merge with old dirstate parent after backout')),
2844 _('merge with old dirstate parent after backout')),
2868 ('m', 'message', '', _('use <text> as commit message')),
2845 ('m', 'message', '', _('use <text> as commit message')),
2869 ('l', 'logfile', '', _('read commit message from <file>')),
2846 ('l', 'logfile', '', _('read commit message from <file>')),
2870 ('d', 'date', '', _('record datecode as commit date')),
2847 ('d', 'date', '', _('record datecode as commit date')),
2871 ('', 'parent', '', _('parent to choose when backing out merge')),
2848 ('', 'parent', '', _('parent to choose when backing out merge')),
2872 ('u', 'user', '', _('record user as committer')),
2849 ('u', 'user', '', _('record user as committer')),
2873 ] + walkopts,
2850 ] + walkopts,
2874 _('hg backout [OPTION]... REV')),
2851 _('hg backout [OPTION]... REV')),
2875 "branch": (branch, [], _('hg branch [NAME]')),
2852 "branch": (branch, [], _('hg branch [NAME]')),
2876 "branches": (branches, [], _('hg branches')),
2853 "branches": (branches, [], _('hg branches')),
2877 "bundle":
2854 "bundle":
2878 (bundle,
2855 (bundle,
2879 [('f', 'force', None,
2856 [('f', 'force', None,
2880 _('run even when remote repository is unrelated')),
2857 _('run even when remote repository is unrelated')),
2881 ('r', 'rev', [],
2858 ('r', 'rev', [],
2882 _('a changeset you would like to bundle')),
2859 _('a changeset you would like to bundle')),
2883 ('', 'base', [],
2860 ('', 'base', [],
2884 _('a base changeset to specify instead of a destination')),
2861 _('a base changeset to specify instead of a destination')),
2885 ] + remoteopts,
2862 ] + remoteopts,
2886 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2863 _('hg bundle [--base REV]... [--rev REV]... FILE [DEST]')),
2887 "cat":
2864 "cat":
2888 (cat,
2865 (cat,
2889 [('o', 'output', '', _('print output to file with formatted name')),
2866 [('o', 'output', '', _('print output to file with formatted name')),
2890 ('r', 'rev', '', _('print the given revision')),
2867 ('r', 'rev', '', _('print the given revision')),
2891 ] + walkopts,
2868 ] + walkopts,
2892 _('hg cat [OPTION]... FILE...')),
2869 _('hg cat [OPTION]... FILE...')),
2893 "^clone":
2870 "^clone":
2894 (clone,
2871 (clone,
2895 [('U', 'noupdate', None, _('do not update the new working directory')),
2872 [('U', 'noupdate', None, _('do not update the new working directory')),
2896 ('r', 'rev', [],
2873 ('r', 'rev', [],
2897 _('a changeset you would like to have after cloning')),
2874 _('a changeset you would like to have after cloning')),
2898 ('', 'pull', None, _('use pull protocol to copy metadata')),
2875 ('', 'pull', None, _('use pull protocol to copy metadata')),
2899 ('', 'uncompressed', None,
2876 ('', 'uncompressed', None,
2900 _('use uncompressed transfer (fast over LAN)')),
2877 _('use uncompressed transfer (fast over LAN)')),
2901 ] + remoteopts,
2878 ] + remoteopts,
2902 _('hg clone [OPTION]... SOURCE [DEST]')),
2879 _('hg clone [OPTION]... SOURCE [DEST]')),
2903 "^commit|ci":
2880 "^commit|ci":
2904 (commit,
2881 (commit,
2905 [('A', 'addremove', None,
2882 [('A', 'addremove', None,
2906 _('mark new/missing files as added/removed before committing')),
2883 _('mark new/missing files as added/removed before committing')),
2907 ('m', 'message', '', _('use <text> as commit message')),
2884 ('m', 'message', '', _('use <text> as commit message')),
2908 ('l', 'logfile', '', _('read the commit message from <file>')),
2885 ('l', 'logfile', '', _('read the commit message from <file>')),
2909 ('d', 'date', '', _('record datecode as commit date')),
2886 ('d', 'date', '', _('record datecode as commit date')),
2910 ('u', 'user', '', _('record user as committer')),
2887 ('u', 'user', '', _('record user as committer')),
2911 ] + walkopts,
2888 ] + walkopts,
2912 _('hg commit [OPTION]... [FILE]...')),
2889 _('hg commit [OPTION]... [FILE]...')),
2913 "copy|cp":
2890 "copy|cp":
2914 (copy,
2891 (copy,
2915 [('A', 'after', None, _('record a copy that has already occurred')),
2892 [('A', 'after', None, _('record a copy that has already occurred')),
2916 ('f', 'force', None,
2893 ('f', 'force', None,
2917 _('forcibly copy over an existing managed file')),
2894 _('forcibly copy over an existing managed file')),
2918 ] + walkopts + dryrunopts,
2895 ] + walkopts + dryrunopts,
2919 _('hg copy [OPTION]... [SOURCE]... DEST')),
2896 _('hg copy [OPTION]... [SOURCE]... DEST')),
2920 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2897 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2921 "debugcomplete":
2898 "debugcomplete":
2922 (debugcomplete,
2899 (debugcomplete,
2923 [('o', 'options', None, _('show the command options'))],
2900 [('o', 'options', None, _('show the command options'))],
2924 _('debugcomplete [-o] CMD')),
2901 _('debugcomplete [-o] CMD')),
2925 "debugrebuildstate":
2902 "debugrebuildstate":
2926 (debugrebuildstate,
2903 (debugrebuildstate,
2927 [('r', 'rev', '', _('revision to rebuild to'))],
2904 [('r', 'rev', '', _('revision to rebuild to'))],
2928 _('debugrebuildstate [-r REV] [REV]')),
2905 _('debugrebuildstate [-r REV] [REV]')),
2929 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2906 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2930 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2907 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2931 "debugstate": (debugstate, [], _('debugstate')),
2908 "debugstate": (debugstate, [], _('debugstate')),
2932 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2909 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2933 "debugindex": (debugindex, [], _('debugindex FILE')),
2910 "debugindex": (debugindex, [], _('debugindex FILE')),
2934 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2911 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2935 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2912 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2936 "debugwalk":
2913 "debugwalk":
2937 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2914 (debugwalk, walkopts, _('debugwalk [OPTION]... [FILE]...')),
2938 "^diff":
2915 "^diff":
2939 (diff,
2916 (diff,
2940 [('r', 'rev', [], _('revision')),
2917 [('r', 'rev', [], _('revision')),
2941 ('a', 'text', None, _('treat all files as text')),
2918 ('a', 'text', None, _('treat all files as text')),
2942 ('p', 'show-function', None,
2919 ('p', 'show-function', None,
2943 _('show which function each change is in')),
2920 _('show which function each change is in')),
2944 ('g', 'git', None, _('use git extended diff format')),
2921 ('g', 'git', None, _('use git extended diff format')),
2945 ('', 'nodates', None, _("don't include dates in diff headers")),
2922 ('', 'nodates', None, _("don't include dates in diff headers")),
2946 ('w', 'ignore-all-space', None,
2923 ('w', 'ignore-all-space', None,
2947 _('ignore white space when comparing lines')),
2924 _('ignore white space when comparing lines')),
2948 ('b', 'ignore-space-change', None,
2925 ('b', 'ignore-space-change', None,
2949 _('ignore changes in the amount of white space')),
2926 _('ignore changes in the amount of white space')),
2950 ('B', 'ignore-blank-lines', None,
2927 ('B', 'ignore-blank-lines', None,
2951 _('ignore changes whose lines are all blank')),
2928 _('ignore changes whose lines are all blank')),
2952 ] + walkopts,
2929 ] + walkopts,
2953 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2930 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2954 "^export":
2931 "^export":
2955 (export,
2932 (export,
2956 [('o', 'output', '', _('print output to file with formatted name')),
2933 [('o', 'output', '', _('print output to file with formatted name')),
2957 ('a', 'text', None, _('treat all files as text')),
2934 ('a', 'text', None, _('treat all files as text')),
2958 ('g', 'git', None, _('use git extended diff format')),
2935 ('g', 'git', None, _('use git extended diff format')),
2959 ('', 'nodates', None, _("don't include dates in diff headers")),
2936 ('', 'nodates', None, _("don't include dates in diff headers")),
2960 ('', 'switch-parent', None, _('diff against the second parent'))],
2937 ('', 'switch-parent', None, _('diff against the second parent'))],
2961 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2938 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2962 "grep":
2939 "grep":
2963 (grep,
2940 (grep,
2964 [('0', 'print0', None, _('end fields with NUL')),
2941 [('0', 'print0', None, _('end fields with NUL')),
2965 ('', 'all', None, _('print all revisions that match')),
2942 ('', 'all', None, _('print all revisions that match')),
2966 ('f', 'follow', None,
2943 ('f', 'follow', None,
2967 _('follow changeset history, or file history across copies and renames')),
2944 _('follow changeset history, or file history across copies and renames')),
2968 ('i', 'ignore-case', None, _('ignore case when matching')),
2945 ('i', 'ignore-case', None, _('ignore case when matching')),
2969 ('l', 'files-with-matches', None,
2946 ('l', 'files-with-matches', None,
2970 _('print only filenames and revs that match')),
2947 _('print only filenames and revs that match')),
2971 ('n', 'line-number', None, _('print matching line numbers')),
2948 ('n', 'line-number', None, _('print matching line numbers')),
2972 ('r', 'rev', [], _('search in given revision range')),
2949 ('r', 'rev', [], _('search in given revision range')),
2973 ('u', 'user', None, _('print user who committed change')),
2950 ('u', 'user', None, _('print user who committed change')),
2974 ] + walkopts,
2951 ] + walkopts,
2975 _('hg grep [OPTION]... PATTERN [FILE]...')),
2952 _('hg grep [OPTION]... PATTERN [FILE]...')),
2976 "heads":
2953 "heads":
2977 (heads,
2954 (heads,
2978 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2955 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2979 ('', 'style', '', _('display using template map file')),
2956 ('', 'style', '', _('display using template map file')),
2980 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2957 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2981 ('', 'template', '', _('display with template'))],
2958 ('', 'template', '', _('display with template'))],
2982 _('hg heads [-r REV]')),
2959 _('hg heads [-r REV]')),
2983 "help": (help_, [], _('hg help [COMMAND]')),
2960 "help": (help_, [], _('hg help [COMMAND]')),
2984 "identify|id": (identify, [], _('hg identify')),
2961 "identify|id": (identify, [], _('hg identify')),
2985 "import|patch":
2962 "import|patch":
2986 (import_,
2963 (import_,
2987 [('p', 'strip', 1,
2964 [('p', 'strip', 1,
2988 _('directory strip option for patch. This has the same\n'
2965 _('directory strip option for patch. This has the same\n'
2989 'meaning as the corresponding patch option')),
2966 'meaning as the corresponding patch option')),
2990 ('m', 'message', '', _('use <text> as commit message')),
2967 ('m', 'message', '', _('use <text> as commit message')),
2991 ('b', 'base', '', _('base path (DEPRECATED)')),
2968 ('b', 'base', '', _('base path (DEPRECATED)')),
2992 ('f', 'force', None,
2969 ('f', 'force', None,
2993 _('skip check for outstanding uncommitted changes'))],
2970 _('skip check for outstanding uncommitted changes'))],
2994 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2971 _('hg import [-p NUM] [-m MESSAGE] [-f] PATCH...')),
2995 "incoming|in": (incoming,
2972 "incoming|in": (incoming,
2996 [('M', 'no-merges', None, _('do not show merges')),
2973 [('M', 'no-merges', None, _('do not show merges')),
2997 ('f', 'force', None,
2974 ('f', 'force', None,
2998 _('run even when remote repository is unrelated')),
2975 _('run even when remote repository is unrelated')),
2999 ('', 'style', '', _('display using template map file')),
2976 ('', 'style', '', _('display using template map file')),
3000 ('n', 'newest-first', None, _('show newest record first')),
2977 ('n', 'newest-first', None, _('show newest record first')),
3001 ('', 'bundle', '', _('file to store the bundles into')),
2978 ('', 'bundle', '', _('file to store the bundles into')),
3002 ('p', 'patch', None, _('show patch')),
2979 ('p', 'patch', None, _('show patch')),
3003 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2980 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3004 ('', 'template', '', _('display with template')),
2981 ('', 'template', '', _('display with template')),
3005 ] + remoteopts,
2982 ] + remoteopts,
3006 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2983 _('hg incoming [-p] [-n] [-M] [-r REV]...'
3007 ' [--bundle FILENAME] [SOURCE]')),
2984 ' [--bundle FILENAME] [SOURCE]')),
3008 "^init":
2985 "^init":
3009 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2986 (init, remoteopts, _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
3010 "locate":
2987 "locate":
3011 (locate,
2988 (locate,
3012 [('r', 'rev', '', _('search the repository as it stood at rev')),
2989 [('r', 'rev', '', _('search the repository as it stood at rev')),
3013 ('0', 'print0', None,
2990 ('0', 'print0', None,
3014 _('end filenames with NUL, for use with xargs')),
2991 _('end filenames with NUL, for use with xargs')),
3015 ('f', 'fullpath', None,
2992 ('f', 'fullpath', None,
3016 _('print complete paths from the filesystem root')),
2993 _('print complete paths from the filesystem root')),
3017 ] + walkopts,
2994 ] + walkopts,
3018 _('hg locate [OPTION]... [PATTERN]...')),
2995 _('hg locate [OPTION]... [PATTERN]...')),
3019 "^log|history":
2996 "^log|history":
3020 (log,
2997 (log,
3021 [('b', 'branches', None, _('show branches (DEPRECATED)')),
2998 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3022 ('f', 'follow', None,
2999 ('f', 'follow', None,
3023 _('follow changeset history, or file history across copies and renames')),
3000 _('follow changeset history, or file history across copies and renames')),
3024 ('', 'follow-first', None,
3001 ('', 'follow-first', None,
3025 _('only follow the first parent of merge changesets')),
3002 _('only follow the first parent of merge changesets')),
3026 ('C', 'copies', None, _('show copied files')),
3003 ('C', 'copies', None, _('show copied files')),
3027 ('k', 'keyword', [], _('search for a keyword')),
3004 ('k', 'keyword', [], _('search for a keyword')),
3028 ('l', 'limit', '', _('limit number of changes displayed')),
3005 ('l', 'limit', '', _('limit number of changes displayed')),
3029 ('r', 'rev', [], _('show the specified revision or range')),
3006 ('r', 'rev', [], _('show the specified revision or range')),
3030 ('M', 'no-merges', None, _('do not show merges')),
3007 ('M', 'no-merges', None, _('do not show merges')),
3031 ('', 'style', '', _('display using template map file')),
3008 ('', 'style', '', _('display using template map file')),
3032 ('m', 'only-merges', None, _('show only merges')),
3009 ('m', 'only-merges', None, _('show only merges')),
3033 ('p', 'patch', None, _('show patch')),
3010 ('p', 'patch', None, _('show patch')),
3034 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3011 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3035 ('', 'template', '', _('display with template')),
3012 ('', 'template', '', _('display with template')),
3036 ] + walkopts,
3013 ] + walkopts,
3037 _('hg log [OPTION]... [FILE]')),
3014 _('hg log [OPTION]... [FILE]')),
3038 "manifest": (manifest, [], _('hg manifest [REV]')),
3015 "manifest": (manifest, [], _('hg manifest [REV]')),
3039 "merge":
3016 "merge":
3040 (merge,
3017 (merge,
3041 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3018 [('b', 'branch', '', _('merge with head of a specific branch (DEPRECATED)')),
3042 ('f', 'force', None, _('force a merge with outstanding changes'))],
3019 ('f', 'force', None, _('force a merge with outstanding changes'))],
3043 _('hg merge [-f] [REV]')),
3020 _('hg merge [-f] [REV]')),
3044 "outgoing|out": (outgoing,
3021 "outgoing|out": (outgoing,
3045 [('M', 'no-merges', None, _('do not show merges')),
3022 [('M', 'no-merges', None, _('do not show merges')),
3046 ('f', 'force', None,
3023 ('f', 'force', None,
3047 _('run even when remote repository is unrelated')),
3024 _('run even when remote repository is unrelated')),
3048 ('p', 'patch', None, _('show patch')),
3025 ('p', 'patch', None, _('show patch')),
3049 ('', 'style', '', _('display using template map file')),
3026 ('', 'style', '', _('display using template map file')),
3050 ('r', 'rev', [], _('a specific revision you would like to push')),
3027 ('r', 'rev', [], _('a specific revision you would like to push')),
3051 ('n', 'newest-first', None, _('show newest record first')),
3028 ('n', 'newest-first', None, _('show newest record first')),
3052 ('', 'template', '', _('display with template')),
3029 ('', 'template', '', _('display with template')),
3053 ] + remoteopts,
3030 ] + remoteopts,
3054 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3031 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3055 "^parents":
3032 "^parents":
3056 (parents,
3033 (parents,
3057 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3034 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3058 ('r', 'rev', '', _('show parents from the specified rev')),
3035 ('r', 'rev', '', _('show parents from the specified rev')),
3059 ('', 'style', '', _('display using template map file')),
3036 ('', 'style', '', _('display using template map file')),
3060 ('', 'template', '', _('display with template'))],
3037 ('', 'template', '', _('display with template'))],
3061 _('hg parents [-r REV] [FILE]')),
3038 _('hg parents [-r REV] [FILE]')),
3062 "paths": (paths, [], _('hg paths [NAME]')),
3039 "paths": (paths, [], _('hg paths [NAME]')),
3063 "^pull":
3040 "^pull":
3064 (pull,
3041 (pull,
3065 [('u', 'update', None,
3042 [('u', 'update', None,
3066 _('update to new tip if changesets were pulled')),
3043 _('update to new tip if changesets were pulled')),
3067 ('f', 'force', None,
3044 ('f', 'force', None,
3068 _('run even when remote repository is unrelated')),
3045 _('run even when remote repository is unrelated')),
3069 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3046 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3070 ] + remoteopts,
3047 ] + remoteopts,
3071 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3048 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3072 "^push":
3049 "^push":
3073 (push,
3050 (push,
3074 [('f', 'force', None, _('force push')),
3051 [('f', 'force', None, _('force push')),
3075 ('r', 'rev', [], _('a specific revision you would like to push')),
3052 ('r', 'rev', [], _('a specific revision you would like to push')),
3076 ] + remoteopts,
3053 ] + remoteopts,
3077 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3054 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3078 "debugrawcommit|rawcommit":
3055 "debugrawcommit|rawcommit":
3079 (rawcommit,
3056 (rawcommit,
3080 [('p', 'parent', [], _('parent')),
3057 [('p', 'parent', [], _('parent')),
3081 ('d', 'date', '', _('date code')),
3058 ('d', 'date', '', _('date code')),
3082 ('u', 'user', '', _('user')),
3059 ('u', 'user', '', _('user')),
3083 ('F', 'files', '', _('file list')),
3060 ('F', 'files', '', _('file list')),
3084 ('m', 'message', '', _('commit message')),
3061 ('m', 'message', '', _('commit message')),
3085 ('l', 'logfile', '', _('commit message file'))],
3062 ('l', 'logfile', '', _('commit message file'))],
3086 _('hg debugrawcommit [OPTION]... [FILE]...')),
3063 _('hg debugrawcommit [OPTION]... [FILE]...')),
3087 "recover": (recover, [], _('hg recover')),
3064 "recover": (recover, [], _('hg recover')),
3088 "^remove|rm":
3065 "^remove|rm":
3089 (remove,
3066 (remove,
3090 [('A', 'after', None, _('record a remove that has already occurred')),
3067 [('A', 'after', None, _('record a remove that has already occurred')),
3091 ('f', 'force', None, _('remove file even if modified')),
3068 ('f', 'force', None, _('remove file even if modified')),
3092 ] + walkopts,
3069 ] + walkopts,
3093 _('hg remove [OPTION]... FILE...')),
3070 _('hg remove [OPTION]... FILE...')),
3094 "rename|mv":
3071 "rename|mv":
3095 (rename,
3072 (rename,
3096 [('A', 'after', None, _('record a rename that has already occurred')),
3073 [('A', 'after', None, _('record a rename that has already occurred')),
3097 ('f', 'force', None,
3074 ('f', 'force', None,
3098 _('forcibly copy over an existing managed file')),
3075 _('forcibly copy over an existing managed file')),
3099 ] + walkopts + dryrunopts,
3076 ] + walkopts + dryrunopts,
3100 _('hg rename [OPTION]... SOURCE... DEST')),
3077 _('hg rename [OPTION]... SOURCE... DEST')),
3101 "^revert":
3078 "^revert":
3102 (revert,
3079 (revert,
3103 [('a', 'all', None, _('revert all changes when no arguments given')),
3080 [('a', 'all', None, _('revert all changes when no arguments given')),
3104 ('r', 'rev', '', _('revision to revert to')),
3081 ('r', 'rev', '', _('revision to revert to')),
3105 ('', 'no-backup', None, _('do not save backup copies of files')),
3082 ('', 'no-backup', None, _('do not save backup copies of files')),
3106 ] + walkopts + dryrunopts,
3083 ] + walkopts + dryrunopts,
3107 _('hg revert [-r REV] [NAME]...')),
3084 _('hg revert [-r REV] [NAME]...')),
3108 "rollback": (rollback, [], _('hg rollback')),
3085 "rollback": (rollback, [], _('hg rollback')),
3109 "root": (root, [], _('hg root')),
3086 "root": (root, [], _('hg root')),
3110 "showconfig|debugconfig":
3087 "showconfig|debugconfig":
3111 (showconfig,
3088 (showconfig,
3112 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3089 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3113 _('showconfig [-u] [NAME]...')),
3090 _('showconfig [-u] [NAME]...')),
3114 "^serve":
3091 "^serve":
3115 (serve,
3092 (serve,
3116 [('A', 'accesslog', '', _('name of access log file to write to')),
3093 [('A', 'accesslog', '', _('name of access log file to write to')),
3117 ('d', 'daemon', None, _('run server in background')),
3094 ('d', 'daemon', None, _('run server in background')),
3118 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3095 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3119 ('E', 'errorlog', '', _('name of error log file to write to')),
3096 ('E', 'errorlog', '', _('name of error log file to write to')),
3120 ('p', 'port', 0, _('port to use (default: 8000)')),
3097 ('p', 'port', 0, _('port to use (default: 8000)')),
3121 ('a', 'address', '', _('address to use')),
3098 ('a', 'address', '', _('address to use')),
3122 ('n', 'name', '',
3099 ('n', 'name', '',
3123 _('name to show in web pages (default: working dir)')),
3100 _('name to show in web pages (default: working dir)')),
3124 ('', 'webdir-conf', '', _('name of the webdir config file'
3101 ('', 'webdir-conf', '', _('name of the webdir config file'
3125 ' (serve more than one repo)')),
3102 ' (serve more than one repo)')),
3126 ('', 'pid-file', '', _('name of file to write process ID to')),
3103 ('', 'pid-file', '', _('name of file to write process ID to')),
3127 ('', 'stdio', None, _('for remote clients')),
3104 ('', 'stdio', None, _('for remote clients')),
3128 ('t', 'templates', '', _('web templates to use')),
3105 ('t', 'templates', '', _('web templates to use')),
3129 ('', 'style', '', _('template style to use')),
3106 ('', 'style', '', _('template style to use')),
3130 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3107 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3131 _('hg serve [OPTION]...')),
3108 _('hg serve [OPTION]...')),
3132 "^status|st":
3109 "^status|st":
3133 (status,
3110 (status,
3134 [('A', 'all', None, _('show status of all files')),
3111 [('A', 'all', None, _('show status of all files')),
3135 ('m', 'modified', None, _('show only modified files')),
3112 ('m', 'modified', None, _('show only modified files')),
3136 ('a', 'added', None, _('show only added files')),
3113 ('a', 'added', None, _('show only added files')),
3137 ('r', 'removed', None, _('show only removed files')),
3114 ('r', 'removed', None, _('show only removed files')),
3138 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3115 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3139 ('c', 'clean', None, _('show only files without changes')),
3116 ('c', 'clean', None, _('show only files without changes')),
3140 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3117 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3141 ('i', 'ignored', None, _('show ignored files')),
3118 ('i', 'ignored', None, _('show ignored files')),
3142 ('n', 'no-status', None, _('hide status prefix')),
3119 ('n', 'no-status', None, _('hide status prefix')),
3143 ('C', 'copies', None, _('show source of copied files')),
3120 ('C', 'copies', None, _('show source of copied files')),
3144 ('0', 'print0', None,
3121 ('0', 'print0', None,
3145 _('end filenames with NUL, for use with xargs')),
3122 _('end filenames with NUL, for use with xargs')),
3146 ('', 'rev', [], _('show difference from revision')),
3123 ('', 'rev', [], _('show difference from revision')),
3147 ] + walkopts,
3124 ] + walkopts,
3148 _('hg status [OPTION]... [FILE]...')),
3125 _('hg status [OPTION]... [FILE]...')),
3149 "tag":
3126 "tag":
3150 (tag,
3127 (tag,
3151 [('l', 'local', None, _('make the tag local')),
3128 [('l', 'local', None, _('make the tag local')),
3152 ('m', 'message', '', _('message for tag commit log entry')),
3129 ('m', 'message', '', _('message for tag commit log entry')),
3153 ('d', 'date', '', _('record datecode as commit date')),
3130 ('d', 'date', '', _('record datecode as commit date')),
3154 ('u', 'user', '', _('record user as committer')),
3131 ('u', 'user', '', _('record user as committer')),
3155 ('r', 'rev', '', _('revision to tag'))],
3132 ('r', 'rev', '', _('revision to tag'))],
3156 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3133 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3157 "tags": (tags, [], _('hg tags')),
3134 "tags": (tags, [], _('hg tags')),
3158 "tip":
3135 "tip":
3159 (tip,
3136 (tip,
3160 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3137 [('b', 'branches', None, _('show branches (DEPRECATED)')),
3161 ('', 'style', '', _('display using template map file')),
3138 ('', 'style', '', _('display using template map file')),
3162 ('p', 'patch', None, _('show patch')),
3139 ('p', 'patch', None, _('show patch')),
3163 ('', 'template', '', _('display with template'))],
3140 ('', 'template', '', _('display with template'))],
3164 _('hg tip [-p]')),
3141 _('hg tip [-p]')),
3165 "unbundle":
3142 "unbundle":
3166 (unbundle,
3143 (unbundle,
3167 [('u', 'update', None,
3144 [('u', 'update', None,
3168 _('update to new tip if changesets were unbundled'))],
3145 _('update to new tip if changesets were unbundled'))],
3169 _('hg unbundle [-u] FILE')),
3146 _('hg unbundle [-u] FILE')),
3170 "^update|up|checkout|co":
3147 "^update|up|checkout|co":
3171 (update,
3148 (update,
3172 [('b', 'branch', '',
3149 [('b', 'branch', '',
3173 _('checkout the head of a specific branch (DEPRECATED)')),
3150 _('checkout the head of a specific branch (DEPRECATED)')),
3174 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3151 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3175 ('C', 'clean', None, _('overwrite locally modified files')),
3152 ('C', 'clean', None, _('overwrite locally modified files')),
3176 ('f', 'force', None, _('force a merge with outstanding changes'))],
3153 ('f', 'force', None, _('force a merge with outstanding changes'))],
3177 _('hg update [-C] [-f] [REV]')),
3154 _('hg update [-C] [-f] [REV]')),
3178 "verify": (verify, [], _('hg verify')),
3155 "verify": (verify, [], _('hg verify')),
3179 "version": (show_version, [], _('hg version')),
3156 "version": (show_version, [], _('hg version')),
3180 }
3157 }
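
Each entry in the table above follows one shape: the key is the command name, with a leading "^" marking commands shown in the short help and "|" separating aliases, and the value is a (function, option list, synopsis) tuple whose options are (short flag, long name, default, help text) tuples. As a rough illustration only, a hypothetical entry in that shape might look like this ("hello" and its function are invented, and _ stands in for the gettext wrapper used above):

    _ = lambda s: s                        # stand-in for the gettext wrapper

    def hello(ui, repo, **opts):
        # invented command body: real commands receive (ui, repo, *args, **opts)
        ui.write("hello from %s\n" % repo.root)

    example_entry = {
        "hello|hi":                        # "|" separates aliases; "^" would add it to the short list
        (hello,
         [('l', 'loud', None, _('greet loudly'))],   # (short, long, default, help)
         _('hg hello [-l]')),
    }
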
3181
3158
3182 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3159 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3183 " debugindex debugindexdot")
3160 " debugindex debugindexdot")
3184 optionalrepo = ("paths serve showconfig")
3161 optionalrepo = ("paths serve showconfig")
3185
3162
3186 def findpossible(ui, cmd):
3163 def findpossible(ui, cmd):
3187 """
3164 """
3188 Return cmd -> (aliases, command table entry)
3165 Return cmd -> (aliases, command table entry)
3189 for each matching command.
3166 for each matching command.
3190 Return debug commands (or their aliases) only if no normal command matches.
3167 Return debug commands (or their aliases) only if no normal command matches.
3191 """
3168 """
3192 choice = {}
3169 choice = {}
3193 debugchoice = {}
3170 debugchoice = {}
3194 for e in table.keys():
3171 for e in table.keys():
3195 aliases = e.lstrip("^").split("|")
3172 aliases = e.lstrip("^").split("|")
3196 found = None
3173 found = None
3197 if cmd in aliases:
3174 if cmd in aliases:
3198 found = cmd
3175 found = cmd
3199 elif not ui.config("ui", "strict"):
3176 elif not ui.config("ui", "strict"):
3200 for a in aliases:
3177 for a in aliases:
3201 if a.startswith(cmd):
3178 if a.startswith(cmd):
3202 found = a
3179 found = a
3203 break
3180 break
3204 if found is not None:
3181 if found is not None:
3205 if aliases[0].startswith("debug") or found.startswith("debug"):
3182 if aliases[0].startswith("debug") or found.startswith("debug"):
3206 debugchoice[found] = (aliases, table[e])
3183 debugchoice[found] = (aliases, table[e])
3207 else:
3184 else:
3208 choice[found] = (aliases, table[e])
3185 choice[found] = (aliases, table[e])
3209
3186
3210 if not choice and debugchoice:
3187 if not choice and debugchoice:
3211 choice = debugchoice
3188 choice = debugchoice
3212
3189
3213 return choice
3190 return choice
3214
3191
3215 def findcmd(ui, cmd):
3192 def findcmd(ui, cmd):
3216 """Return (aliases, command table entry) for command string."""
3193 """Return (aliases, command table entry) for command string."""
3217 choice = findpossible(ui, cmd)
3194 choice = findpossible(ui, cmd)
3218
3195
3219 if choice.has_key(cmd):
3196 if choice.has_key(cmd):
3220 return choice[cmd]
3197 return choice[cmd]
3221
3198
3222 if len(choice) > 1:
3199 if len(choice) > 1:
3223 clist = choice.keys()
3200 clist = choice.keys()
3224 clist.sort()
3201 clist.sort()
3225 raise AmbiguousCommand(cmd, clist)
3202 raise AmbiguousCommand(cmd, clist)
3226
3203
3227 if choice:
3204 if choice:
3228 return choice.values()[0]
3205 return choice.values()[0]
3229
3206
3230 raise UnknownCommand(cmd)
3207 raise UnknownCommand(cmd)
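
findpossible and findcmd above implement command abbreviation: an exact alias always wins, otherwise any prefix of an alias matches (unless ui.strict is set), debug commands are only considered when no normal command matched, and more than one surviving match raises AmbiguousCommand. A self-contained sketch of the same resolution rule, using an invented command list rather than the real table:

    # illustrative sketch of the prefix-resolution rule (names are made up)
    cmds = ["status", "summary", "serve", "debugstate"]

    def resolve(cmd):
        if cmd in cmds:
            return cmd
        matches = [c for c in cmds if c.startswith(cmd)]
        normal = [c for c in matches if not c.startswith("debug")]
        matches = normal or matches        # debug commands only if nothing else matched
        if len(matches) == 1:
            return matches[0]
        raise ValueError("ambiguous or unknown command: %s" % cmd)

    # resolve("st") -> "status"; resolve("debugst") -> "debugstate"
    # resolve("s") raises, because status, summary and serve all match
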
3231
3208
3232 def catchterm(*args):
3209 def catchterm(*args):
3233 raise util.SignalInterrupt
3210 raise util.SignalInterrupt
3234
3211
3235 def run():
3212 def run():
3236 sys.exit(dispatch(sys.argv[1:]))
3213 sys.exit(dispatch(sys.argv[1:]))
3237
3214
3238 class ParseError(Exception):
3215 class ParseError(Exception):
3239 """Exception raised on errors in parsing the command line."""
3216 """Exception raised on errors in parsing the command line."""
3240
3217
3241 def parse(ui, args):
3218 def parse(ui, args):
3242 options = {}
3219 options = {}
3243 cmdoptions = {}
3220 cmdoptions = {}
3244
3221
3245 try:
3222 try:
3246 args = fancyopts.fancyopts(args, globalopts, options)
3223 args = fancyopts.fancyopts(args, globalopts, options)
3247 except fancyopts.getopt.GetoptError, inst:
3224 except fancyopts.getopt.GetoptError, inst:
3248 raise ParseError(None, inst)
3225 raise ParseError(None, inst)
3249
3226
3250 if args:
3227 if args:
3251 cmd, args = args[0], args[1:]
3228 cmd, args = args[0], args[1:]
3252 aliases, i = findcmd(ui, cmd)
3229 aliases, i = findcmd(ui, cmd)
3253 cmd = aliases[0]
3230 cmd = aliases[0]
3254 defaults = ui.config("defaults", cmd)
3231 defaults = ui.config("defaults", cmd)
3255 if defaults:
3232 if defaults:
3256 args = shlex.split(defaults) + args
3233 args = shlex.split(defaults) + args
3257 c = list(i[1])
3234 c = list(i[1])
3258 else:
3235 else:
3259 cmd = None
3236 cmd = None
3260 c = []
3237 c = []
3261
3238
3262 # combine global options into local
3239 # combine global options into local
3263 for o in globalopts:
3240 for o in globalopts:
3264 c.append((o[0], o[1], options[o[1]], o[3]))
3241 c.append((o[0], o[1], options[o[1]], o[3]))
3265
3242
3266 try:
3243 try:
3267 args = fancyopts.fancyopts(args, c, cmdoptions)
3244 args = fancyopts.fancyopts(args, c, cmdoptions)
3268 except fancyopts.getopt.GetoptError, inst:
3245 except fancyopts.getopt.GetoptError, inst:
3269 raise ParseError(cmd, inst)
3246 raise ParseError(cmd, inst)
3270
3247
3271 # separate global options back out
3248 # separate global options back out
3272 for o in globalopts:
3249 for o in globalopts:
3273 n = o[1]
3250 n = o[1]
3274 options[n] = cmdoptions[n]
3251 options[n] = cmdoptions[n]
3275 del cmdoptions[n]
3252 del cmdoptions[n]
3276
3253
3277 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3254 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
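
Note that parse also honours a [defaults] section in the configuration: ui.config("defaults", cmd) is split with shlex and prepended to the arguments before the command's own options are parsed. For example, with a hypothetical hgrc:

    # [defaults]
    # log = -l 10 --no-merges
    #
    # With that section present, "hg log -p" is parsed as if the user had
    # typed "hg log -l 10 --no-merges -p".
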
3278
3255
3279 external = {}
3256 external = {}
3280
3257
3281 def findext(name):
3258 def findext(name):
3282 '''return module with given extension name'''
3259 '''return module with given extension name'''
3283 try:
3260 try:
3284 return sys.modules[external[name]]
3261 return sys.modules[external[name]]
3285 except KeyError:
3262 except KeyError:
3286 for k, v in external.iteritems():
3263 for k, v in external.iteritems():
3287 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3264 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3288 return sys.modules[v]
3265 return sys.modules[v]
3289 raise KeyError(name)
3266 raise KeyError(name)
3290
3267
3291 def load_extensions(ui):
3268 def load_extensions(ui):
3292 added = []
3269 added = []
3293 for ext_name, load_from_name in ui.extensions():
3270 for ext_name, load_from_name in ui.extensions():
3294 if ext_name in external:
3271 if ext_name in external:
3295 continue
3272 continue
3296 try:
3273 try:
3297 if load_from_name:
3274 if load_from_name:
3298 # the module will be loaded in sys.modules
3275 # the module will be loaded in sys.modules
3299 # choose a unique name so that it doesn't
3276 # choose a unique name so that it doesn't
3300 # conflict with other modules
3277 # conflict with other modules
3301 module_name = "hgext_%s" % ext_name.replace('.', '_')
3278 module_name = "hgext_%s" % ext_name.replace('.', '_')
3302 mod = imp.load_source(module_name, load_from_name)
3279 mod = imp.load_source(module_name, load_from_name)
3303 else:
3280 else:
3304 def importh(name):
3281 def importh(name):
3305 mod = __import__(name)
3282 mod = __import__(name)
3306 components = name.split('.')
3283 components = name.split('.')
3307 for comp in components[1:]:
3284 for comp in components[1:]:
3308 mod = getattr(mod, comp)
3285 mod = getattr(mod, comp)
3309 return mod
3286 return mod
3310 try:
3287 try:
3311 mod = importh("hgext.%s" % ext_name)
3288 mod = importh("hgext.%s" % ext_name)
3312 except ImportError:
3289 except ImportError:
3313 mod = importh(ext_name)
3290 mod = importh(ext_name)
3314 external[ext_name] = mod.__name__
3291 external[ext_name] = mod.__name__
3315 added.append((mod, ext_name))
3292 added.append((mod, ext_name))
3316 except (util.SignalInterrupt, KeyboardInterrupt):
3293 except (util.SignalInterrupt, KeyboardInterrupt):
3317 raise
3294 raise
3318 except Exception, inst:
3295 except Exception, inst:
3319 ui.warn(_("*** failed to import extension %s: %s\n") %
3296 ui.warn(_("*** failed to import extension %s: %s\n") %
3320 (ext_name, inst))
3297 (ext_name, inst))
3321 if ui.print_exc():
3298 if ui.print_exc():
3322 return 1
3299 return 1
3323
3300
3324 for mod, name in added:
3301 for mod, name in added:
3325 uisetup = getattr(mod, 'uisetup', None)
3302 uisetup = getattr(mod, 'uisetup', None)
3326 if uisetup:
3303 if uisetup:
3327 uisetup(ui)
3304 uisetup(ui)
3328 cmdtable = getattr(mod, 'cmdtable', {})
3305 cmdtable = getattr(mod, 'cmdtable', {})
3329 for t in cmdtable:
3306 for t in cmdtable:
3330 if t in table:
3307 if t in table:
3331 ui.warn(_("module %s overrides %s\n") % (name, t))
3308 ui.warn(_("module %s overrides %s\n") % (name, t))
3332 table.update(cmdtable)
3309 table.update(cmdtable)
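
load_extensions above defines the whole contract an extension has to meet: an optional uisetup(ui) hook called once at load time, an optional reposetup(ui, repo) hook invoked per repository (wired up in dispatch below), and a cmdtable dict that is merged into the main command table. A minimal hypothetical extension module in that shape (the module, command and strings are invented, not part of this changeset):

    # hgext_example.py -- illustrative extension skeleton
    from mercurial.i18n import gettext as _

    def uisetup(ui):
        # called once when the extension is loaded
        ui.note(_("example extension loaded\n"))

    def reposetup(ui, repo):
        # called for each local repository the command operates on
        pass

    def headcount(ui, repo, **opts):
        ui.write("%d heads\n" % len(repo.heads()))

    cmdtable = {
        "headcount": (headcount, [], _('hg headcount')),
    }
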
3333
3310
3334 def parseconfig(config):
3311 def parseconfig(config):
3335 """parse the --config options from the command line"""
3312 """parse the --config options from the command line"""
3336 parsed = []
3313 parsed = []
3337 for cfg in config:
3314 for cfg in config:
3338 try:
3315 try:
3339 name, value = cfg.split('=', 1)
3316 name, value = cfg.split('=', 1)
3340 section, name = name.split('.', 1)
3317 section, name = name.split('.', 1)
3341 if not section or not name:
3318 if not section or not name:
3342 raise IndexError
3319 raise IndexError
3343 parsed.append((section, name, value))
3320 parsed.append((section, name, value))
3344 except (IndexError, ValueError):
3321 except (IndexError, ValueError):
3345 raise util.Abort(_('malformed --config option: %s') % cfg)
3322 raise util.Abort(_('malformed --config option: %s') % cfg)
3346 return parsed
3323 return parsed
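
In other words, every --config argument must have the form section.name=value; anything else aborts. A quick illustration of the expected behaviour, assuming parseconfig as defined above:

    # parseconfig(['ui.username=alice', 'extensions.mq='])
    #     -> [('ui', 'username', 'alice'), ('extensions', 'mq', '')]
    # parseconfig(['badoption'])     -> util.Abort: malformed --config option
    # parseconfig(['.name=value'])   -> util.Abort (empty section is rejected)
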
3347
3324
3348 def dispatch(args):
3325 def dispatch(args):
3349 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3326 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3350 num = getattr(signal, name, None)
3327 num = getattr(signal, name, None)
3351 if num: signal.signal(num, catchterm)
3328 if num: signal.signal(num, catchterm)
3352
3329
3353 try:
3330 try:
3354 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3331 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3355 except util.Abort, inst:
3332 except util.Abort, inst:
3356 sys.stderr.write(_("abort: %s\n") % inst)
3333 sys.stderr.write(_("abort: %s\n") % inst)
3357 return -1
3334 return -1
3358
3335
3359 load_extensions(u)
3336 load_extensions(u)
3360 u.addreadhook(load_extensions)
3337 u.addreadhook(load_extensions)
3361
3338
3362 try:
3339 try:
3363 cmd, func, args, options, cmdoptions = parse(u, args)
3340 cmd, func, args, options, cmdoptions = parse(u, args)
3364 if options["time"]:
3341 if options["time"]:
3365 def get_times():
3342 def get_times():
3366 t = os.times()
3343 t = os.times()
3367 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3344 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3368 t = (t[0], t[1], t[2], t[3], time.clock())
3345 t = (t[0], t[1], t[2], t[3], time.clock())
3369 return t
3346 return t
3370 s = get_times()
3347 s = get_times()
3371 def print_time():
3348 def print_time():
3372 t = get_times()
3349 t = get_times()
3373 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3350 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3374 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3351 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3375 atexit.register(print_time)
3352 atexit.register(print_time)
3376
3353
3377 # enter the debugger before command execution
3354 # enter the debugger before command execution
3378 if options['debugger']:
3355 if options['debugger']:
3379 pdb.set_trace()
3356 pdb.set_trace()
3380
3357
3381 try:
3358 try:
3382 if options['cwd']:
3359 if options['cwd']:
3383 try:
3360 try:
3384 os.chdir(options['cwd'])
3361 os.chdir(options['cwd'])
3385 except OSError, inst:
3362 except OSError, inst:
3386 raise util.Abort('%s: %s' %
3363 raise util.Abort('%s: %s' %
3387 (options['cwd'], inst.strerror))
3364 (options['cwd'], inst.strerror))
3388
3365
3389 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3366 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3390 not options["noninteractive"], options["traceback"],
3367 not options["noninteractive"], options["traceback"],
3391 parseconfig(options["config"]))
3368 parseconfig(options["config"]))
3392
3369
3393 path = u.expandpath(options["repository"]) or ""
3370 path = u.expandpath(options["repository"]) or ""
3394 repo = path and hg.repository(u, path=path) or None
3371 repo = path and hg.repository(u, path=path) or None
3395 if repo and not repo.local():
3372 if repo and not repo.local():
3396 raise util.Abort(_("repository '%s' is not local") % path)
3373 raise util.Abort(_("repository '%s' is not local") % path)
3397
3374
3398 if options['help']:
3375 if options['help']:
3399 return help_(u, cmd, options['version'])
3376 return help_(u, cmd, options['version'])
3400 elif options['version']:
3377 elif options['version']:
3401 return show_version(u)
3378 return show_version(u)
3402 elif not cmd:
3379 elif not cmd:
3403 return help_(u, 'shortlist')
3380 return help_(u, 'shortlist')
3404
3381
3405 if cmd not in norepo.split():
3382 if cmd not in norepo.split():
3406 try:
3383 try:
3407 if not repo:
3384 if not repo:
3408 repo = hg.repository(u, path=path)
3385 repo = hg.repository(u, path=path)
3409 u = repo.ui
3386 u = repo.ui
3410 for name in external.itervalues():
3387 for name in external.itervalues():
3411 mod = sys.modules[name]
3388 mod = sys.modules[name]
3412 if hasattr(mod, 'reposetup'):
3389 if hasattr(mod, 'reposetup'):
3413 mod.reposetup(u, repo)
3390 mod.reposetup(u, repo)
3414 hg.repo_setup_hooks.append(mod.reposetup)
3391 hg.repo_setup_hooks.append(mod.reposetup)
3415 except hg.RepoError:
3392 except hg.RepoError:
3416 if cmd not in optionalrepo.split():
3393 if cmd not in optionalrepo.split():
3417 raise
3394 raise
3418 d = lambda: func(u, repo, *args, **cmdoptions)
3395 d = lambda: func(u, repo, *args, **cmdoptions)
3419 else:
3396 else:
3420 d = lambda: func(u, *args, **cmdoptions)
3397 d = lambda: func(u, *args, **cmdoptions)
3421
3398
3422 try:
3399 try:
3423 if options['profile']:
3400 if options['profile']:
3424 import hotshot, hotshot.stats
3401 import hotshot, hotshot.stats
3425 prof = hotshot.Profile("hg.prof")
3402 prof = hotshot.Profile("hg.prof")
3426 try:
3403 try:
3427 try:
3404 try:
3428 return prof.runcall(d)
3405 return prof.runcall(d)
3429 except:
3406 except:
3430 try:
3407 try:
3431 u.warn(_('exception raised - generating '
3408 u.warn(_('exception raised - generating '
3432 'profile anyway\n'))
3409 'profile anyway\n'))
3433 except:
3410 except:
3434 pass
3411 pass
3435 raise
3412 raise
3436 finally:
3413 finally:
3437 prof.close()
3414 prof.close()
3438 stats = hotshot.stats.load("hg.prof")
3415 stats = hotshot.stats.load("hg.prof")
3439 stats.strip_dirs()
3416 stats.strip_dirs()
3440 stats.sort_stats('time', 'calls')
3417 stats.sort_stats('time', 'calls')
3441 stats.print_stats(40)
3418 stats.print_stats(40)
3442 elif options['lsprof']:
3419 elif options['lsprof']:
3443 try:
3420 try:
3444 from mercurial import lsprof
3421 from mercurial import lsprof
3445 except ImportError:
3422 except ImportError:
3446 raise util.Abort(_(
3423 raise util.Abort(_(
3447 'lsprof not available - install from '
3424 'lsprof not available - install from '
3448 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3425 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3449 p = lsprof.Profiler()
3426 p = lsprof.Profiler()
3450 p.enable(subcalls=True)
3427 p.enable(subcalls=True)
3451 try:
3428 try:
3452 return d()
3429 return d()
3453 finally:
3430 finally:
3454 p.disable()
3431 p.disable()
3455 stats = lsprof.Stats(p.getstats())
3432 stats = lsprof.Stats(p.getstats())
3456 stats.sort()
3433 stats.sort()
3457 stats.pprint(top=10, file=sys.stderr, climit=5)
3434 stats.pprint(top=10, file=sys.stderr, climit=5)
3458 else:
3435 else:
3459 return d()
3436 return d()
3460 finally:
3437 finally:
3461 u.flush()
3438 u.flush()
3462 except:
3439 except:
3463 # enter the debugger when we hit an exception
3440 # enter the debugger when we hit an exception
3464 if options['debugger']:
3441 if options['debugger']:
3465 pdb.post_mortem(sys.exc_info()[2])
3442 pdb.post_mortem(sys.exc_info()[2])
3466 u.print_exc()
3443 u.print_exc()
3467 raise
3444 raise
3468 except ParseError, inst:
3445 except ParseError, inst:
3469 if inst.args[0]:
3446 if inst.args[0]:
3470 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3447 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3471 help_(u, inst.args[0])
3448 help_(u, inst.args[0])
3472 else:
3449 else:
3473 u.warn(_("hg: %s\n") % inst.args[1])
3450 u.warn(_("hg: %s\n") % inst.args[1])
3474 help_(u, 'shortlist')
3451 help_(u, 'shortlist')
3475 except AmbiguousCommand, inst:
3452 except AmbiguousCommand, inst:
3476 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3453 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3477 (inst.args[0], " ".join(inst.args[1])))
3454 (inst.args[0], " ".join(inst.args[1])))
3478 except UnknownCommand, inst:
3455 except UnknownCommand, inst:
3479 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3456 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3480 help_(u, 'shortlist')
3457 help_(u, 'shortlist')
3481 except hg.RepoError, inst:
3458 except hg.RepoError, inst:
3482 u.warn(_("abort: %s!\n") % inst)
3459 u.warn(_("abort: %s!\n") % inst)
3483 except lock.LockHeld, inst:
3460 except lock.LockHeld, inst:
3484 if inst.errno == errno.ETIMEDOUT:
3461 if inst.errno == errno.ETIMEDOUT:
3485 reason = _('timed out waiting for lock held by %s') % inst.locker
3462 reason = _('timed out waiting for lock held by %s') % inst.locker
3486 else:
3463 else:
3487 reason = _('lock held by %s') % inst.locker
3464 reason = _('lock held by %s') % inst.locker
3488 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3465 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3489 except lock.LockUnavailable, inst:
3466 except lock.LockUnavailable, inst:
3490 u.warn(_("abort: could not lock %s: %s\n") %
3467 u.warn(_("abort: could not lock %s: %s\n") %
3491 (inst.desc or inst.filename, inst.strerror))
3468 (inst.desc or inst.filename, inst.strerror))
3492 except revlog.RevlogError, inst:
3469 except revlog.RevlogError, inst:
3493 u.warn(_("abort: %s!\n") % inst)
3470 u.warn(_("abort: %s!\n") % inst)
3494 except util.SignalInterrupt:
3471 except util.SignalInterrupt:
3495 u.warn(_("killed!\n"))
3472 u.warn(_("killed!\n"))
3496 except KeyboardInterrupt:
3473 except KeyboardInterrupt:
3497 try:
3474 try:
3498 u.warn(_("interrupted!\n"))
3475 u.warn(_("interrupted!\n"))
3499 except IOError, inst:
3476 except IOError, inst:
3500 if inst.errno == errno.EPIPE:
3477 if inst.errno == errno.EPIPE:
3501 if u.debugflag:
3478 if u.debugflag:
3502 u.warn(_("\nbroken pipe\n"))
3479 u.warn(_("\nbroken pipe\n"))
3503 else:
3480 else:
3504 raise
3481 raise
3505 except IOError, inst:
3482 except IOError, inst:
3506 if hasattr(inst, "code"):
3483 if hasattr(inst, "code"):
3507 u.warn(_("abort: %s\n") % inst)
3484 u.warn(_("abort: %s\n") % inst)
3508 elif hasattr(inst, "reason"):
3485 elif hasattr(inst, "reason"):
3509 u.warn(_("abort: error: %s\n") % inst.reason[1])
3486 u.warn(_("abort: error: %s\n") % inst.reason[1])
3510 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3487 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3511 if u.debugflag:
3488 if u.debugflag:
3512 u.warn(_("broken pipe\n"))
3489 u.warn(_("broken pipe\n"))
3513 elif getattr(inst, "strerror", None):
3490 elif getattr(inst, "strerror", None):
3514 if getattr(inst, "filename", None):
3491 if getattr(inst, "filename", None):
3515 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3492 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3516 else:
3493 else:
3517 u.warn(_("abort: %s\n") % inst.strerror)
3494 u.warn(_("abort: %s\n") % inst.strerror)
3518 else:
3495 else:
3519 raise
3496 raise
3520 except OSError, inst:
3497 except OSError, inst:
3521 if getattr(inst, "filename", None):
3498 if getattr(inst, "filename", None):
3522 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3499 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3523 else:
3500 else:
3524 u.warn(_("abort: %s\n") % inst.strerror)
3501 u.warn(_("abort: %s\n") % inst.strerror)
3525 except util.UnexpectedOutput, inst:
3502 except util.UnexpectedOutput, inst:
3526 u.warn(_("abort: %s") % inst[0])
3503 u.warn(_("abort: %s") % inst[0])
3527 if not isinstance(inst[1], basestring):
3504 if not isinstance(inst[1], basestring):
3528 u.warn(" %r\n" % (inst[1],))
3505 u.warn(" %r\n" % (inst[1],))
3529 elif not inst[1]:
3506 elif not inst[1]:
3530 u.warn(_(" empty string\n"))
3507 u.warn(_(" empty string\n"))
3531 else:
3508 else:
3532 u.warn("\n%r%s\n" %
3509 u.warn("\n%r%s\n" %
3533 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3510 (inst[1][:400], len(inst[1]) > 400 and '...' or ''))
3534 except util.Abort, inst:
3511 except util.Abort, inst:
3535 u.warn(_("abort: %s\n") % inst)
3512 u.warn(_("abort: %s\n") % inst)
3536 except TypeError, inst:
3513 except TypeError, inst:
3537 # was this an argument error?
3514 # was this an argument error?
3538 tb = traceback.extract_tb(sys.exc_info()[2])
3515 tb = traceback.extract_tb(sys.exc_info()[2])
3539 if len(tb) > 2: # no
3516 if len(tb) > 2: # no
3540 raise
3517 raise
3541 u.debug(inst, "\n")
3518 u.debug(inst, "\n")
3542 u.warn(_("%s: invalid arguments\n") % cmd)
3519 u.warn(_("%s: invalid arguments\n") % cmd)
3543 help_(u, cmd)
3520 help_(u, cmd)
3544 except SystemExit, inst:
3521 except SystemExit, inst:
3545 # Commands shouldn't sys.exit directly, but give a return code.
3522 # Commands shouldn't sys.exit directly, but give a return code.
3546 # Just in case, catch this and pass the exit code to the caller.
3523 # Just in case, catch this and pass the exit code to the caller.
3547 return inst.code
3524 return inst.code
3548 except:
3525 except:
3549 u.warn(_("** unknown exception encountered, details follow\n"))
3526 u.warn(_("** unknown exception encountered, details follow\n"))
3550 u.warn(_("** report bug details to "
3527 u.warn(_("** report bug details to "
3551 "http://www.selenic.com/mercurial/bts\n"))
3528 "http://www.selenic.com/mercurial/bts\n"))
3552 u.warn(_("** or mercurial@selenic.com\n"))
3529 u.warn(_("** or mercurial@selenic.com\n"))
3553 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3530 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3554 % version.get_version())
3531 % version.get_version())
3555 raise
3532 raise
3556
3533
3557 return -1
3534 return -1
@@ -1,536 +1,525 b''
1 """
1 """
2 dirstate.py - working directory tracking for mercurial
2 dirstate.py - working directory tracking for mercurial
3
3
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5
5
6 This software may be used and distributed according to the terms
6 This software may be used and distributed according to the terms
7 of the GNU General Public License, incorporated herein by reference.
7 of the GNU General Public License, incorporated herein by reference.
8 """
8 """
9
9
10 from node import *
10 from node import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "struct os time bisect stat strutil util re errno")
13 demandload(globals(), "struct os time bisect stat strutil util re errno")
14
14
15 class dirstate(object):
15 class dirstate(object):
16 format = ">cllll"
16 format = ">cllll"
17
17
18 def __init__(self, opener, ui, root):
18 def __init__(self, opener, ui, root):
19 self.opener = opener
19 self.opener = opener
20 self.root = root
20 self.root = root
21 self.dirty = 0
21 self.dirty = 0
22 self.ui = ui
22 self.ui = ui
23 self.map = None
23 self.map = None
24 self.pl = None
24 self.pl = None
25 self.dirs = None
25 self.dirs = None
26 self.copymap = {}
26 self.copymap = {}
27 self.ignorefunc = None
27 self.ignorefunc = None
28 self.blockignore = False
29
28
30 def wjoin(self, f):
29 def wjoin(self, f):
31 return os.path.join(self.root, f)
30 return os.path.join(self.root, f)
32
31
33 def getcwd(self):
32 def getcwd(self):
34 cwd = os.getcwd()
33 cwd = os.getcwd()
35 if cwd == self.root: return ''
34 if cwd == self.root: return ''
36 return cwd[len(self.root) + 1:]
35 return cwd[len(self.root) + 1:]
37
36
38 def hgignore(self):
37 def hgignore(self):
39 '''return the contents of .hgignore files as a list of patterns.
38 '''return the contents of .hgignore files as a list of patterns.
40
39
41 the files parsed for patterns include:
40 the files parsed for patterns include:
42 .hgignore in the repository root
41 .hgignore in the repository root
43 any additional files specified in the [ui] section of ~/.hgrc
42 any additional files specified in the [ui] section of ~/.hgrc
44
43
45 trailing white space is dropped.
44 trailing white space is dropped.
46 the escape character is backslash.
45 the escape character is backslash.
47 comments start with #.
46 comments start with #.
48 empty lines are skipped.
47 empty lines are skipped.
49
48
50 lines can be of the following formats:
49 lines can be of the following formats:
51
50
52 syntax: regexp # defaults following lines to non-rooted regexps
51 syntax: regexp # defaults following lines to non-rooted regexps
53 syntax: glob # defaults following lines to non-rooted globs
52 syntax: glob # defaults following lines to non-rooted globs
54 re:pattern # non-rooted regular expression
53 re:pattern # non-rooted regular expression
55 glob:pattern # non-rooted glob
54 glob:pattern # non-rooted glob
56 pattern # pattern of the current default type'''
55 pattern # pattern of the current default type'''
57 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
56 syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'}
58 def parselines(fp):
57 def parselines(fp):
59 for line in fp:
58 for line in fp:
60 escape = False
59 escape = False
61 for i in xrange(len(line)):
60 for i in xrange(len(line)):
62 if escape: escape = False
61 if escape: escape = False
63 elif line[i] == '\\': escape = True
62 elif line[i] == '\\': escape = True
64 elif line[i] == '#': break
63 elif line[i] == '#': break
65 line = line[:i].rstrip()
64 line = line[:i].rstrip()
66 if line: yield line
65 if line: yield line
67 repoignore = self.wjoin('.hgignore')
66 repoignore = self.wjoin('.hgignore')
68 files = [repoignore]
67 files = [repoignore]
69 files.extend(self.ui.hgignorefiles())
68 files.extend(self.ui.hgignorefiles())
70 pats = {}
69 pats = {}
71 for f in files:
70 for f in files:
72 try:
71 try:
73 pats[f] = []
72 pats[f] = []
74 fp = open(f)
73 fp = open(f)
75 syntax = 'relre:'
74 syntax = 'relre:'
76 for line in parselines(fp):
75 for line in parselines(fp):
77 if line.startswith('syntax:'):
76 if line.startswith('syntax:'):
78 s = line[7:].strip()
77 s = line[7:].strip()
79 try:
78 try:
80 syntax = syntaxes[s]
79 syntax = syntaxes[s]
81 except KeyError:
80 except KeyError:
82 self.ui.warn(_("%s: ignoring invalid "
81 self.ui.warn(_("%s: ignoring invalid "
83 "syntax '%s'\n") % (f, s))
82 "syntax '%s'\n") % (f, s))
84 continue
83 continue
85 pat = syntax + line
84 pat = syntax + line
86 for s in syntaxes.values():
85 for s in syntaxes.values():
87 if line.startswith(s):
86 if line.startswith(s):
88 pat = line
87 pat = line
89 break
88 break
90 pats[f].append(pat)
89 pats[f].append(pat)
91 except IOError, inst:
90 except IOError, inst:
92 if f != repoignore:
91 if f != repoignore:
93 self.ui.warn(_("skipping unreadable ignore file"
92 self.ui.warn(_("skipping unreadable ignore file"
94 " '%s': %s\n") % (f, inst.strerror))
93 " '%s': %s\n") % (f, inst.strerror))
95 return pats
94 return pats
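
Given the parsing rules in the docstring above, a typical .hgignore accepted by this code might look like the following (an invented example; the syntax: lines change the default pattern type for the lines after them, and an explicit re: or glob: prefix overrides it per line):

    # example .hgignore (illustrative)
    syntax: glob
    *.orig
    *.rej
    build/*

    syntax: regexp
    ^\.pc/
    glob:*.pyc
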
96
95
97 def ignore(self, fn):
96 def ignore(self, fn):
98 '''default match function used by dirstate and
97 '''default match function used by dirstate and
99 localrepository. this honours the repository .hgignore file
98 localrepository. this honours the repository .hgignore file
100 and any other files specified in the [ui] section of .hgrc.'''
99 and any other files specified in the [ui] section of .hgrc.'''
101 if self.blockignore:
102 return False
103 if not self.ignorefunc:
100 if not self.ignorefunc:
104 ignore = self.hgignore()
101 ignore = self.hgignore()
105 allpats = []
102 allpats = []
106 [allpats.extend(patlist) for patlist in ignore.values()]
103 [allpats.extend(patlist) for patlist in ignore.values()]
107 if allpats:
104 if allpats:
108 try:
105 try:
109 files, self.ignorefunc, anypats = (
106 files, self.ignorefunc, anypats = (
110 util.matcher(self.root, inc=allpats, src='.hgignore'))
107 util.matcher(self.root, inc=allpats, src='.hgignore'))
111 except util.Abort:
108 except util.Abort:
112 # Re-raise an exception where the src is the right file
109 # Re-raise an exception where the src is the right file
113 for f, patlist in ignore.items():
110 for f, patlist in ignore.items():
114 files, self.ignorefunc, anypats = (
111 files, self.ignorefunc, anypats = (
115 util.matcher(self.root, inc=patlist, src=f))
112 util.matcher(self.root, inc=patlist, src=f))
116 else:
113 else:
117 self.ignorefunc = util.never
114 self.ignorefunc = util.never
118 return self.ignorefunc(fn)
115 return self.ignorefunc(fn)
119
116
120 def __del__(self):
117 def __del__(self):
121 if self.dirty:
118 if self.dirty:
122 self.write()
119 self.write()
123
120
124 def __getitem__(self, key):
121 def __getitem__(self, key):
125 try:
122 try:
126 return self.map[key]
123 return self.map[key]
127 except TypeError:
124 except TypeError:
128 self.lazyread()
125 self.lazyread()
129 return self[key]
126 return self[key]
130
127
131 def __contains__(self, key):
128 def __contains__(self, key):
132 self.lazyread()
129 self.lazyread()
133 return key in self.map
130 return key in self.map
134
131
135 def parents(self):
132 def parents(self):
136 self.lazyread()
133 self.lazyread()
137 return self.pl
134 return self.pl
138
135
139 def markdirty(self):
136 def markdirty(self):
140 if not self.dirty:
137 if not self.dirty:
141 self.dirty = 1
138 self.dirty = 1
142
139
143 def setparents(self, p1, p2=nullid):
140 def setparents(self, p1, p2=nullid):
144 self.lazyread()
141 self.lazyread()
145 self.markdirty()
142 self.markdirty()
146 self.pl = p1, p2
143 self.pl = p1, p2
147
144
148 def state(self, key):
145 def state(self, key):
149 try:
146 try:
150 return self[key][0]
147 return self[key][0]
151 except KeyError:
148 except KeyError:
152 return "?"
149 return "?"
153
150
154 def lazyread(self):
151 def lazyread(self):
155 if self.map is None:
152 if self.map is None:
156 self.read()
153 self.read()
157
154
158 def parse(self, st):
155 def parse(self, st):
159 self.pl = [st[:20], st[20: 40]]
156 self.pl = [st[:20], st[20: 40]]
160
157
161 # deref fields so they will be local in loop
158 # deref fields so they will be local in loop
162 map = self.map
159 map = self.map
163 copymap = self.copymap
160 copymap = self.copymap
164 format = self.format
161 format = self.format
165 unpack = struct.unpack
162 unpack = struct.unpack
166
163
167 pos = 40
164 pos = 40
168 e_size = struct.calcsize(format)
165 e_size = struct.calcsize(format)
169
166
170 while pos < len(st):
167 while pos < len(st):
171 newpos = pos + e_size
168 newpos = pos + e_size
172 e = unpack(format, st[pos:newpos])
169 e = unpack(format, st[pos:newpos])
173 l = e[4]
170 l = e[4]
174 pos = newpos
171 pos = newpos
175 newpos = pos + l
172 newpos = pos + l
176 f = st[pos:newpos]
173 f = st[pos:newpos]
177 if '\0' in f:
174 if '\0' in f:
178 f, c = f.split('\0')
175 f, c = f.split('\0')
179 copymap[f] = c
176 copymap[f] = c
180 map[f] = e[:4]
177 map[f] = e[:4]
181 pos = newpos
178 pos = newpos
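
Concretely, the dirstate file read here is two 20-byte parent nodes followed by records packed with struct format ">cllll" (state, mode, size, mtime, filename length), each record followed by the filename, with copied files storing "name\0copysource". A standalone sketch of packing and unpacking one record under that assumption:

    import struct

    fmt = ">cllll"                               # state, mode, size, mtime, name length
    name = "dir/file.txt\0dir/original.txt"      # a copied file: name NUL copy-source
    record = struct.pack(fmt, "n", 0100644, 12, 1161000000, len(name)) + name

    e = struct.unpack(fmt, record[:struct.calcsize(fmt)])
    f = record[struct.calcsize(fmt):struct.calcsize(fmt) + e[4]]
    state, mode, size, mtime = e[0], e[1], e[2], e[3]
    fname, copysource = f.split("\0")            # -> "dir/file.txt", "dir/original.txt"
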
182
179
183 def read(self):
180 def read(self):
184 self.map = {}
181 self.map = {}
185 self.pl = [nullid, nullid]
182 self.pl = [nullid, nullid]
186 try:
183 try:
187 st = self.opener("dirstate").read()
184 st = self.opener("dirstate").read()
188 if st:
185 if st:
189 self.parse(st)
186 self.parse(st)
190 except IOError, err:
187 except IOError, err:
191 if err.errno != errno.ENOENT: raise
188 if err.errno != errno.ENOENT: raise
192
189
193 def copy(self, source, dest):
190 def copy(self, source, dest):
194 self.lazyread()
191 self.lazyread()
195 self.markdirty()
192 self.markdirty()
196 self.copymap[dest] = source
193 self.copymap[dest] = source
197
194
198 def copied(self, file):
195 def copied(self, file):
199 return self.copymap.get(file, None)
196 return self.copymap.get(file, None)
200
197
201 def copies(self):
198 def copies(self):
202 return self.copymap
199 return self.copymap
203
200
204 def initdirs(self):
201 def initdirs(self):
205 if self.dirs is None:
202 if self.dirs is None:
206 self.dirs = {}
203 self.dirs = {}
207 for f in self.map:
204 for f in self.map:
208 self.updatedirs(f, 1)
205 self.updatedirs(f, 1)
209
206
210 def updatedirs(self, path, delta):
207 def updatedirs(self, path, delta):
211 if self.dirs is not None:
208 if self.dirs is not None:
212 for c in strutil.findall(path, '/'):
209 for c in strutil.findall(path, '/'):
213 pc = path[:c]
210 pc = path[:c]
214 self.dirs.setdefault(pc, 0)
211 self.dirs.setdefault(pc, 0)
215 self.dirs[pc] += delta
212 self.dirs[pc] += delta
216
213
217 def checkshadows(self, files):
214 def checkshadows(self, files):
218 def prefixes(f):
215 def prefixes(f):
219 for c in strutil.rfindall(f, '/'):
216 for c in strutil.rfindall(f, '/'):
220 yield f[:c]
217 yield f[:c]
221 self.lazyread()
218 self.lazyread()
222 self.initdirs()
219 self.initdirs()
223 seendirs = {}
220 seendirs = {}
224 for f in files:
221 for f in files:
225 if self.dirs.get(f):
222 if self.dirs.get(f):
226 raise util.Abort(_('directory named %r already in dirstate') %
223 raise util.Abort(_('directory named %r already in dirstate') %
227 f)
224 f)
228 for d in prefixes(f):
225 for d in prefixes(f):
229 if d in seendirs:
226 if d in seendirs:
230 break
227 break
231 if d in self.map:
228 if d in self.map:
232 raise util.Abort(_('file named %r already in dirstate') %
229 raise util.Abort(_('file named %r already in dirstate') %
233 d)
230 d)
234 seendirs[d] = True
231 seendirs[d] = True
235
232
236 def update(self, files, state, **kw):
233 def update(self, files, state, **kw):
237 ''' current states:
234 ''' current states:
238 n normal
235 n normal
239 m needs merging
236 m needs merging
240 r marked for removal
237 r marked for removal
241 a marked for addition'''
238 a marked for addition'''
242
239
243 if not files: return
240 if not files: return
244 self.lazyread()
241 self.lazyread()
245 self.markdirty()
242 self.markdirty()
246 if state == "a":
243 if state == "a":
247 self.initdirs()
244 self.initdirs()
248 self.checkshadows(files)
245 self.checkshadows(files)
249 for f in files:
246 for f in files:
250 if state == "r":
247 if state == "r":
251 self.map[f] = ('r', 0, 0, 0)
248 self.map[f] = ('r', 0, 0, 0)
252 self.updatedirs(f, -1)
249 self.updatedirs(f, -1)
253 else:
250 else:
254 if state == "a":
251 if state == "a":
255 self.updatedirs(f, 1)
252 self.updatedirs(f, 1)
256 s = os.lstat(self.wjoin(f))
253 s = os.lstat(self.wjoin(f))
257 st_size = kw.get('st_size', s.st_size)
254 st_size = kw.get('st_size', s.st_size)
258 st_mtime = kw.get('st_mtime', s.st_mtime)
255 st_mtime = kw.get('st_mtime', s.st_mtime)
259 self.map[f] = (state, s.st_mode, st_size, st_mtime)
256 self.map[f] = (state, s.st_mode, st_size, st_mtime)
260 if self.copymap.has_key(f):
257 if self.copymap.has_key(f):
261 del self.copymap[f]
258 del self.copymap[f]
262
259
263 def forget(self, files):
260 def forget(self, files):
264 if not files: return
261 if not files: return
265 self.lazyread()
262 self.lazyread()
266 self.markdirty()
263 self.markdirty()
267 self.initdirs()
264 self.initdirs()
268 for f in files:
265 for f in files:
269 try:
266 try:
270 del self.map[f]
267 del self.map[f]
271 self.updatedirs(f, -1)
268 self.updatedirs(f, -1)
272 except KeyError:
269 except KeyError:
273 self.ui.warn(_("not in dirstate: %s!\n") % f)
270 self.ui.warn(_("not in dirstate: %s!\n") % f)
274 pass
271 pass
275
272
276 def clear(self):
273 def clear(self):
277 self.map = {}
274 self.map = {}
278 self.copymap = {}
275 self.copymap = {}
279 self.dirs = None
276 self.dirs = None
280 self.markdirty()
277 self.markdirty()
281
278
282 def rebuild(self, parent, files):
279 def rebuild(self, parent, files):
283 self.clear()
280 self.clear()
284 umask = os.umask(0)
281 umask = os.umask(0)
285 os.umask(umask)
282 os.umask(umask)
286 for f in files:
283 for f in files:
287 if files.execf(f):
284 if files.execf(f):
288 self.map[f] = ('n', ~umask, -1, 0)
285 self.map[f] = ('n', ~umask, -1, 0)
289 else:
286 else:
290 self.map[f] = ('n', ~umask & 0666, -1, 0)
287 self.map[f] = ('n', ~umask & 0666, -1, 0)
291 self.pl = (parent, nullid)
288 self.pl = (parent, nullid)
292 self.markdirty()
289 self.markdirty()
293
290
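rebuild() above reads the process umask without disturbing it, by setting it to zero and immediately restoring the value that os.umask() returns, and then derives default entry modes from it. A small sketch of that trick and of the mode arithmetic for a non-executable file:

    import os

    # os.umask() sets the umask and returns the previous one, so a
    # set-and-restore pair is effectively a read.
    umask = os.umask(0)
    os.umask(umask)

    # With a typical umask of 0o022, a non-executable entry defaults to 0o644.
    print(oct(~0o022 & 0o666))   # 0o644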
294 def write(self):
291 def write(self):
295 if not self.dirty:
292 if not self.dirty:
296 return
293 return
297 st = self.opener("dirstate", "w", atomic=True)
294 st = self.opener("dirstate", "w", atomic=True)
298 st.write("".join(self.pl))
295 st.write("".join(self.pl))
299 for f, e in self.map.items():
296 for f, e in self.map.items():
300 c = self.copied(f)
297 c = self.copied(f)
301 if c:
298 if c:
302 f = f + "\0" + c
299 f = f + "\0" + c
303 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
300 e = struct.pack(self.format, e[0], e[1], e[2], e[3], len(f))
304 st.write(e + f)
301 st.write(e + f)
305 self.dirty = 0
302 self.dirty = 0
306
303
307 def filterfiles(self, files):
304 def filterfiles(self, files):
308 ret = {}
305 ret = {}
309 unknown = []
306 unknown = []
310
307
311 for x in files:
308 for x in files:
312 if x == '.':
309 if x == '.':
313 return self.map.copy()
310 return self.map.copy()
314 if x not in self.map:
311 if x not in self.map:
315 unknown.append(x)
312 unknown.append(x)
316 else:
313 else:
317 ret[x] = self.map[x]
314 ret[x] = self.map[x]
318
315
319 if not unknown:
316 if not unknown:
320 return ret
317 return ret
321
318
322 b = self.map.keys()
319 b = self.map.keys()
323 b.sort()
320 b.sort()
324 blen = len(b)
321 blen = len(b)
325
322
326 for x in unknown:
323 for x in unknown:
327 bs = bisect.bisect(b, "%s%s" % (x, '/'))
324 bs = bisect.bisect(b, "%s%s" % (x, '/'))
328 while bs < blen:
325 while bs < blen:
329 s = b[bs]
326 s = b[bs]
330 if len(s) > len(x) and s.startswith(x):
327 if len(s) > len(x) and s.startswith(x):
331 ret[s] = self.map[s]
328 ret[s] = self.map[s]
332 else:
329 else:
333 break
330 break
334 bs += 1
331 bs += 1
335 return ret
332 return ret
336
333
337 def supported_type(self, f, st, verbose=False):
334 def supported_type(self, f, st, verbose=False):
338 if stat.S_ISREG(st.st_mode):
335 if stat.S_ISREG(st.st_mode):
339 return True
336 return True
340 if verbose:
337 if verbose:
341 kind = 'unknown'
338 kind = 'unknown'
342 if stat.S_ISCHR(st.st_mode): kind = _('character device')
339 if stat.S_ISCHR(st.st_mode): kind = _('character device')
343 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
340 elif stat.S_ISBLK(st.st_mode): kind = _('block device')
344 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
341 elif stat.S_ISFIFO(st.st_mode): kind = _('fifo')
345 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
342 elif stat.S_ISLNK(st.st_mode): kind = _('symbolic link')
346 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
343 elif stat.S_ISSOCK(st.st_mode): kind = _('socket')
347 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
344 elif stat.S_ISDIR(st.st_mode): kind = _('directory')
348 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
345 self.ui.warn(_('%s: unsupported file type (type is %s)\n') % (
349 util.pathto(self.getcwd(), f),
346 util.pathto(self.getcwd(), f),
350 kind))
347 kind))
351 return False
348 return False
352
349
353 def statwalk(self, files=None, match=util.always, dc=None, ignored=False,
350 def walk(self, files=None, match=util.always, badmatch=None):
351 # filter out the stat
352 for src, f, st in self.statwalk(files, match, badmatch=badmatch):
353 yield src, f
354
355 def statwalk(self, files=None, match=util.always, ignored=False,
354 badmatch=None):
356 badmatch=None):
357 '''
358 walk recursively through the directory tree, finding all files
359 matched by the match function
360
361 results are yielded in a tuple (src, filename, st), where src
362 is one of:
363 'f' the file was found in the directory tree
364 'm' the file was only in the dirstate and not in the tree
365 'b' file was not found and matched badmatch
366
367 and st is the stat result if the file was found in the directory.
368 '''
355 self.lazyread()
369 self.lazyread()
356
370
357 # walk all files by default
371 # walk all files by default
358 if not files:
372 if not files:
359 files = [self.root]
373 files = [self.root]
360 if not dc:
374 dc = self.map.copy()
361 dc = self.map.copy()
375 else:
362 elif not dc:
376 files = util.unique(files)
363 dc = self.filterfiles(files)
377 dc = self.filterfiles(files)
364
378
365 def statmatch(file_, stat):
379 def imatch(file_):
366 file_ = util.pconvert(file_)
380 if file_ not in dc and self.ignore(file_):
367 if not ignored and file_ not in dc and self.ignore(file_):
368 return False
381 return False
369 return match(file_)
382 return match(file_)
370
383
371 return self.walkhelper(files=files, statmatch=statmatch, dc=dc,
384 if ignored: imatch = match
372 badmatch=badmatch)
373
374 def walk(self, files=None, match=util.always, dc=None, badmatch=None):
375 # filter out the stat
376 for src, f, st in self.statwalk(files, match, dc, badmatch=badmatch):
377 yield src, f
378
385
379 # walk recursively through the directory tree, finding all files
380 # matched by the statmatch function
381 #
382 # results are yielded in a tuple (src, filename, st), where src
383 # is one of:
384 # 'f' the file was found in the directory tree
385 # 'm' the file was only in the dirstate and not in the tree
386 # and st is the stat result if the file was found in the directory.
387 #
388 # dc is an optional arg for the current dirstate. dc is not modified
389 # directly by this function, but might be modified by your statmatch call.
390 #
391 def walkhelper(self, files, statmatch, dc, badmatch=None):
392 # self.root may end with a path separator when self.root == '/'
386 # self.root may end with a path separator when self.root == '/'
393 common_prefix_len = len(self.root)
387 common_prefix_len = len(self.root)
394 if not self.root.endswith('/'):
388 if not self.root.endswith('/'):
395 common_prefix_len += 1
389 common_prefix_len += 1
396 # recursion free walker, faster than os.walk.
390 # recursion free walker, faster than os.walk.
397 def findfiles(s):
391 def findfiles(s):
398 work = [s]
392 work = [s]
399 while work:
393 while work:
400 top = work.pop()
394 top = work.pop()
401 names = os.listdir(top)
395 names = os.listdir(top)
402 names.sort()
396 names.sort()
403 # nd is the top of the repository dir tree
397 # nd is the top of the repository dir tree
404 nd = util.normpath(top[common_prefix_len:])
398 nd = util.normpath(top[common_prefix_len:])
405 if nd == '.':
399 if nd == '.':
406 nd = ''
400 nd = ''
407 else:
401 else:
408 # do not recurse into a repo contained in this
402 # do not recurse into a repo contained in this
409 # one. use bisect to find .hg directory so speed
403 # one. use bisect to find .hg directory so speed
410 # is good on big directories.
404 # is good on big directories.
411 hg = bisect.bisect_left(names, '.hg')
405 hg = bisect.bisect_left(names, '.hg')
412 if hg < len(names) and names[hg] == '.hg':
406 if hg < len(names) and names[hg] == '.hg':
413 if os.path.isdir(os.path.join(top, '.hg')):
407 if os.path.isdir(os.path.join(top, '.hg')):
414 continue
408 continue
415 for f in names:
409 for f in names:
416 np = util.pconvert(os.path.join(nd, f))
410 np = util.pconvert(os.path.join(nd, f))
417 if seen(np):
411 if seen(np):
418 continue
412 continue
419 p = os.path.join(top, f)
413 p = os.path.join(top, f)
420 # don't trip over symlinks
414 # don't trip over symlinks
421 st = os.lstat(p)
415 st = os.lstat(p)
422 if stat.S_ISDIR(st.st_mode):
416 if stat.S_ISDIR(st.st_mode):
423 ds = os.path.join(nd, f +'/')
417 ds = util.pconvert(os.path.join(nd, f +'/'))
424 if statmatch(ds, st):
418 if imatch(ds):
425 work.append(p)
419 work.append(p)
426 if statmatch(np, st) and np in dc:
420 if imatch(np) and np in dc:
427 yield 'm', np, st
421 yield 'm', np, st
428 elif statmatch(np, st):
422 elif imatch(np):
429 if self.supported_type(np, st):
423 if self.supported_type(np, st):
430 yield 'f', np, st
424 yield 'f', np, st
431 elif np in dc:
425 elif np in dc:
432 yield 'm', np, st
426 yield 'm', np, st
433
427
434 known = {'.hg': 1}
428 known = {'.hg': 1}
435 def seen(fn):
429 def seen(fn):
436 if fn in known: return True
430 if fn in known: return True
437 known[fn] = 1
431 known[fn] = 1
438
432
439 # step one, find all files that match our criteria
433 # step one, find all files that match our criteria
440 files.sort()
434 files.sort()
441 for ff in util.unique(files):
435 for ff in files:
436 nf = util.normpath(ff)
442 f = self.wjoin(ff)
437 f = self.wjoin(ff)
443 try:
438 try:
444 st = os.lstat(f)
439 st = os.lstat(f)
445 except OSError, inst:
440 except OSError, inst:
446 nf = util.normpath(ff)
447 found = False
441 found = False
448 for fn in dc:
442 for fn in dc:
449 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
443 if nf == fn or (fn.startswith(nf) and fn[len(nf)] == '/'):
450 found = True
444 found = True
451 break
445 break
452 if not found:
446 if not found:
453 if inst.errno != errno.ENOENT or not badmatch:
447 if inst.errno != errno.ENOENT or not badmatch:
454 self.ui.warn('%s: %s\n' % (
448 self.ui.warn('%s: %s\n' % (
455 util.pathto(self.getcwd(), ff),
449 util.pathto(self.getcwd(), ff),
456 inst.strerror))
450 inst.strerror))
457 elif badmatch and badmatch(ff) and statmatch(ff, None):
451 elif badmatch and badmatch(ff) and imatch(nf):
458 yield 'b', ff, None
452 yield 'b', ff, None
459 continue
453 continue
460 if stat.S_ISDIR(st.st_mode):
454 if stat.S_ISDIR(st.st_mode):
461 cmp1 = (lambda x, y: cmp(x[1], y[1]))
455 cmp1 = (lambda x, y: cmp(x[1], y[1]))
462 sorted_ = [ x for x in findfiles(f) ]
456 sorted_ = [ x for x in findfiles(f) ]
463 sorted_.sort(cmp1)
457 sorted_.sort(cmp1)
464 for e in sorted_:
458 for e in sorted_:
465 yield e
459 yield e
466 else:
460 else:
467 ff = util.normpath(ff)
461 if not seen(nf) and match(nf):
468 if seen(ff):
469 continue
470 self.blockignore = True
471 if statmatch(ff, st):
472 if self.supported_type(ff, st, verbose=True):
462 if self.supported_type(ff, st, verbose=True):
473 yield 'f', ff, st
463 yield 'f', nf, st
474 elif ff in dc:
464 elif ff in dc:
475 yield 'm', ff, st
465 yield 'm', nf, st
476 self.blockignore = False
477
466
478 # step two, run through anything left in the dc hash and yield
467 # step two, run through anything left in the dc hash and yield
479 # if we haven't already seen it
468 # if we haven't already seen it
480 ks = dc.keys()
469 ks = dc.keys()
481 ks.sort()
470 ks.sort()
482 for k in ks:
471 for k in ks:
483 if not seen(k) and (statmatch(k, None)):
472 if not seen(k) and imatch(k):
484 yield 'm', k, None
473 yield 'm', k, None
485
474
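A minimal consumer of the walk()/statwalk() protocol documented above, assuming this era's hg.repository() helper for opening a repository; the 'f', 'm' and 'b' sources carry the meanings listed in the statwalk docstring ('b' only shows up when a badmatch callback is supplied).

    from mercurial import hg, ui, util

    repo = hg.repository(ui.ui(), '.')           # repository in the current directory
    for src, fn in repo.dirstate.walk(match=util.always):
        if src == 'f':
            print('in working dir: %s' % fn)     # found in the directory tree
        elif src == 'm':
            print('dirstate only:  %s' % fn)     # tracked but missing from the tree
        elif src == 'b':
            print('bad match:      %s' % fn)     # reported through badmatch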
486 def status(self, files=None, match=util.always, list_ignored=False,
475 def status(self, files=None, match=util.always, list_ignored=False,
487 list_clean=False):
476 list_clean=False):
488 lookup, modified, added, unknown, ignored = [], [], [], [], []
477 lookup, modified, added, unknown, ignored = [], [], [], [], []
489 removed, deleted, clean = [], [], []
478 removed, deleted, clean = [], [], []
490
479
491 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
480 for src, fn, st in self.statwalk(files, match, ignored=list_ignored):
492 try:
481 try:
493 type_, mode, size, time = self[fn]
482 type_, mode, size, time = self[fn]
494 except KeyError:
483 except KeyError:
495 if list_ignored and self.ignore(fn):
484 if list_ignored and self.ignore(fn):
496 ignored.append(fn)
485 ignored.append(fn)
497 else:
486 else:
498 unknown.append(fn)
487 unknown.append(fn)
499 continue
488 continue
500 if src == 'm':
489 if src == 'm':
501 nonexistent = True
490 nonexistent = True
502 if not st:
491 if not st:
503 try:
492 try:
504 st = os.lstat(self.wjoin(fn))
493 st = os.lstat(self.wjoin(fn))
505 except OSError, inst:
494 except OSError, inst:
506 if inst.errno != errno.ENOENT:
495 if inst.errno != errno.ENOENT:
507 raise
496 raise
508 st = None
497 st = None
509 # We need to re-check that it is a valid file
498 # We need to re-check that it is a valid file
510 if st and self.supported_type(fn, st):
499 if st and self.supported_type(fn, st):
511 nonexistent = False
500 nonexistent = False
512 # XXX: what to do with files no longer present in the fs
501 # XXX: what to do with files no longer present in the fs
513 # that are not removed from the dirstate?
502 # that are not removed from the dirstate?
514 if nonexistent and type_ in "nm":
503 if nonexistent and type_ in "nm":
515 deleted.append(fn)
504 deleted.append(fn)
516 continue
505 continue
517 # check the common case first
506 # check the common case first
518 if type_ == 'n':
507 if type_ == 'n':
519 if not st:
508 if not st:
520 st = os.lstat(self.wjoin(fn))
509 st = os.lstat(self.wjoin(fn))
521 if size >= 0 and (size != st.st_size
510 if size >= 0 and (size != st.st_size
522 or (mode ^ st.st_mode) & 0100):
511 or (mode ^ st.st_mode) & 0100):
523 modified.append(fn)
512 modified.append(fn)
524 elif time != int(st.st_mtime):
513 elif time != int(st.st_mtime):
525 lookup.append(fn)
514 lookup.append(fn)
526 elif list_clean:
515 elif list_clean:
527 clean.append(fn)
516 clean.append(fn)
528 elif type_ == 'm':
517 elif type_ == 'm':
529 modified.append(fn)
518 modified.append(fn)
530 elif type_ == 'a':
519 elif type_ == 'a':
531 added.append(fn)
520 added.append(fn)
532 elif type_ == 'r':
521 elif type_ == 'r':
533 removed.append(fn)
522 removed.append(fn)
534
523
535 return (lookup, modified, added, removed, deleted, unknown, ignored,
524 return (lookup, modified, added, removed, deleted, unknown, ignored,
536 clean)
525 clean)
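status() above always returns the same eight lists in a fixed order, whether or not the optional ones were requested (ignored and clean stay empty unless list_ignored/list_clean are set). A short sketch of unpacking them, with the same assumed hg.repository() helper as before:

    from mercurial import hg, ui

    repo = hg.repository(ui.ui(), '.')
    (lookup, modified, added, removed,
     deleted, unknown, ignored, clean) = repo.dirstate.status(list_clean=True)
    for label, files in [('modified', modified), ('added', added),
                         ('removed', removed), ('deleted', deleted)]:
        print('%s: %d file(s)' % (label, len(files)))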
@@ -1,1856 +1,1868 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ('lookup', 'changegroupsubset')
18 capabilities = ('lookup', 'changegroupsubset')
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("There is no Mercurial repository"
30 raise repo.RepoError(_("There is no Mercurial repository"
31 " here (.hg not found)"))
31 " here (.hg not found)"))
32 path = p
32 path = p
33 self.path = os.path.join(path, ".hg")
33 self.path = os.path.join(path, ".hg")
34
34
35 if not os.path.isdir(self.path):
35 if not os.path.isdir(self.path):
36 if create:
36 if create:
37 if not os.path.exists(path):
37 if not os.path.exists(path):
38 os.mkdir(path)
38 os.mkdir(path)
39 os.mkdir(self.path)
39 os.mkdir(self.path)
40 os.mkdir(self.join("data"))
40 os.mkdir(self.join("data"))
41 else:
41 else:
42 raise repo.RepoError(_("repository %s not found") % path)
42 raise repo.RepoError(_("repository %s not found") % path)
43 elif create:
43 elif create:
44 raise repo.RepoError(_("repository %s already exists") % path)
44 raise repo.RepoError(_("repository %s already exists") % path)
45
45
46 self.root = os.path.abspath(path)
46 self.root = os.path.abspath(path)
47 self.origroot = path
47 self.origroot = path
48 self.ui = ui.ui(parentui=parentui)
48 self.ui = ui.ui(parentui=parentui)
49 self.opener = util.opener(self.path)
49 self.opener = util.opener(self.path)
50 self.sopener = util.opener(self.path)
50 self.sopener = util.opener(self.path)
51 self.wopener = util.opener(self.root)
51 self.wopener = util.opener(self.root)
52
52
53 try:
53 try:
54 self.ui.readconfig(self.join("hgrc"), self.root)
54 self.ui.readconfig(self.join("hgrc"), self.root)
55 except IOError:
55 except IOError:
56 pass
56 pass
57
57
58 v = self.ui.configrevlog()
58 v = self.ui.configrevlog()
59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
59 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
60 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
61 fl = v.get('flags', None)
61 fl = v.get('flags', None)
62 flags = 0
62 flags = 0
63 if fl != None:
63 if fl != None:
64 for x in fl.split():
64 for x in fl.split():
65 flags |= revlog.flagstr(x)
65 flags |= revlog.flagstr(x)
66 elif self.revlogv1:
66 elif self.revlogv1:
67 flags = revlog.REVLOG_DEFAULT_FLAGS
67 flags = revlog.REVLOG_DEFAULT_FLAGS
68
68
69 v = self.revlogversion | flags
69 v = self.revlogversion | flags
70 self.manifest = manifest.manifest(self.sopener, v)
70 self.manifest = manifest.manifest(self.sopener, v)
71 self.changelog = changelog.changelog(self.sopener, v)
71 self.changelog = changelog.changelog(self.sopener, v)
72
72
73 # the changelog might not have the inline index flag
73 # the changelog might not have the inline index flag
74 # on. If the format of the changelog is the same as found in
74 # on. If the format of the changelog is the same as found in
75 # .hgrc, apply any flags found in the .hgrc as well.
75 # .hgrc, apply any flags found in the .hgrc as well.
76 # Otherwise, just use the version from the changelog
76 # Otherwise, just use the version from the changelog
77 v = self.changelog.version
77 v = self.changelog.version
78 if v == self.revlogversion:
78 if v == self.revlogversion:
79 v |= flags
79 v |= flags
80 self.revlogversion = v
80 self.revlogversion = v
81
81
82 self.tagscache = None
82 self.tagscache = None
83 self.branchcache = None
83 self.branchcache = None
84 self.nodetagscache = None
84 self.nodetagscache = None
85 self.encodepats = None
85 self.encodepats = None
86 self.decodepats = None
86 self.decodepats = None
87 self.transhandle = None
87 self.transhandle = None
88
88
89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
89 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
90
90
91 def url(self):
91 def url(self):
92 return 'file:' + self.root
92 return 'file:' + self.root
93
93
94 def hook(self, name, throw=False, **args):
94 def hook(self, name, throw=False, **args):
95 def callhook(hname, funcname):
95 def callhook(hname, funcname):
96 '''call python hook. hook is callable object, looked up as
96 '''call python hook. hook is callable object, looked up as
97 name in python module. if callable returns "true", hook
97 name in python module. if callable returns "true", hook
98 fails, else passes. if hook raises exception, treated as
98 fails, else passes. if hook raises exception, treated as
99 hook failure. exception propagates if throw is "true".
99 hook failure. exception propagates if throw is "true".
100
100
101 reason for "true" meaning "hook failed" is so that
101 reason for "true" meaning "hook failed" is so that
102 unmodified commands (e.g. mercurial.commands.update) can
102 unmodified commands (e.g. mercurial.commands.update) can
103 be run as hooks without wrappers to convert return values.'''
103 be run as hooks without wrappers to convert return values.'''
104
104
105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
105 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
106 d = funcname.rfind('.')
106 d = funcname.rfind('.')
107 if d == -1:
107 if d == -1:
108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
108 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
109 % (hname, funcname))
109 % (hname, funcname))
110 modname = funcname[:d]
110 modname = funcname[:d]
111 try:
111 try:
112 obj = __import__(modname)
112 obj = __import__(modname)
113 except ImportError:
113 except ImportError:
114 try:
114 try:
115 # extensions are loaded with hgext_ prefix
115 # extensions are loaded with hgext_ prefix
116 obj = __import__("hgext_%s" % modname)
116 obj = __import__("hgext_%s" % modname)
117 except ImportError:
117 except ImportError:
118 raise util.Abort(_('%s hook is invalid '
118 raise util.Abort(_('%s hook is invalid '
119 '(import of "%s" failed)') %
119 '(import of "%s" failed)') %
120 (hname, modname))
120 (hname, modname))
121 try:
121 try:
122 for p in funcname.split('.')[1:]:
122 for p in funcname.split('.')[1:]:
123 obj = getattr(obj, p)
123 obj = getattr(obj, p)
124 except AttributeError, err:
124 except AttributeError, err:
125 raise util.Abort(_('%s hook is invalid '
125 raise util.Abort(_('%s hook is invalid '
126 '("%s" is not defined)') %
126 '("%s" is not defined)') %
127 (hname, funcname))
127 (hname, funcname))
128 if not callable(obj):
128 if not callable(obj):
129 raise util.Abort(_('%s hook is invalid '
129 raise util.Abort(_('%s hook is invalid '
130 '("%s" is not callable)') %
130 '("%s" is not callable)') %
131 (hname, funcname))
131 (hname, funcname))
132 try:
132 try:
133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
133 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
134 except (KeyboardInterrupt, util.SignalInterrupt):
134 except (KeyboardInterrupt, util.SignalInterrupt):
135 raise
135 raise
136 except Exception, exc:
136 except Exception, exc:
137 if isinstance(exc, util.Abort):
137 if isinstance(exc, util.Abort):
138 self.ui.warn(_('error: %s hook failed: %s\n') %
138 self.ui.warn(_('error: %s hook failed: %s\n') %
139 (hname, exc.args[0]))
139 (hname, exc.args[0]))
140 else:
140 else:
141 self.ui.warn(_('error: %s hook raised an exception: '
141 self.ui.warn(_('error: %s hook raised an exception: '
142 '%s\n') % (hname, exc))
142 '%s\n') % (hname, exc))
143 if throw:
143 if throw:
144 raise
144 raise
145 self.ui.print_exc()
145 self.ui.print_exc()
146 return True
146 return True
147 if r:
147 if r:
148 if throw:
148 if throw:
149 raise util.Abort(_('%s hook failed') % hname)
149 raise util.Abort(_('%s hook failed') % hname)
150 self.ui.warn(_('warning: %s hook failed\n') % hname)
150 self.ui.warn(_('warning: %s hook failed\n') % hname)
151 return r
151 return r
152
152
153 def runhook(name, cmd):
153 def runhook(name, cmd):
154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
154 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
155 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
156 r = util.system(cmd, environ=env, cwd=self.root)
156 r = util.system(cmd, environ=env, cwd=self.root)
157 if r:
157 if r:
158 desc, r = util.explain_exit(r)
158 desc, r = util.explain_exit(r)
159 if throw:
159 if throw:
160 raise util.Abort(_('%s hook %s') % (name, desc))
160 raise util.Abort(_('%s hook %s') % (name, desc))
161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
161 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
162 return r
162 return r
163
163
164 r = False
164 r = False
165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
165 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
166 if hname.split(".", 1)[0] == name and cmd]
166 if hname.split(".", 1)[0] == name and cmd]
167 hooks.sort()
167 hooks.sort()
168 for hname, cmd in hooks:
168 for hname, cmd in hooks:
169 if cmd.startswith('python:'):
169 if cmd.startswith('python:'):
170 r = callhook(hname, cmd[7:].strip()) or r
170 r = callhook(hname, cmd[7:].strip()) or r
171 else:
171 else:
172 r = runhook(hname, cmd) or r
172 r = runhook(hname, cmd) or r
173 return r
173 return r
174
174
175 tag_disallowed = ':\r\n'
175 tag_disallowed = ':\r\n'
176
176
177 def tag(self, name, node, message, local, user, date):
177 def tag(self, name, node, message, local, user, date):
178 '''tag a revision with a symbolic name.
178 '''tag a revision with a symbolic name.
179
179
180 if local is True, the tag is stored in a per-repository file.
180 if local is True, the tag is stored in a per-repository file.
181 otherwise, it is stored in the .hgtags file, and a new
181 otherwise, it is stored in the .hgtags file, and a new
182 changeset is committed with the change.
182 changeset is committed with the change.
183
183
184 keyword arguments:
184 keyword arguments:
185
185
186 local: whether to store tag in non-version-controlled file
186 local: whether to store tag in non-version-controlled file
187 (default False)
187 (default False)
188
188
189 message: commit message to use if committing
189 message: commit message to use if committing
190
190
191 user: name of user to use if committing
191 user: name of user to use if committing
192
192
193 date: date tuple to use if committing'''
193 date: date tuple to use if committing'''
194
194
195 for c in self.tag_disallowed:
195 for c in self.tag_disallowed:
196 if c in name:
196 if c in name:
197 raise util.Abort(_('%r cannot be used in a tag name') % c)
197 raise util.Abort(_('%r cannot be used in a tag name') % c)
198
198
199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
199 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
200
200
201 if local:
201 if local:
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
202 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
203 self.hook('tag', node=hex(node), tag=name, local=local)
203 self.hook('tag', node=hex(node), tag=name, local=local)
204 return
204 return
205
205
206 for x in self.status()[:5]:
206 for x in self.status()[:5]:
207 if '.hgtags' in x:
207 if '.hgtags' in x:
208 raise util.Abort(_('working copy of .hgtags is changed '
208 raise util.Abort(_('working copy of .hgtags is changed '
209 '(please commit .hgtags manually)'))
209 '(please commit .hgtags manually)'))
210
210
211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
211 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
212 if self.dirstate.state('.hgtags') == '?':
212 if self.dirstate.state('.hgtags') == '?':
213 self.add(['.hgtags'])
213 self.add(['.hgtags'])
214
214
215 self.commit(['.hgtags'], message, user, date)
215 self.commit(['.hgtags'], message, user, date)
216 self.hook('tag', node=hex(node), tag=name, local=local)
216 self.hook('tag', node=hex(node), tag=name, local=local)
217
217
218 def tags(self):
218 def tags(self):
219 '''return a mapping of tag to node'''
219 '''return a mapping of tag to node'''
220 if not self.tagscache:
220 if not self.tagscache:
221 self.tagscache = {}
221 self.tagscache = {}
222
222
223 def parsetag(line, context):
223 def parsetag(line, context):
224 if not line:
224 if not line:
225 return
225 return
226 s = line.split(" ", 1)
226 s = line.split(" ", 1)
227 if len(s) != 2:
227 if len(s) != 2:
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
228 self.ui.warn(_("%s: cannot parse entry\n") % context)
229 return
229 return
230 node, key = s
230 node, key = s
231 key = key.strip()
231 key = key.strip()
232 try:
232 try:
233 bin_n = bin(node)
233 bin_n = bin(node)
234 except TypeError:
234 except TypeError:
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
235 self.ui.warn(_("%s: node '%s' is not well formed\n") %
236 (context, node))
236 (context, node))
237 return
237 return
238 if bin_n not in self.changelog.nodemap:
238 if bin_n not in self.changelog.nodemap:
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
239 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
240 (context, key))
240 (context, key))
241 return
241 return
242 self.tagscache[key] = bin_n
242 self.tagscache[key] = bin_n
243
243
244 # read the tags file from each head, ending with the tip,
244 # read the tags file from each head, ending with the tip,
245 # and add each tag found to the map, with "newer" ones
245 # and add each tag found to the map, with "newer" ones
246 # taking precedence
246 # taking precedence
247 heads = self.heads()
247 heads = self.heads()
248 heads.reverse()
248 heads.reverse()
249 seen = {}
249 seen = {}
250 for node in heads:
250 for node in heads:
251 f = self.filectx('.hgtags', node)
251 f = self.filectx('.hgtags', node)
252 if not f or f.filerev() in seen: continue
252 if not f or f.filerev() in seen: continue
253 seen[f.filerev()] = 1
253 seen[f.filerev()] = 1
254 count = 0
254 count = 0
255 for l in f.data().splitlines():
255 for l in f.data().splitlines():
256 count += 1
256 count += 1
257 parsetag(l, _("%s, line %d") % (str(f), count))
257 parsetag(l, _("%s, line %d") % (str(f), count))
258
258
259 try:
259 try:
260 f = self.opener("localtags")
260 f = self.opener("localtags")
261 count = 0
261 count = 0
262 for l in f:
262 for l in f:
263 count += 1
263 count += 1
264 parsetag(l, _("localtags, line %d") % count)
264 parsetag(l, _("localtags, line %d") % count)
265 except IOError:
265 except IOError:
266 pass
266 pass
267
267
268 self.tagscache['tip'] = self.changelog.tip()
268 self.tagscache['tip'] = self.changelog.tip()
269
269
270 return self.tagscache
270 return self.tagscache
271
271
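tag() writes, and parsetag() above reads, one tag per line: the full 40-character hex nodeid, a single space, then the tag name; .hgtags and .hg/localtags share this format. Schematically:

    <40-character hex nodeid> my-tag-name

Because parsetag() simply overwrites earlier entries (later lines and newer heads win), retagging only needs to append a new line with the same name.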
272 def tagslist(self):
272 def tagslist(self):
273 '''return a list of tags ordered by revision'''
273 '''return a list of tags ordered by revision'''
274 l = []
274 l = []
275 for t, n in self.tags().items():
275 for t, n in self.tags().items():
276 try:
276 try:
277 r = self.changelog.rev(n)
277 r = self.changelog.rev(n)
278 except:
278 except:
279 r = -2 # sort to the beginning of the list if unknown
279 r = -2 # sort to the beginning of the list if unknown
280 l.append((r, t, n))
280 l.append((r, t, n))
281 l.sort()
281 l.sort()
282 return [(t, n) for r, t, n in l]
282 return [(t, n) for r, t, n in l]
283
283
284 def nodetags(self, node):
284 def nodetags(self, node):
285 '''return the tags associated with a node'''
285 '''return the tags associated with a node'''
286 if not self.nodetagscache:
286 if not self.nodetagscache:
287 self.nodetagscache = {}
287 self.nodetagscache = {}
288 for t, n in self.tags().items():
288 for t, n in self.tags().items():
289 self.nodetagscache.setdefault(n, []).append(t)
289 self.nodetagscache.setdefault(n, []).append(t)
290 return self.nodetagscache.get(node, [])
290 return self.nodetagscache.get(node, [])
291
291
292 def branchtags(self):
292 def branchtags(self):
293 if self.branchcache != None:
293 if self.branchcache != None:
294 return self.branchcache
294 return self.branchcache
295
295
296 self.branchcache = {} # avoid recursion in changectx
296 self.branchcache = {} # avoid recursion in changectx
297
297
298 partial, last, lrev = self._readbranchcache()
298 partial, last, lrev = self._readbranchcache()
299
299
300 tiprev = self.changelog.count() - 1
300 tiprev = self.changelog.count() - 1
301 if lrev != tiprev:
301 if lrev != tiprev:
302 self._updatebranchcache(partial, lrev+1, tiprev+1)
302 self._updatebranchcache(partial, lrev+1, tiprev+1)
303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
303 self._writebranchcache(partial, self.changelog.tip(), tiprev)
304
304
305 self.branchcache = partial
305 self.branchcache = partial
306 return self.branchcache
306 return self.branchcache
307
307
308 def _readbranchcache(self):
308 def _readbranchcache(self):
309 partial = {}
309 partial = {}
310 try:
310 try:
311 f = self.opener("branches.cache")
311 f = self.opener("branches.cache")
312 last, lrev = f.readline().rstrip().split(" ", 1)
312 last, lrev = f.readline().rstrip().split(" ", 1)
313 last, lrev = bin(last), int(lrev)
313 last, lrev = bin(last), int(lrev)
314 if (lrev < self.changelog.count() and
314 if (lrev < self.changelog.count() and
315 self.changelog.node(lrev) == last): # sanity check
315 self.changelog.node(lrev) == last): # sanity check
316 for l in f:
316 for l in f:
317 node, label = l.rstrip().split(" ", 1)
317 node, label = l.rstrip().split(" ", 1)
318 partial[label] = bin(node)
318 partial[label] = bin(node)
319 else: # invalidate the cache
319 else: # invalidate the cache
320 last, lrev = nullid, -1
320 last, lrev = nullid, -1
321 f.close()
321 f.close()
322 except IOError:
322 except IOError:
323 last, lrev = nullid, -1
323 last, lrev = nullid, -1
324 return partial, last, lrev
324 return partial, last, lrev
325
325
326 def _writebranchcache(self, branches, tip, tiprev):
326 def _writebranchcache(self, branches, tip, tiprev):
327 try:
327 try:
328 f = self.opener("branches.cache", "w")
328 f = self.opener("branches.cache", "w")
329 f.write("%s %s\n" % (hex(tip), tiprev))
329 f.write("%s %s\n" % (hex(tip), tiprev))
330 for label, node in branches.iteritems():
330 for label, node in branches.iteritems():
331 f.write("%s %s\n" % (hex(node), label))
331 f.write("%s %s\n" % (hex(node), label))
332 except IOError:
332 except IOError:
333 pass
333 pass
334
334
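The branch cache read and written above is a small text file: the first line records the cached tip (hex nodeid and revision number), and every following line maps a hex nodeid to a branch label. Schematically, .hg/branches.cache looks like:

    <hex nodeid of tip> <tip revision number>
    <hex nodeid> default
    <hex nodeid> stable

_readbranchcache() only trusts the labels if that first line still matches the changelog; otherwise it returns nullid/-1 and branchtags() rebuilds the cache from scratch.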
335 def _updatebranchcache(self, partial, start, end):
335 def _updatebranchcache(self, partial, start, end):
336 for r in xrange(start, end):
336 for r in xrange(start, end):
337 c = self.changectx(r)
337 c = self.changectx(r)
338 b = c.branch()
338 b = c.branch()
339 if b:
339 if b:
340 partial[b] = c.node()
340 partial[b] = c.node()
341
341
342 def lookup(self, key):
342 def lookup(self, key):
343 if key == '.':
343 if key == '.':
344 key = self.dirstate.parents()[0]
344 key = self.dirstate.parents()[0]
345 if key == nullid:
345 if key == nullid:
346 raise repo.RepoError(_("no revision checked out"))
346 raise repo.RepoError(_("no revision checked out"))
347 n = self.changelog._match(key)
347 n = self.changelog._match(key)
348 if n:
348 if n:
349 return n
349 return n
350 if key in self.tags():
350 if key in self.tags():
351 return self.tags()[key]
351 return self.tags()[key]
352 if key in self.branchtags():
352 if key in self.branchtags():
353 return self.branchtags()[key]
353 return self.branchtags()[key]
354 n = self.changelog._partialmatch(key)
354 n = self.changelog._partialmatch(key)
355 if n:
355 if n:
356 return n
356 return n
357 raise repo.RepoError(_("unknown revision '%s'") % key)
357 raise repo.RepoError(_("unknown revision '%s'") % key)
358
358
359 def dev(self):
359 def dev(self):
360 return os.lstat(self.path).st_dev
360 return os.lstat(self.path).st_dev
361
361
362 def local(self):
362 def local(self):
363 return True
363 return True
364
364
365 def join(self, f):
365 def join(self, f):
366 return os.path.join(self.path, f)
366 return os.path.join(self.path, f)
367
367
368 def sjoin(self, f):
368 def sjoin(self, f):
369 return os.path.join(self.path, f)
369 return os.path.join(self.path, f)
370
370
371 def wjoin(self, f):
371 def wjoin(self, f):
372 return os.path.join(self.root, f)
372 return os.path.join(self.root, f)
373
373
374 def file(self, f):
374 def file(self, f):
375 if f[0] == '/':
375 if f[0] == '/':
376 f = f[1:]
376 f = f[1:]
377 return filelog.filelog(self.sopener, f, self.revlogversion)
377 return filelog.filelog(self.sopener, f, self.revlogversion)
378
378
379 def changectx(self, changeid=None):
379 def changectx(self, changeid=None):
380 return context.changectx(self, changeid)
380 return context.changectx(self, changeid)
381
381
382 def workingctx(self):
382 def workingctx(self):
383 return context.workingctx(self)
383 return context.workingctx(self)
384
384
385 def parents(self, changeid=None):
385 def parents(self, changeid=None):
386 '''
386 '''
387 get list of changectxs for parents of changeid or working directory
387 get list of changectxs for parents of changeid or working directory
388 '''
388 '''
389 if changeid is None:
389 if changeid is None:
390 pl = self.dirstate.parents()
390 pl = self.dirstate.parents()
391 else:
391 else:
392 n = self.changelog.lookup(changeid)
392 n = self.changelog.lookup(changeid)
393 pl = self.changelog.parents(n)
393 pl = self.changelog.parents(n)
394 if pl[1] == nullid:
394 if pl[1] == nullid:
395 return [self.changectx(pl[0])]
395 return [self.changectx(pl[0])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
396 return [self.changectx(pl[0]), self.changectx(pl[1])]
397
397
398 def filectx(self, path, changeid=None, fileid=None):
398 def filectx(self, path, changeid=None, fileid=None):
399 """changeid can be a changeset revision, node, or tag.
399 """changeid can be a changeset revision, node, or tag.
400 fileid can be a file revision or node."""
400 fileid can be a file revision or node."""
401 return context.filectx(self, path, changeid, fileid)
401 return context.filectx(self, path, changeid, fileid)
402
402
403 def getcwd(self):
403 def getcwd(self):
404 return self.dirstate.getcwd()
404 return self.dirstate.getcwd()
405
405
406 def wfile(self, f, mode='r'):
406 def wfile(self, f, mode='r'):
407 return self.wopener(f, mode)
407 return self.wopener(f, mode)
408
408
409 def wread(self, filename):
409 def wread(self, filename):
410 if self.encodepats == None:
410 if self.encodepats == None:
411 l = []
411 l = []
412 for pat, cmd in self.ui.configitems("encode"):
412 for pat, cmd in self.ui.configitems("encode"):
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
413 mf = util.matcher(self.root, "", [pat], [], [])[1]
414 l.append((mf, cmd))
414 l.append((mf, cmd))
415 self.encodepats = l
415 self.encodepats = l
416
416
417 data = self.wopener(filename, 'r').read()
417 data = self.wopener(filename, 'r').read()
418
418
419 for mf, cmd in self.encodepats:
419 for mf, cmd in self.encodepats:
420 if mf(filename):
420 if mf(filename):
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
421 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
422 data = util.filter(data, cmd)
422 data = util.filter(data, cmd)
423 break
423 break
424
424
425 return data
425 return data
426
426
427 def wwrite(self, filename, data, fd=None):
427 def wwrite(self, filename, data, fd=None):
428 if self.decodepats == None:
428 if self.decodepats == None:
429 l = []
429 l = []
430 for pat, cmd in self.ui.configitems("decode"):
430 for pat, cmd in self.ui.configitems("decode"):
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
431 mf = util.matcher(self.root, "", [pat], [], [])[1]
432 l.append((mf, cmd))
432 l.append((mf, cmd))
433 self.decodepats = l
433 self.decodepats = l
434
434
435 for mf, cmd in self.decodepats:
435 for mf, cmd in self.decodepats:
436 if mf(filename):
436 if mf(filename):
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
437 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
438 data = util.filter(data, cmd)
438 data = util.filter(data, cmd)
439 break
439 break
440
440
441 if fd:
441 if fd:
442 return fd.write(data)
442 return fd.write(data)
443 return self.wopener(filename, 'w').write(data)
443 return self.wopener(filename, 'w').write(data)
444
444
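wread() and wwrite() above pipe file contents through the first matching command from the [encode] and [decode] sections of the configuration, via util.filter(). A sketch of such a configuration (the gzip pairing is only illustrative):

    [encode]
    # applied by wread(), i.e. when file data is read from the working directory
    *.gz = gunzip

    [decode]
    # applied by wwrite(), i.e. when data is written back to the working directory
    *.gz = gzip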
445 def transaction(self):
445 def transaction(self):
446 tr = self.transhandle
446 tr = self.transhandle
447 if tr != None and tr.running():
447 if tr != None and tr.running():
448 return tr.nest()
448 return tr.nest()
449
449
450 # save dirstate for rollback
450 # save dirstate for rollback
451 try:
451 try:
452 ds = self.opener("dirstate").read()
452 ds = self.opener("dirstate").read()
453 except IOError:
453 except IOError:
454 ds = ""
454 ds = ""
455 self.opener("journal.dirstate", "w").write(ds)
455 self.opener("journal.dirstate", "w").write(ds)
456
456
457 tr = transaction.transaction(self.ui.warn, self.sopener,
457 tr = transaction.transaction(self.ui.warn, self.sopener,
458 self.sjoin("journal"),
458 self.sjoin("journal"),
459 aftertrans(self.path))
459 aftertrans(self.path))
460 self.transhandle = tr
460 self.transhandle = tr
461 return tr
461 return tr
462
462
463 def recover(self):
463 def recover(self):
464 l = self.lock()
464 l = self.lock()
465 if os.path.exists(self.sjoin("journal")):
465 if os.path.exists(self.sjoin("journal")):
466 self.ui.status(_("rolling back interrupted transaction\n"))
466 self.ui.status(_("rolling back interrupted transaction\n"))
467 transaction.rollback(self.sopener, self.sjoin("journal"))
467 transaction.rollback(self.sopener, self.sjoin("journal"))
468 self.reload()
468 self.reload()
469 return True
469 return True
470 else:
470 else:
471 self.ui.warn(_("no interrupted transaction available\n"))
471 self.ui.warn(_("no interrupted transaction available\n"))
472 return False
472 return False
473
473
474 def rollback(self, wlock=None):
474 def rollback(self, wlock=None):
475 if not wlock:
475 if not wlock:
476 wlock = self.wlock()
476 wlock = self.wlock()
477 l = self.lock()
477 l = self.lock()
478 if os.path.exists(self.sjoin("undo")):
478 if os.path.exists(self.sjoin("undo")):
479 self.ui.status(_("rolling back last transaction\n"))
479 self.ui.status(_("rolling back last transaction\n"))
480 transaction.rollback(self.sopener, self.sjoin("undo"))
480 transaction.rollback(self.sopener, self.sjoin("undo"))
481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
481 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
482 self.reload()
482 self.reload()
483 self.wreload()
483 self.wreload()
484 else:
484 else:
485 self.ui.warn(_("no rollback information available\n"))
485 self.ui.warn(_("no rollback information available\n"))
486
486
487 def wreload(self):
487 def wreload(self):
488 self.dirstate.read()
488 self.dirstate.read()
489
489
490 def reload(self):
490 def reload(self):
491 self.changelog.load()
491 self.changelog.load()
492 self.manifest.load()
492 self.manifest.load()
493 self.tagscache = None
493 self.tagscache = None
494 self.nodetagscache = None
494 self.nodetagscache = None
495
495
496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
496 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
497 desc=None):
497 desc=None):
498 try:
498 try:
499 l = lock.lock(lockname, 0, releasefn, desc=desc)
499 l = lock.lock(lockname, 0, releasefn, desc=desc)
500 except lock.LockHeld, inst:
500 except lock.LockHeld, inst:
501 if not wait:
501 if not wait:
502 raise
502 raise
503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
503 self.ui.warn(_("waiting for lock on %s held by %s\n") %
504 (desc, inst.args[0]))
504 (desc, inst.args[0]))
505 # default to 600 seconds timeout
505 # default to 600 seconds timeout
506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
506 l = lock.lock(lockname, int(self.ui.config("ui", "timeout", "600")),
507 releasefn, desc=desc)
507 releasefn, desc=desc)
508 if acquirefn:
508 if acquirefn:
509 acquirefn()
509 acquirefn()
510 return l
510 return l
511
511
512 def lock(self, wait=1):
512 def lock(self, wait=1):
513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
513 return self.do_lock(self.sjoin("lock"), wait, acquirefn=self.reload,
514 desc=_('repository %s') % self.origroot)
514 desc=_('repository %s') % self.origroot)
515
515
516 def wlock(self, wait=1):
516 def wlock(self, wait=1):
517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
517 return self.do_lock(self.join("wlock"), wait, self.dirstate.write,
518 self.wreload,
518 self.wreload,
519 desc=_('working directory of %s') % self.origroot)
519 desc=_('working directory of %s') % self.origroot)
520
520
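do_lock() above first attempts a non-blocking acquire; if the lock is already held it warns and retries with a timeout taken from the ui section of the configuration, defaulting to 600 seconds. For example:

    [ui]
    # wait at most five minutes for repository and working-directory locks
    timeout = 300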
521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
521 def filecommit(self, fn, manifest1, manifest2, linkrev, transaction, changelist):
522 """
522 """
523 commit an individual file as part of a larger transaction
523 commit an individual file as part of a larger transaction
524 """
524 """
525
525
526 t = self.wread(fn)
526 t = self.wread(fn)
527 fl = self.file(fn)
527 fl = self.file(fn)
528 fp1 = manifest1.get(fn, nullid)
528 fp1 = manifest1.get(fn, nullid)
529 fp2 = manifest2.get(fn, nullid)
529 fp2 = manifest2.get(fn, nullid)
530
530
531 meta = {}
531 meta = {}
532 cp = self.dirstate.copied(fn)
532 cp = self.dirstate.copied(fn)
533 if cp:
533 if cp:
534 meta["copy"] = cp
534 meta["copy"] = cp
535 if not manifest2: # not a branch merge
535 if not manifest2: # not a branch merge
536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
536 meta["copyrev"] = hex(manifest1.get(cp, nullid))
537 fp2 = nullid
537 fp2 = nullid
538 elif fp2 != nullid: # copied on remote side
538 elif fp2 != nullid: # copied on remote side
539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
539 meta["copyrev"] = hex(manifest1.get(cp, nullid))
540 else: # copied on local side, reversed
540 else: # copied on local side, reversed
541 meta["copyrev"] = hex(manifest2.get(cp))
541 meta["copyrev"] = hex(manifest2.get(cp))
542 fp2 = nullid
542 fp2 = nullid
543 self.ui.debug(_(" %s: copy %s:%s\n") %
543 self.ui.debug(_(" %s: copy %s:%s\n") %
544 (fn, cp, meta["copyrev"]))
544 (fn, cp, meta["copyrev"]))
545 fp1 = nullid
545 fp1 = nullid
546 elif fp2 != nullid:
546 elif fp2 != nullid:
547 # is one parent an ancestor of the other?
547 # is one parent an ancestor of the other?
548 fpa = fl.ancestor(fp1, fp2)
548 fpa = fl.ancestor(fp1, fp2)
549 if fpa == fp1:
549 if fpa == fp1:
550 fp1, fp2 = fp2, nullid
550 fp1, fp2 = fp2, nullid
551 elif fpa == fp2:
551 elif fpa == fp2:
552 fp2 = nullid
552 fp2 = nullid
553
553
554 # is the file unmodified from the parent? report existing entry
554 # is the file unmodified from the parent? report existing entry
555 if fp2 == nullid and not fl.cmp(fp1, t):
555 if fp2 == nullid and not fl.cmp(fp1, t):
556 return fp1
556 return fp1
557
557
558 changelist.append(fn)
558 changelist.append(fn)
559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
559 return fl.add(t, meta, transaction, linkrev, fp1, fp2)
560
560
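When the dirstate records a copy, filecommit() above stores the rename as filelog metadata rather than as a second filelog parent: the source path plus the hex nodeid of the source file in the relevant parent manifest, after which fp2 is reset to nullid so the copy is not mistaken for a merge. Roughly, the meta dictionary handed to fl.add() for a file copied from a.txt would be:

    meta = {
        "copy": "a.txt",                    # path the file was copied from
        "copyrev": "<40-char hex nodeid>",  # a.txt's node in the parent manifest
    }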
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
561 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
562 orig_parent = self.dirstate.parents()[0] or nullid
562 orig_parent = self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
563 p1 = p1 or self.dirstate.parents()[0] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
564 p2 = p2 or self.dirstate.parents()[1] or nullid
565 c1 = self.changelog.read(p1)
565 c1 = self.changelog.read(p1)
566 c2 = self.changelog.read(p2)
566 c2 = self.changelog.read(p2)
567 m1 = self.manifest.read(c1[0]).copy()
567 m1 = self.manifest.read(c1[0]).copy()
568 m2 = self.manifest.read(c2[0])
568 m2 = self.manifest.read(c2[0])
569 changed = []
569 changed = []
570 removed = []
570 removed = []
571
571
572 if orig_parent == p1:
572 if orig_parent == p1:
573 update_dirstate = 1
573 update_dirstate = 1
574 else:
574 else:
575 update_dirstate = 0
575 update_dirstate = 0
576
576
577 if not wlock:
577 if not wlock:
578 wlock = self.wlock()
578 wlock = self.wlock()
579 l = self.lock()
579 l = self.lock()
580 tr = self.transaction()
580 tr = self.transaction()
581 linkrev = self.changelog.count()
581 linkrev = self.changelog.count()
582 for f in files:
582 for f in files:
583 try:
583 try:
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
584 m1[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
585 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
586 except IOError:
586 except IOError:
587 try:
587 try:
588 del m1[f]
588 del m1[f]
589 if update_dirstate:
589 if update_dirstate:
590 self.dirstate.forget([f])
590 self.dirstate.forget([f])
591 removed.append(f)
591 removed.append(f)
592 except:
592 except:
593 # deleted from p2?
593 # deleted from p2?
594 pass
594 pass
595
595
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
596 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
597 user = user or self.ui.username()
597 user = user or self.ui.username()
598 n = self.changelog.add(mnode, changed + removed, text,
598 n = self.changelog.add(mnode, changed + removed, text,
599 tr, p1, p2, user, date)
599 tr, p1, p2, user, date)
600 tr.close()
600 tr.close()
601 if update_dirstate:
601 if update_dirstate:
602 self.dirstate.setparents(n, nullid)
602 self.dirstate.setparents(n, nullid)
603
603
604 def commit(self, files=None, text="", user=None, date=None,
604 def commit(self, files=None, text="", user=None, date=None,
605 match=util.always, force=False, lock=None, wlock=None,
605 match=util.always, force=False, lock=None, wlock=None,
606 force_editor=False):
606 force_editor=False):
607 commit = []
607 commit = []
608 remove = []
608 remove = []
609 changed = []
609 changed = []
610
610
611 if files:
611 if files:
612 for f in files:
612 for f in files:
613 s = self.dirstate.state(f)
613 s = self.dirstate.state(f)
614 if s in 'nmai':
614 if s in 'nmai':
615 commit.append(f)
615 commit.append(f)
616 elif s == 'r':
616 elif s == 'r':
617 remove.append(f)
617 remove.append(f)
618 else:
618 else:
619 self.ui.warn(_("%s not tracked!\n") % f)
619 self.ui.warn(_("%s not tracked!\n") % f)
620 else:
620 else:
621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
621 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
622 commit = modified + added
622 commit = modified + added
623 remove = removed
623 remove = removed
624
624
625 p1, p2 = self.dirstate.parents()
625 p1, p2 = self.dirstate.parents()
626 c1 = self.changelog.read(p1)
626 c1 = self.changelog.read(p1)
627 c2 = self.changelog.read(p2)
627 c2 = self.changelog.read(p2)
628 m1 = self.manifest.read(c1[0]).copy()
628 m1 = self.manifest.read(c1[0]).copy()
629 m2 = self.manifest.read(c2[0])
629 m2 = self.manifest.read(c2[0])
630
630
631 branchname = self.workingctx().branch()
631 branchname = self.workingctx().branch()
632 oldname = c1[5].get("branch", "")
632 oldname = c1[5].get("branch", "")
633
633
634 if not commit and not remove and not force and p2 == nullid and \
634 if not commit and not remove and not force and p2 == nullid and \
635 branchname == oldname:
635 branchname == oldname:
636 self.ui.status(_("nothing changed\n"))
636 self.ui.status(_("nothing changed\n"))
637 return None
637 return None
638
638
639 xp1 = hex(p1)
639 xp1 = hex(p1)
640 if p2 == nullid: xp2 = ''
640 if p2 == nullid: xp2 = ''
641 else: xp2 = hex(p2)
641 else: xp2 = hex(p2)
642
642
643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
643 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
644
644
645 if not wlock:
645 if not wlock:
646 wlock = self.wlock()
646 wlock = self.wlock()
647 if not lock:
647 if not lock:
648 lock = self.lock()
648 lock = self.lock()
649 tr = self.transaction()
649 tr = self.transaction()
650
650
651 # check in files
651 # check in files
652 new = {}
652 new = {}
653 linkrev = self.changelog.count()
653 linkrev = self.changelog.count()
654 commit.sort()
654 commit.sort()
655 for f in commit:
655 for f in commit:
656 self.ui.note(f + "\n")
656 self.ui.note(f + "\n")
657 try:
657 try:
658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
658 new[f] = self.filecommit(f, m1, m2, linkrev, tr, changed)
659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
659 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
660 except IOError:
660 except IOError:
661 self.ui.warn(_("trouble committing %s!\n") % f)
661 self.ui.warn(_("trouble committing %s!\n") % f)
662 raise
662 raise
663
663
664 # update manifest
664 # update manifest
665 m1.update(new)
665 m1.update(new)
666 for f in remove:
666 for f in remove:
667 if f in m1:
667 if f in m1:
668 del m1[f]
668 del m1[f]
669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
669 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0], (new, remove))
670
670
671 # add changeset
671 # add changeset
672 new = new.keys()
672 new = new.keys()
673 new.sort()
673 new.sort()
674
674
675 user = user or self.ui.username()
675 user = user or self.ui.username()
676 if not text or force_editor:
676 if not text or force_editor:
677 edittext = []
677 edittext = []
678 if text:
678 if text:
679 edittext.append(text)
679 edittext.append(text)
680 edittext.append("")
680 edittext.append("")
681 if p2 != nullid:
681 if p2 != nullid:
682 edittext.append("HG: branch merge")
682 edittext.append("HG: branch merge")
683 edittext.extend(["HG: changed %s" % f for f in changed])
683 edittext.extend(["HG: changed %s" % f for f in changed])
684 edittext.extend(["HG: removed %s" % f for f in remove])
684 edittext.extend(["HG: removed %s" % f for f in remove])
685 if not changed and not remove:
685 if not changed and not remove:
686 edittext.append("HG: no files changed")
686 edittext.append("HG: no files changed")
687 edittext.append("")
687 edittext.append("")
688 # run editor in the repository root
688 # run editor in the repository root
689 olddir = os.getcwd()
689 olddir = os.getcwd()
690 os.chdir(self.root)
690 os.chdir(self.root)
691 text = self.ui.edit("\n".join(edittext), user)
691 text = self.ui.edit("\n".join(edittext), user)
692 os.chdir(olddir)
692 os.chdir(olddir)
693
693
694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
694 lines = [line.rstrip() for line in text.rstrip().splitlines()]
695 while lines and not lines[0]:
695 while lines and not lines[0]:
696 del lines[0]
696 del lines[0]
697 if not lines:
697 if not lines:
698 return None
698 return None
699 text = '\n'.join(lines)
699 text = '\n'.join(lines)
700 extra = {}
700 extra = {}
701 if branchname:
701 if branchname:
702 extra["branch"] = branchname
702 extra["branch"] = branchname
703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
703 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2,
704 user, date, extra)
704 user, date, extra)
705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
705 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
706 parent2=xp2)
706 parent2=xp2)
707 tr.close()
707 tr.close()
708
708
709 self.dirstate.setparents(n)
709 self.dirstate.setparents(n)
710 self.dirstate.update(new, "n")
710 self.dirstate.update(new, "n")
711 self.dirstate.forget(remove)
711 self.dirstate.forget(remove)
712
712
713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
713 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
714 return n
714 return n
715
715
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
716 def walk(self, node=None, files=[], match=util.always, badmatch=None):
717 '''
718 walk recursively through the directory tree or a given
719 changeset, finding all files matched by the match
720 function
721
722 results are yielded in a tuple (src, filename), where src
723 is one of:
724 'f' the file was found in the directory tree
725 'm' the file was only in the dirstate and not in the tree
726 'b' file was not found and matched badmatch
727 '''
728
717 if node:
729 if node:
718 fdict = dict.fromkeys(files)
730 fdict = dict.fromkeys(files)
719 for fn in self.manifest.read(self.changelog.read(node)[0]):
731 for fn in self.manifest.read(self.changelog.read(node)[0]):
720 for ffn in fdict:
732 for ffn in fdict:
721 # match if the file is the exact name or a directory
733 # match if the file is the exact name or a directory
722 if ffn == fn or fn.startswith("%s/" % ffn):
734 if ffn == fn or fn.startswith("%s/" % ffn):
723 del fdict[ffn]
735 del fdict[ffn]
724 break
736 break
725 if match(fn):
737 if match(fn):
726 yield 'm', fn
738 yield 'm', fn
727 for fn in fdict:
739 for fn in fdict:
728 if badmatch and badmatch(fn):
740 if badmatch and badmatch(fn):
729 if match(fn):
741 if match(fn):
730 yield 'b', fn
742 yield 'b', fn
731 else:
743 else:
732 self.ui.warn(_('%s: No such file in rev %s\n') % (
744 self.ui.warn(_('%s: No such file in rev %s\n') % (
733 util.pathto(self.getcwd(), fn), short(node)))
745 util.pathto(self.getcwd(), fn), short(node)))
734 else:
746 else:
735 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
747 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
736 yield src, fn
748 yield src, fn
737
749
738 def status(self, node1=None, node2=None, files=[], match=util.always,
750 def status(self, node1=None, node2=None, files=[], match=util.always,
739 wlock=None, list_ignored=False, list_clean=False):
751 wlock=None, list_ignored=False, list_clean=False):
740 """return status of files between two nodes or node and working directory
752 """return status of files between two nodes or node and working directory
741
753
742 If node1 is None, use the first dirstate parent instead.
754 If node1 is None, use the first dirstate parent instead.
743 If node2 is None, compare node1 with working directory.
755 If node2 is None, compare node1 with working directory.
744 """
756 """
745
757
746 def fcmp(fn, mf):
758 def fcmp(fn, mf):
747 t1 = self.wread(fn)
759 t1 = self.wread(fn)
748 return self.file(fn).cmp(mf.get(fn, nullid), t1)
760 return self.file(fn).cmp(mf.get(fn, nullid), t1)
749
761
750 def mfmatches(node):
762 def mfmatches(node):
751 change = self.changelog.read(node)
763 change = self.changelog.read(node)
752 mf = self.manifest.read(change[0]).copy()
764 mf = self.manifest.read(change[0]).copy()
753 for fn in mf.keys():
765 for fn in mf.keys():
754 if not match(fn):
766 if not match(fn):
755 del mf[fn]
767 del mf[fn]
756 return mf
768 return mf
757
769
758 modified, added, removed, deleted, unknown = [], [], [], [], []
770 modified, added, removed, deleted, unknown = [], [], [], [], []
759 ignored, clean = [], []
771 ignored, clean = [], []
760
772
761 compareworking = False
773 compareworking = False
762 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
774 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
763 compareworking = True
775 compareworking = True
764
776
765 if not compareworking:
777 if not compareworking:
766 # read the manifest from node1 before the manifest from node2,
778 # read the manifest from node1 before the manifest from node2,
767 # so that we'll hit the manifest cache if we're going through
779 # so that we'll hit the manifest cache if we're going through
768 # all the revisions in parent->child order.
780 # all the revisions in parent->child order.
769 mf1 = mfmatches(node1)
781 mf1 = mfmatches(node1)
770
782
771 # are we comparing the working directory?
783 # are we comparing the working directory?
772 if not node2:
784 if not node2:
773 if not wlock:
785 if not wlock:
774 try:
786 try:
775 wlock = self.wlock(wait=0)
787 wlock = self.wlock(wait=0)
776 except lock.LockException:
788 except lock.LockException:
777 wlock = None
789 wlock = None
778 (lookup, modified, added, removed, deleted, unknown,
790 (lookup, modified, added, removed, deleted, unknown,
779 ignored, clean) = self.dirstate.status(files, match,
791 ignored, clean) = self.dirstate.status(files, match,
780 list_ignored, list_clean)
792 list_ignored, list_clean)
781
793
782 # are we comparing working dir against its parent?
794 # are we comparing working dir against its parent?
783 if compareworking:
795 if compareworking:
784 if lookup:
796 if lookup:
785 # do a full compare of any files that might have changed
797 # do a full compare of any files that might have changed
786 mf2 = mfmatches(self.dirstate.parents()[0])
798 mf2 = mfmatches(self.dirstate.parents()[0])
787 for f in lookup:
799 for f in lookup:
788 if fcmp(f, mf2):
800 if fcmp(f, mf2):
789 modified.append(f)
801 modified.append(f)
790 else:
802 else:
791 clean.append(f)
803 clean.append(f)
792 if wlock is not None:
804 if wlock is not None:
793 self.dirstate.update([f], "n")
805 self.dirstate.update([f], "n")
794 else:
806 else:
795 # we are comparing working dir against non-parent
807 # we are comparing working dir against non-parent
796 # generate a pseudo-manifest for the working dir
808 # generate a pseudo-manifest for the working dir
797 # XXX: create it in dirstate.py ?
809 # XXX: create it in dirstate.py ?
798 mf2 = mfmatches(self.dirstate.parents()[0])
810 mf2 = mfmatches(self.dirstate.parents()[0])
799 for f in lookup + modified + added:
811 for f in lookup + modified + added:
800 mf2[f] = ""
812 mf2[f] = ""
801 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
813 mf2.set(f, execf=util.is_exec(self.wjoin(f), mf2.execf(f)))
802 for f in removed:
814 for f in removed:
803 if f in mf2:
815 if f in mf2:
804 del mf2[f]
816 del mf2[f]
805 else:
817 else:
806 # we are comparing two revisions
818 # we are comparing two revisions
807 mf2 = mfmatches(node2)
819 mf2 = mfmatches(node2)
808
820
809 if not compareworking:
821 if not compareworking:
810 # flush lists from dirstate before comparing manifests
822 # flush lists from dirstate before comparing manifests
811 modified, added, clean = [], [], []
823 modified, added, clean = [], [], []
812
824
813 # make sure to sort the files so we talk to the disk in a
825 # make sure to sort the files so we talk to the disk in a
814 # reasonable order
826 # reasonable order
815 mf2keys = mf2.keys()
827 mf2keys = mf2.keys()
816 mf2keys.sort()
828 mf2keys.sort()
817 for fn in mf2keys:
829 for fn in mf2keys:
818 if mf1.has_key(fn):
830 if mf1.has_key(fn):
819 if mf1.flags(fn) != mf2.flags(fn) or \
831 if mf1.flags(fn) != mf2.flags(fn) or \
820 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
832 (mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1))):
821 modified.append(fn)
833 modified.append(fn)
822 elif list_clean:
834 elif list_clean:
823 clean.append(fn)
835 clean.append(fn)
824 del mf1[fn]
836 del mf1[fn]
825 else:
837 else:
826 added.append(fn)
838 added.append(fn)
827
839
828 removed = mf1.keys()
840 removed = mf1.keys()
829
841
830 # sort and return results:
842 # sort and return results:
831 for l in modified, added, removed, deleted, unknown, ignored, clean:
843 for l in modified, added, removed, deleted, unknown, ignored, clean:
832 l.sort()
844 l.sort()
833 return (modified, added, removed, deleted, unknown, ignored, clean)
845 return (modified, added, removed, deleted, unknown, ignored, clean)
834
846
835 def add(self, list, wlock=None):
847 def add(self, list, wlock=None):
836 if not wlock:
848 if not wlock:
837 wlock = self.wlock()
849 wlock = self.wlock()
838 for f in list:
850 for f in list:
839 p = self.wjoin(f)
851 p = self.wjoin(f)
840 if not os.path.exists(p):
852 if not os.path.exists(p):
841 self.ui.warn(_("%s does not exist!\n") % f)
853 self.ui.warn(_("%s does not exist!\n") % f)
842 elif not os.path.isfile(p):
854 elif not os.path.isfile(p):
843 self.ui.warn(_("%s not added: only files supported currently\n")
855 self.ui.warn(_("%s not added: only files supported currently\n")
844 % f)
856 % f)
845 elif self.dirstate.state(f) in 'an':
857 elif self.dirstate.state(f) in 'an':
846 self.ui.warn(_("%s already tracked!\n") % f)
858 self.ui.warn(_("%s already tracked!\n") % f)
847 else:
859 else:
848 self.dirstate.update([f], "a")
860 self.dirstate.update([f], "a")
849
861
850 def forget(self, list, wlock=None):
862 def forget(self, list, wlock=None):
851 if not wlock:
863 if not wlock:
852 wlock = self.wlock()
864 wlock = self.wlock()
853 for f in list:
865 for f in list:
854 if self.dirstate.state(f) not in 'ai':
866 if self.dirstate.state(f) not in 'ai':
855 self.ui.warn(_("%s not added!\n") % f)
867 self.ui.warn(_("%s not added!\n") % f)
856 else:
868 else:
857 self.dirstate.forget([f])
869 self.dirstate.forget([f])
858
870
859 def remove(self, list, unlink=False, wlock=None):
871 def remove(self, list, unlink=False, wlock=None):
860 if unlink:
872 if unlink:
861 for f in list:
873 for f in list:
862 try:
874 try:
863 util.unlink(self.wjoin(f))
875 util.unlink(self.wjoin(f))
864 except OSError, inst:
876 except OSError, inst:
865 if inst.errno != errno.ENOENT:
877 if inst.errno != errno.ENOENT:
866 raise
878 raise
867 if not wlock:
879 if not wlock:
868 wlock = self.wlock()
880 wlock = self.wlock()
869 for f in list:
881 for f in list:
870 p = self.wjoin(f)
882 p = self.wjoin(f)
871 if os.path.exists(p):
883 if os.path.exists(p):
872 self.ui.warn(_("%s still exists!\n") % f)
884 self.ui.warn(_("%s still exists!\n") % f)
873 elif self.dirstate.state(f) == 'a':
885 elif self.dirstate.state(f) == 'a':
874 self.dirstate.forget([f])
886 self.dirstate.forget([f])
875 elif f not in self.dirstate:
887 elif f not in self.dirstate:
876 self.ui.warn(_("%s not tracked!\n") % f)
888 self.ui.warn(_("%s not tracked!\n") % f)
877 else:
889 else:
878 self.dirstate.update([f], "r")
890 self.dirstate.update([f], "r")
879
891
880 def undelete(self, list, wlock=None):
892 def undelete(self, list, wlock=None):
881 p = self.dirstate.parents()[0]
893 p = self.dirstate.parents()[0]
882 mn = self.changelog.read(p)[0]
894 mn = self.changelog.read(p)[0]
883 m = self.manifest.read(mn)
895 m = self.manifest.read(mn)
884 if not wlock:
896 if not wlock:
885 wlock = self.wlock()
897 wlock = self.wlock()
886 for f in list:
898 for f in list:
887 if self.dirstate.state(f) not in "r":
899 if self.dirstate.state(f) not in "r":
888 self.ui.warn(_("%s not removed!\n") % f)
900 self.ui.warn(_("%s not removed!\n") % f)
889 else:
901 else:
890 t = self.file(f).read(m[f])
902 t = self.file(f).read(m[f])
891 self.wwrite(f, t)
903 self.wwrite(f, t)
892 util.set_exec(self.wjoin(f), m.execf(f))
904 util.set_exec(self.wjoin(f), m.execf(f))
893 self.dirstate.update([f], "n")
905 self.dirstate.update([f], "n")
894
906
895 def copy(self, source, dest, wlock=None):
907 def copy(self, source, dest, wlock=None):
896 p = self.wjoin(dest)
908 p = self.wjoin(dest)
897 if not os.path.exists(p):
909 if not os.path.exists(p):
898 self.ui.warn(_("%s does not exist!\n") % dest)
910 self.ui.warn(_("%s does not exist!\n") % dest)
899 elif not os.path.isfile(p):
911 elif not os.path.isfile(p):
900 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
912 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
901 else:
913 else:
902 if not wlock:
914 if not wlock:
903 wlock = self.wlock()
915 wlock = self.wlock()
904 if self.dirstate.state(dest) == '?':
916 if self.dirstate.state(dest) == '?':
905 self.dirstate.update([dest], "a")
917 self.dirstate.update([dest], "a")
906 self.dirstate.copy(source, dest)
918 self.dirstate.copy(source, dest)
907
919
908 def heads(self, start=None):
920 def heads(self, start=None):
909 heads = self.changelog.heads(start)
921 heads = self.changelog.heads(start)
910 # sort the output in rev descending order
922 # sort the output in rev descending order
911 heads = [(-self.changelog.rev(h), h) for h in heads]
923 heads = [(-self.changelog.rev(h), h) for h in heads]
912 heads.sort()
924 heads.sort()
913 return [n for (r, n) in heads]
925 return [n for (r, n) in heads]
914
926
915 # branchlookup returns a dict giving a list of branches for
927 # branchlookup returns a dict giving a list of branches for
916 # each head. A branch is defined as the tag of a node or
928 # each head. A branch is defined as the tag of a node or
917 # the branch of the node's parents. If a node has multiple
929 # the branch of the node's parents. If a node has multiple
918 # branch tags, tags are eliminated if they are visible from other
930 # branch tags, tags are eliminated if they are visible from other
919 # branch tags.
931 # branch tags.
920 #
932 #
921 # So, for this graph: a->b->c->d->e
933 # So, for this graph: a->b->c->d->e
922 # \ /
934 # \ /
923 # aa -----/
935 # aa -----/
924 # a has tag 2.6.12
936 # a has tag 2.6.12
925 # d has tag 2.6.13
937 # d has tag 2.6.13
926 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
938 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
927 # for 2.6.12 can be reached from the node for 2.6.13, the 2.6.12 tag is eliminated
939 # for 2.6.12 can be reached from the node for 2.6.13, the 2.6.12 tag is eliminated
928 # from the list.
940 # from the list.
929 #
941 #
930 # It is possible that more than one head will have the same branch tag.
942 # It is possible that more than one head will have the same branch tag.
931 # callers need to check the result for multiple heads under the same
943 # callers need to check the result for multiple heads under the same
932 # branch tag if that is a problem for them (i.e. checkout of a specific
944 # branch tag if that is a problem for them (i.e. checkout of a specific
933 # branch).
945 # branch).
934 #
946 #
935 # passing in a specific branch will limit the depth of the search
947 # passing in a specific branch will limit the depth of the search
936 # through the parents. It won't limit the branches returned in the
948 # through the parents. It won't limit the branches returned in the
937 # result though.
949 # result though.
938 def branchlookup(self, heads=None, branch=None):
950 def branchlookup(self, heads=None, branch=None):
939 if not heads:
951 if not heads:
940 heads = self.heads()
952 heads = self.heads()
941 headt = [ h for h in heads ]
953 headt = [ h for h in heads ]
942 chlog = self.changelog
954 chlog = self.changelog
943 branches = {}
955 branches = {}
944 merges = []
956 merges = []
945 seenmerge = {}
957 seenmerge = {}
946
958
947 # traverse the tree once for each head, recording in the branches
959 # traverse the tree once for each head, recording in the branches
948 # dict which tags are visible from this head. The branches
960 # dict which tags are visible from this head. The branches
949 # dict also records which tags are visible from each tag
961 # dict also records which tags are visible from each tag
950 # while we traverse.
962 # while we traverse.
951 while headt or merges:
963 while headt or merges:
952 if merges:
964 if merges:
953 n, found = merges.pop()
965 n, found = merges.pop()
954 visit = [n]
966 visit = [n]
955 else:
967 else:
956 h = headt.pop()
968 h = headt.pop()
957 visit = [h]
969 visit = [h]
958 found = [h]
970 found = [h]
959 seen = {}
971 seen = {}
960 while visit:
972 while visit:
961 n = visit.pop()
973 n = visit.pop()
962 if n in seen:
974 if n in seen:
963 continue
975 continue
964 pp = chlog.parents(n)
976 pp = chlog.parents(n)
965 tags = self.nodetags(n)
977 tags = self.nodetags(n)
966 if tags:
978 if tags:
967 for x in tags:
979 for x in tags:
968 if x == 'tip':
980 if x == 'tip':
969 continue
981 continue
970 for f in found:
982 for f in found:
971 branches.setdefault(f, {})[n] = 1
983 branches.setdefault(f, {})[n] = 1
972 branches.setdefault(n, {})[n] = 1
984 branches.setdefault(n, {})[n] = 1
973 break
985 break
974 if n not in found:
986 if n not in found:
975 found.append(n)
987 found.append(n)
976 if branch in tags:
988 if branch in tags:
977 continue
989 continue
978 seen[n] = 1
990 seen[n] = 1
979 if pp[1] != nullid and n not in seenmerge:
991 if pp[1] != nullid and n not in seenmerge:
980 merges.append((pp[1], [x for x in found]))
992 merges.append((pp[1], [x for x in found]))
981 seenmerge[n] = 1
993 seenmerge[n] = 1
982 if pp[0] != nullid:
994 if pp[0] != nullid:
983 visit.append(pp[0])
995 visit.append(pp[0])
984 # traverse the branches dict, eliminating branch tags from each
996 # traverse the branches dict, eliminating branch tags from each
985 # head that are visible from another branch tag for that head.
997 # head that are visible from another branch tag for that head.
986 out = {}
998 out = {}
987 viscache = {}
999 viscache = {}
988 for h in heads:
1000 for h in heads:
989 def visible(node):
1001 def visible(node):
990 if node in viscache:
1002 if node in viscache:
991 return viscache[node]
1003 return viscache[node]
992 ret = {}
1004 ret = {}
993 visit = [node]
1005 visit = [node]
994 while visit:
1006 while visit:
995 x = visit.pop()
1007 x = visit.pop()
996 if x in viscache:
1008 if x in viscache:
997 ret.update(viscache[x])
1009 ret.update(viscache[x])
998 elif x not in ret:
1010 elif x not in ret:
999 ret[x] = 1
1011 ret[x] = 1
1000 if x in branches:
1012 if x in branches:
1001 visit[len(visit):] = branches[x].keys()
1013 visit[len(visit):] = branches[x].keys()
1002 viscache[node] = ret
1014 viscache[node] = ret
1003 return ret
1015 return ret
1004 if h not in branches:
1016 if h not in branches:
1005 continue
1017 continue
1006 # O(n^2), but somewhat limited. This only searches the
1018 # O(n^2), but somewhat limited. This only searches the
1007 # tags visible from a specific head, not all the tags in the
1019 # tags visible from a specific head, not all the tags in the
1008 # whole repo.
1020 # whole repo.
1009 for b in branches[h]:
1021 for b in branches[h]:
1010 vis = False
1022 vis = False
1011 for bb in branches[h].keys():
1023 for bb in branches[h].keys():
1012 if b != bb:
1024 if b != bb:
1013 if b in visible(bb):
1025 if b in visible(bb):
1014 vis = True
1026 vis = True
1015 break
1027 break
1016 if not vis:
1028 if not vis:
1017 l = out.setdefault(h, [])
1029 l = out.setdefault(h, [])
1018 l[len(l):] = self.nodetags(b)
1030 l[len(l):] = self.nodetags(b)
1019 return out
1031 return out
1020
1032
1021 def branches(self, nodes):
1033 def branches(self, nodes):
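# For each requested node, follow first parents back until a merge or the
# root is reached, and report (requested node, base of that linear run,
# base's first parent, base's second parent); the discovery code in
# findincoming below calls this via remote.branches().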
1022 if not nodes:
1034 if not nodes:
1023 nodes = [self.changelog.tip()]
1035 nodes = [self.changelog.tip()]
1024 b = []
1036 b = []
1025 for n in nodes:
1037 for n in nodes:
1026 t = n
1038 t = n
1027 while 1:
1039 while 1:
1028 p = self.changelog.parents(n)
1040 p = self.changelog.parents(n)
1029 if p[1] != nullid or p[0] == nullid:
1041 if p[1] != nullid or p[0] == nullid:
1030 b.append((t, n, p[0], p[1]))
1042 b.append((t, n, p[0], p[1]))
1031 break
1043 break
1032 n = p[0]
1044 n = p[0]
1033 return b
1045 return b
1034
1046
1035 def between(self, pairs):
1047 def between(self, pairs):
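# For each (top, bottom) pair, walk first parents from top towards bottom
# and collect the nodes at exponentially growing distances (1, 2, 4, ...);
# the discovery code uses these samples to binary-search for the boundary
# between known and unknown history (see remote.between() in findincoming).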
1036 r = []
1048 r = []
1037
1049
1038 for top, bottom in pairs:
1050 for top, bottom in pairs:
1039 n, l, i = top, [], 0
1051 n, l, i = top, [], 0
1040 f = 1
1052 f = 1
1041
1053
1042 while n != bottom:
1054 while n != bottom:
1043 p = self.changelog.parents(n)[0]
1055 p = self.changelog.parents(n)[0]
1044 if i == f:
1056 if i == f:
1045 l.append(n)
1057 l.append(n)
1046 f = f * 2
1058 f = f * 2
1047 n = p
1059 n = p
1048 i += 1
1060 i += 1
1049
1061
1050 r.append(l)
1062 r.append(l)
1051
1063
1052 return r
1064 return r
1053
1065
1054 def findincoming(self, remote, base=None, heads=None, force=False):
1066 def findincoming(self, remote, base=None, heads=None, force=False):
1055 """Return list of roots of the subsets of missing nodes from remote
1067 """Return list of roots of the subsets of missing nodes from remote
1056
1068
1057 If base dict is specified, assume that these nodes and their parents
1069 If base dict is specified, assume that these nodes and their parents
1058 exist on the remote side and that no child of a node of base exists
1070 exist on the remote side and that no child of a node of base exists
1059 in both remote and self.
1071 in both remote and self.
1060 Furthermore base will be updated to include the nodes that exist in
1072 Furthermore base will be updated to include the nodes that exist in
1061 both self and remote but none of whose children exist in both self and remote.
1073 both self and remote but none of whose children exist in both self and remote.
1062 If a list of heads is specified, return only nodes which are heads
1074 If a list of heads is specified, return only nodes which are heads
1063 or ancestors of these heads.
1075 or ancestors of these heads.
1064
1076
1065 All the ancestors of base are in self and in remote.
1077 All the ancestors of base are in self and in remote.
1066 All the descendants of the list returned are missing in self.
1078 All the descendants of the list returned are missing in self.
1067 (and so we know that the rest of the nodes are missing in remote, see
1079 (and so we know that the rest of the nodes are missing in remote, see
1068 outgoing)
1080 outgoing)
1069 """
1081 """
1070 m = self.changelog.nodemap
1082 m = self.changelog.nodemap
1071 search = []
1083 search = []
1072 fetch = {}
1084 fetch = {}
1073 seen = {}
1085 seen = {}
1074 seenbranch = {}
1086 seenbranch = {}
1075 if base == None:
1087 if base == None:
1076 base = {}
1088 base = {}
1077
1089
1078 if not heads:
1090 if not heads:
1079 heads = remote.heads()
1091 heads = remote.heads()
1080
1092
1081 if self.changelog.tip() == nullid:
1093 if self.changelog.tip() == nullid:
1082 base[nullid] = 1
1094 base[nullid] = 1
1083 if heads != [nullid]:
1095 if heads != [nullid]:
1084 return [nullid]
1096 return [nullid]
1085 return []
1097 return []
1086
1098
1087 # assume we're closer to the tip than the root
1099 # assume we're closer to the tip than the root
1088 # and start by examining the heads
1100 # and start by examining the heads
1089 self.ui.status(_("searching for changes\n"))
1101 self.ui.status(_("searching for changes\n"))
1090
1102
1091 unknown = []
1103 unknown = []
1092 for h in heads:
1104 for h in heads:
1093 if h not in m:
1105 if h not in m:
1094 unknown.append(h)
1106 unknown.append(h)
1095 else:
1107 else:
1096 base[h] = 1
1108 base[h] = 1
1097
1109
1098 if not unknown:
1110 if not unknown:
1099 return []
1111 return []
1100
1112
1101 req = dict.fromkeys(unknown)
1113 req = dict.fromkeys(unknown)
1102 reqcnt = 0
1114 reqcnt = 0
1103
1115
1104 # search through remote branches
1116 # search through remote branches
1105 # a 'branch' here is a linear segment of history, with four parts:
1117 # a 'branch' here is a linear segment of history, with four parts:
1106 # head, root, first parent, second parent
1118 # head, root, first parent, second parent
1107 # (a branch always has two parents (or none) by definition)
1119 # (a branch always has two parents (or none) by definition)
1108 unknown = remote.branches(unknown)
1120 unknown = remote.branches(unknown)
1109 while unknown:
1121 while unknown:
1110 r = []
1122 r = []
1111 while unknown:
1123 while unknown:
1112 n = unknown.pop(0)
1124 n = unknown.pop(0)
1113 if n[0] in seen:
1125 if n[0] in seen:
1114 continue
1126 continue
1115
1127
1116 self.ui.debug(_("examining %s:%s\n")
1128 self.ui.debug(_("examining %s:%s\n")
1117 % (short(n[0]), short(n[1])))
1129 % (short(n[0]), short(n[1])))
1118 if n[0] == nullid: # found the end of the branch
1130 if n[0] == nullid: # found the end of the branch
1119 pass
1131 pass
1120 elif n in seenbranch:
1132 elif n in seenbranch:
1121 self.ui.debug(_("branch already found\n"))
1133 self.ui.debug(_("branch already found\n"))
1122 continue
1134 continue
1123 elif n[1] and n[1] in m: # do we know the base?
1135 elif n[1] and n[1] in m: # do we know the base?
1124 self.ui.debug(_("found incomplete branch %s:%s\n")
1136 self.ui.debug(_("found incomplete branch %s:%s\n")
1125 % (short(n[0]), short(n[1])))
1137 % (short(n[0]), short(n[1])))
1126 search.append(n) # schedule branch range for scanning
1138 search.append(n) # schedule branch range for scanning
1127 seenbranch[n] = 1
1139 seenbranch[n] = 1
1128 else:
1140 else:
1129 if n[1] not in seen and n[1] not in fetch:
1141 if n[1] not in seen and n[1] not in fetch:
1130 if n[2] in m and n[3] in m:
1142 if n[2] in m and n[3] in m:
1131 self.ui.debug(_("found new changeset %s\n") %
1143 self.ui.debug(_("found new changeset %s\n") %
1132 short(n[1]))
1144 short(n[1]))
1133 fetch[n[1]] = 1 # earliest unknown
1145 fetch[n[1]] = 1 # earliest unknown
1134 for p in n[2:4]:
1146 for p in n[2:4]:
1135 if p in m:
1147 if p in m:
1136 base[p] = 1 # latest known
1148 base[p] = 1 # latest known
1137
1149
1138 for p in n[2:4]:
1150 for p in n[2:4]:
1139 if p not in req and p not in m:
1151 if p not in req and p not in m:
1140 r.append(p)
1152 r.append(p)
1141 req[p] = 1
1153 req[p] = 1
1142 seen[n[0]] = 1
1154 seen[n[0]] = 1
1143
1155
1144 if r:
1156 if r:
1145 reqcnt += 1
1157 reqcnt += 1
1146 self.ui.debug(_("request %d: %s\n") %
1158 self.ui.debug(_("request %d: %s\n") %
1147 (reqcnt, " ".join(map(short, r))))
1159 (reqcnt, " ".join(map(short, r))))
1148 for p in xrange(0, len(r), 10):
1160 for p in xrange(0, len(r), 10):
1149 for b in remote.branches(r[p:p+10]):
1161 for b in remote.branches(r[p:p+10]):
1150 self.ui.debug(_("received %s:%s\n") %
1162 self.ui.debug(_("received %s:%s\n") %
1151 (short(b[0]), short(b[1])))
1163 (short(b[0]), short(b[1])))
1152 unknown.append(b)
1164 unknown.append(b)
1153
1165
1154 # do binary search on the branches we found
1166 # do binary search on the branches we found
1155 while search:
1167 while search:
1156 n = search.pop(0)
1168 n = search.pop(0)
1157 reqcnt += 1
1169 reqcnt += 1
1158 l = remote.between([(n[0], n[1])])[0]
1170 l = remote.between([(n[0], n[1])])[0]
1159 l.append(n[1])
1171 l.append(n[1])
1160 p = n[0]
1172 p = n[0]
1161 f = 1
1173 f = 1
1162 for i in l:
1174 for i in l:
1163 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1175 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1164 if i in m:
1176 if i in m:
1165 if f <= 2:
1177 if f <= 2:
1166 self.ui.debug(_("found new branch changeset %s\n") %
1178 self.ui.debug(_("found new branch changeset %s\n") %
1167 short(p))
1179 short(p))
1168 fetch[p] = 1
1180 fetch[p] = 1
1169 base[i] = 1
1181 base[i] = 1
1170 else:
1182 else:
1171 self.ui.debug(_("narrowed branch search to %s:%s\n")
1183 self.ui.debug(_("narrowed branch search to %s:%s\n")
1172 % (short(p), short(i)))
1184 % (short(p), short(i)))
1173 search.append((p, i))
1185 search.append((p, i))
1174 break
1186 break
1175 p, f = i, f * 2
1187 p, f = i, f * 2
1176
1188
1177 # sanity check our fetch list
1189 # sanity check our fetch list
1178 for f in fetch.keys():
1190 for f in fetch.keys():
1179 if f in m:
1191 if f in m:
1180 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1192 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1181
1193
1182 if base.keys() == [nullid]:
1194 if base.keys() == [nullid]:
1183 if force:
1195 if force:
1184 self.ui.warn(_("warning: repository is unrelated\n"))
1196 self.ui.warn(_("warning: repository is unrelated\n"))
1185 else:
1197 else:
1186 raise util.Abort(_("repository is unrelated"))
1198 raise util.Abort(_("repository is unrelated"))
1187
1199
1188 self.ui.debug(_("found new changesets starting at ") +
1200 self.ui.debug(_("found new changesets starting at ") +
1189 " ".join([short(f) for f in fetch]) + "\n")
1201 " ".join([short(f) for f in fetch]) + "\n")
1190
1202
1191 self.ui.debug(_("%d total queries\n") % reqcnt)
1203 self.ui.debug(_("%d total queries\n") % reqcnt)
1192
1204
1193 return fetch.keys()
1205 return fetch.keys()
1194
1206
1195 def findoutgoing(self, remote, base=None, heads=None, force=False):
1207 def findoutgoing(self, remote, base=None, heads=None, force=False):
1196 """Return list of nodes that are roots of subsets not in remote
1208 """Return list of nodes that are roots of subsets not in remote
1197
1209
1198 If base dict is specified, assume that these nodes and their parents
1210 If base dict is specified, assume that these nodes and their parents
1199 exist on the remote side.
1211 exist on the remote side.
1200 If a list of heads is specified, return only nodes which are heads
1212 If a list of heads is specified, return only nodes which are heads
1201 or ancestors of these heads, and return a second element which
1213 or ancestors of these heads, and return a second element which
1202 contains all remote heads which get new children.
1214 contains all remote heads which get new children.
1203 """
1215 """
1204 if base == None:
1216 if base == None:
1205 base = {}
1217 base = {}
1206 self.findincoming(remote, base, heads, force=force)
1218 self.findincoming(remote, base, heads, force=force)
1207
1219
1208 self.ui.debug(_("common changesets up to ")
1220 self.ui.debug(_("common changesets up to ")
1209 + " ".join(map(short, base.keys())) + "\n")
1221 + " ".join(map(short, base.keys())) + "\n")
1210
1222
1211 remain = dict.fromkeys(self.changelog.nodemap)
1223 remain = dict.fromkeys(self.changelog.nodemap)
1212
1224
1213 # prune everything remote has from the tree
1225 # prune everything remote has from the tree
1214 del remain[nullid]
1226 del remain[nullid]
1215 remove = base.keys()
1227 remove = base.keys()
1216 while remove:
1228 while remove:
1217 n = remove.pop(0)
1229 n = remove.pop(0)
1218 if n in remain:
1230 if n in remain:
1219 del remain[n]
1231 del remain[n]
1220 for p in self.changelog.parents(n):
1232 for p in self.changelog.parents(n):
1221 remove.append(p)
1233 remove.append(p)
1222
1234
1223 # find every node whose parents have been pruned
1235 # find every node whose parents have been pruned
1224 subset = []
1236 subset = []
1225 # find every remote head that will get new children
1237 # find every remote head that will get new children
1226 updated_heads = {}
1238 updated_heads = {}
1227 for n in remain:
1239 for n in remain:
1228 p1, p2 = self.changelog.parents(n)
1240 p1, p2 = self.changelog.parents(n)
1229 if p1 not in remain and p2 not in remain:
1241 if p1 not in remain and p2 not in remain:
1230 subset.append(n)
1242 subset.append(n)
1231 if heads:
1243 if heads:
1232 if p1 in heads:
1244 if p1 in heads:
1233 updated_heads[p1] = True
1245 updated_heads[p1] = True
1234 if p2 in heads:
1246 if p2 in heads:
1235 updated_heads[p2] = True
1247 updated_heads[p2] = True
1236
1248
1237 # this is the set of all roots we have to push
1249 # this is the set of all roots we have to push
1238 if heads:
1250 if heads:
1239 return subset, updated_heads.keys()
1251 return subset, updated_heads.keys()
1240 else:
1252 else:
1241 return subset
1253 return subset
1242
1254
1243 def pull(self, remote, heads=None, force=False, lock=None):
1255 def pull(self, remote, heads=None, force=False, lock=None):
1244 mylock = False
1256 mylock = False
1245 if not lock:
1257 if not lock:
1246 lock = self.lock()
1258 lock = self.lock()
1247 mylock = True
1259 mylock = True
1248
1260
1249 try:
1261 try:
1250 fetch = self.findincoming(remote, force=force)
1262 fetch = self.findincoming(remote, force=force)
1251 if fetch == [nullid]:
1263 if fetch == [nullid]:
1252 self.ui.status(_("requesting all changes\n"))
1264 self.ui.status(_("requesting all changes\n"))
1253
1265
1254 if not fetch:
1266 if not fetch:
1255 self.ui.status(_("no changes found\n"))
1267 self.ui.status(_("no changes found\n"))
1256 return 0
1268 return 0
1257
1269
1258 if heads is None:
1270 if heads is None:
1259 cg = remote.changegroup(fetch, 'pull')
1271 cg = remote.changegroup(fetch, 'pull')
1260 else:
1272 else:
1261 if 'changegroupsubset' not in remote.capabilities:
1273 if 'changegroupsubset' not in remote.capabilities:
1262 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1274 raise util.Abort(_("Partial pull cannot be done because the other repository doesn't support changegroupsubset."))
1263 cg = remote.changegroupsubset(fetch, heads, 'pull')
1275 cg = remote.changegroupsubset(fetch, heads, 'pull')
1264 return self.addchangegroup(cg, 'pull', remote.url())
1276 return self.addchangegroup(cg, 'pull', remote.url())
1265 finally:
1277 finally:
1266 if mylock:
1278 if mylock:
1267 lock.release()
1279 lock.release()
1268
1280
1269 def push(self, remote, force=False, revs=None):
1281 def push(self, remote, force=False, revs=None):
1270 # there are two ways to push to remote repo:
1282 # there are two ways to push to remote repo:
1271 #
1283 #
1272 # addchangegroup assumes local user can lock remote
1284 # addchangegroup assumes local user can lock remote
1273 # repo (local filesystem, old ssh servers).
1285 # repo (local filesystem, old ssh servers).
1274 #
1286 #
1275 # unbundle assumes local user cannot lock remote repo (new ssh
1287 # unbundle assumes local user cannot lock remote repo (new ssh
1276 # servers, http servers).
1288 # servers, http servers).
1277
1289
1278 if remote.capable('unbundle'):
1290 if remote.capable('unbundle'):
1279 return self.push_unbundle(remote, force, revs)
1291 return self.push_unbundle(remote, force, revs)
1280 return self.push_addchangegroup(remote, force, revs)
1292 return self.push_addchangegroup(remote, force, revs)
1281
1293
1282 def prepush(self, remote, force, revs):
1294 def prepush(self, remote, force, revs):
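# Work out what needs to be pushed: returns (changegroup, remote heads)
# when the push should proceed, or (None, error status) when there is
# nothing to push or the push would be unsafe (unsynced remote changes,
# new remote heads) and force was not given.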
1283 base = {}
1295 base = {}
1284 remote_heads = remote.heads()
1296 remote_heads = remote.heads()
1285 inc = self.findincoming(remote, base, remote_heads, force=force)
1297 inc = self.findincoming(remote, base, remote_heads, force=force)
1286 if not force and inc:
1298 if not force and inc:
1287 self.ui.warn(_("abort: unsynced remote changes!\n"))
1299 self.ui.warn(_("abort: unsynced remote changes!\n"))
1288 self.ui.status(_("(did you forget to sync?"
1300 self.ui.status(_("(did you forget to sync?"
1289 " use push -f to force)\n"))
1301 " use push -f to force)\n"))
1290 return None, 1
1302 return None, 1
1291
1303
1292 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1304 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1293 if revs is not None:
1305 if revs is not None:
1294 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1306 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1295 else:
1307 else:
1296 bases, heads = update, self.changelog.heads()
1308 bases, heads = update, self.changelog.heads()
1297
1309
1298 if not bases:
1310 if not bases:
1299 self.ui.status(_("no changes found\n"))
1311 self.ui.status(_("no changes found\n"))
1300 return None, 1
1312 return None, 1
1301 elif not force:
1313 elif not force:
1302 # FIXME we don't properly detect creation of new heads
1314 # FIXME we don't properly detect creation of new heads
1303 # in the push -r case, assume the user knows what he's doing
1315 # in the push -r case, assume the user knows what he's doing
1304 if not revs and len(remote_heads) < len(heads) \
1316 if not revs and len(remote_heads) < len(heads) \
1305 and remote_heads != [nullid]:
1317 and remote_heads != [nullid]:
1306 self.ui.warn(_("abort: push creates new remote branches!\n"))
1318 self.ui.warn(_("abort: push creates new remote branches!\n"))
1307 self.ui.status(_("(did you forget to merge?"
1319 self.ui.status(_("(did you forget to merge?"
1308 " use push -f to force)\n"))
1320 " use push -f to force)\n"))
1309 return None, 1
1321 return None, 1
1310
1322
1311 if revs is None:
1323 if revs is None:
1312 cg = self.changegroup(update, 'push')
1324 cg = self.changegroup(update, 'push')
1313 else:
1325 else:
1314 cg = self.changegroupsubset(update, revs, 'push')
1326 cg = self.changegroupsubset(update, revs, 'push')
1315 return cg, remote_heads
1327 return cg, remote_heads
1316
1328
1317 def push_addchangegroup(self, remote, force, revs):
1329 def push_addchangegroup(self, remote, force, revs):
1318 lock = remote.lock()
1330 lock = remote.lock()
1319
1331
1320 ret = self.prepush(remote, force, revs)
1332 ret = self.prepush(remote, force, revs)
1321 if ret[0] is not None:
1333 if ret[0] is not None:
1322 cg, remote_heads = ret
1334 cg, remote_heads = ret
1323 return remote.addchangegroup(cg, 'push', self.url())
1335 return remote.addchangegroup(cg, 'push', self.url())
1324 return ret[1]
1336 return ret[1]
1325
1337
1326 def push_unbundle(self, remote, force, revs):
1338 def push_unbundle(self, remote, force, revs):
1327 # local repo finds heads on server, finds out what revs it
1339 # local repo finds heads on server, finds out what revs it
1328 # must push. once revs transferred, if server finds it has
1340 # must push. once revs transferred, if server finds it has
1329 # different heads (someone else won commit/push race), server
1341 # different heads (someone else won commit/push race), server
1330 # aborts.
1342 # aborts.
1331
1343
1332 ret = self.prepush(remote, force, revs)
1344 ret = self.prepush(remote, force, revs)
1333 if ret[0] is not None:
1345 if ret[0] is not None:
1334 cg, remote_heads = ret
1346 cg, remote_heads = ret
1335 if force: remote_heads = ['force']
1347 if force: remote_heads = ['force']
1336 return remote.unbundle(cg, remote_heads, 'push')
1348 return remote.unbundle(cg, remote_heads, 'push')
1337 return ret[1]
1349 return ret[1]
1338
1350
1339 def changegroupinfo(self, nodes):
1351 def changegroupinfo(self, nodes):
1340 self.ui.note(_("%d changesets found\n") % len(nodes))
1352 self.ui.note(_("%d changesets found\n") % len(nodes))
1341 if self.ui.debugflag:
1353 if self.ui.debugflag:
1342 self.ui.debug(_("List of changesets:\n"))
1354 self.ui.debug(_("List of changesets:\n"))
1343 for node in nodes:
1355 for node in nodes:
1344 self.ui.debug("%s\n" % hex(node))
1356 self.ui.debug("%s\n" % hex(node))
1345
1357
1346 def changegroupsubset(self, bases, heads, source):
1358 def changegroupsubset(self, bases, heads, source):
1347 """This function generates a changegroup consisting of all the nodes
1359 """This function generates a changegroup consisting of all the nodes
1348 that are descendants of any of the bases, and ancestors of any of
1360 that are descendants of any of the bases, and ancestors of any of
1349 the heads.
1361 the heads.
1350
1362
1351 It is fairly complex as determining which filenodes and which
1363 It is fairly complex as determining which filenodes and which
1352 manifest nodes need to be included for the changeset to be complete
1364 manifest nodes need to be included for the changeset to be complete
1353 is non-trivial.
1365 is non-trivial.
1354
1366
1355 Another wrinkle is doing the reverse, figuring out which changeset in
1367 Another wrinkle is doing the reverse, figuring out which changeset in
1356 the changegroup a particular filenode or manifestnode belongs to."""
1368 the changegroup a particular filenode or manifestnode belongs to."""
1357
1369
1358 self.hook('preoutgoing', throw=True, source=source)
1370 self.hook('preoutgoing', throw=True, source=source)
1359
1371
1360 # Set up some initial variables
1372 # Set up some initial variables
1361 # Make it easy to refer to self.changelog
1373 # Make it easy to refer to self.changelog
1362 cl = self.changelog
1374 cl = self.changelog
1363 # msng is short for missing - compute the list of changesets in this
1375 # msng is short for missing - compute the list of changesets in this
1364 # changegroup.
1376 # changegroup.
1365 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1377 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1366 self.changegroupinfo(msng_cl_lst)
1378 self.changegroupinfo(msng_cl_lst)
1367 # Some bases may turn out to be superfluous, and some heads may be
1379 # Some bases may turn out to be superfluous, and some heads may be
1368 # too. nodesbetween will return the minimal set of bases and heads
1380 # too. nodesbetween will return the minimal set of bases and heads
1369 # necessary to re-create the changegroup.
1381 # necessary to re-create the changegroup.
1370
1382
1371 # Known heads are the list of heads that it is assumed the recipient
1383 # Known heads are the list of heads that it is assumed the recipient
1372 # of this changegroup will know about.
1384 # of this changegroup will know about.
1373 knownheads = {}
1385 knownheads = {}
1374 # We assume that all parents of bases are known heads.
1386 # We assume that all parents of bases are known heads.
1375 for n in bases:
1387 for n in bases:
1376 for p in cl.parents(n):
1388 for p in cl.parents(n):
1377 if p != nullid:
1389 if p != nullid:
1378 knownheads[p] = 1
1390 knownheads[p] = 1
1379 knownheads = knownheads.keys()
1391 knownheads = knownheads.keys()
1380 if knownheads:
1392 if knownheads:
1381 # Now that we know what heads are known, we can compute which
1393 # Now that we know what heads are known, we can compute which
1382 # changesets are known. The recipient must know about all
1394 # changesets are known. The recipient must know about all
1383 # changesets required to reach the known heads from the null
1395 # changesets required to reach the known heads from the null
1384 # changeset.
1396 # changeset.
1385 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1397 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1386 junk = None
1398 junk = None
1387 # Transform the list into an ersatz set.
1399 # Transform the list into an ersatz set.
1388 has_cl_set = dict.fromkeys(has_cl_set)
1400 has_cl_set = dict.fromkeys(has_cl_set)
1389 else:
1401 else:
1390 # If there were no known heads, the recipient cannot be assumed to
1402 # If there were no known heads, the recipient cannot be assumed to
1391 # know about any changesets.
1403 # know about any changesets.
1392 has_cl_set = {}
1404 has_cl_set = {}
1393
1405
1394 # Make it easy to refer to self.manifest
1406 # Make it easy to refer to self.manifest
1395 mnfst = self.manifest
1407 mnfst = self.manifest
1396 # We don't know which manifests are missing yet
1408 # We don't know which manifests are missing yet
1397 msng_mnfst_set = {}
1409 msng_mnfst_set = {}
1398 # Nor do we know which filenodes are missing.
1410 # Nor do we know which filenodes are missing.
1399 msng_filenode_set = {}
1411 msng_filenode_set = {}
1400
1412
1401 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1413 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1402 junk = None
1414 junk = None
1403
1415
1404 # A changeset always belongs to itself, so the changenode lookup
1416 # A changeset always belongs to itself, so the changenode lookup
1405 # function for a changenode is identity.
1417 # function for a changenode is identity.
1406 def identity(x):
1418 def identity(x):
1407 return x
1419 return x
1408
1420
1409 # A function generating function. Sets up an environment for the
1421 # A function generating function. Sets up an environment for the
1410 # inner function.
1422 # inner function.
1411 def cmp_by_rev_func(revlog):
1423 def cmp_by_rev_func(revlog):
1412 # Compare two nodes by their revision number in the environment's
1424 # Compare two nodes by their revision number in the environment's
1413 # revision history. Since the revision number both represents the
1425 # revision history. Since the revision number both represents the
1414 # most efficient order to read the nodes in, and represents a
1426 # most efficient order to read the nodes in, and represents a
1415 # topological sorting of the nodes, this function is often useful.
1427 # topological sorting of the nodes, this function is often useful.
1416 def cmp_by_rev(a, b):
1428 def cmp_by_rev(a, b):
1417 return cmp(revlog.rev(a), revlog.rev(b))
1429 return cmp(revlog.rev(a), revlog.rev(b))
1418 return cmp_by_rev
1430 return cmp_by_rev
1419
1431
1420 # If we determine that a particular file or manifest node must be a
1432 # If we determine that a particular file or manifest node must be a
1421 # node that the recipient of the changegroup will already have, we can
1433 # node that the recipient of the changegroup will already have, we can
1422 # also assume the recipient will have all the parents. This function
1434 # also assume the recipient will have all the parents. This function
1423 # prunes them from the set of missing nodes.
1435 # prunes them from the set of missing nodes.
1424 def prune_parents(revlog, hasset, msngset):
1436 def prune_parents(revlog, hasset, msngset):
1425 haslst = hasset.keys()
1437 haslst = hasset.keys()
1426 haslst.sort(cmp_by_rev_func(revlog))
1438 haslst.sort(cmp_by_rev_func(revlog))
1427 for node in haslst:
1439 for node in haslst:
1428 parentlst = [p for p in revlog.parents(node) if p != nullid]
1440 parentlst = [p for p in revlog.parents(node) if p != nullid]
1429 while parentlst:
1441 while parentlst:
1430 n = parentlst.pop()
1442 n = parentlst.pop()
1431 if n not in hasset:
1443 if n not in hasset:
1432 hasset[n] = 1
1444 hasset[n] = 1
1433 p = [p for p in revlog.parents(n) if p != nullid]
1445 p = [p for p in revlog.parents(n) if p != nullid]
1434 parentlst.extend(p)
1446 parentlst.extend(p)
1435 for n in hasset:
1447 for n in hasset:
1436 msngset.pop(n, None)
1448 msngset.pop(n, None)
1437
1449
1438 # This is a function generating function used to set up an environment
1450 # This is a function generating function used to set up an environment
1439 # for the inner function to execute in.
1451 # for the inner function to execute in.
1440 def manifest_and_file_collector(changedfileset):
1452 def manifest_and_file_collector(changedfileset):
1441 # This is an information gathering function that gathers
1453 # This is an information gathering function that gathers
1442 # information from each changeset node that goes out as part of
1454 # information from each changeset node that goes out as part of
1443 # the changegroup. The information gathered is a list of which
1455 # the changegroup. The information gathered is a list of which
1444 # manifest nodes are potentially required (the recipient may
1456 # manifest nodes are potentially required (the recipient may
1445 # already have them) and total list of all files which were
1457 # already have them) and total list of all files which were
1446 # changed in any changeset in the changegroup.
1458 # changed in any changeset in the changegroup.
1447 #
1459 #
1448 # We also remember, for each manifest, the first changenode we saw that
1460 # We also remember, for each manifest, the first changenode we saw that
1449 # references it, so we can later determine which changenode 'owns'
1461 # references it, so we can later determine which changenode 'owns'
1450 # the manifest.
1462 # the manifest.
1451 def collect_manifests_and_files(clnode):
1463 def collect_manifests_and_files(clnode):
1452 c = cl.read(clnode)
1464 c = cl.read(clnode)
1453 for f in c[3]:
1465 for f in c[3]:
1454 # This is to make sure we only have one instance of each
1466 # This is to make sure we only have one instance of each
1455 # filename string for each filename.
1467 # filename string for each filename.
1456 changedfileset.setdefault(f, f)
1468 changedfileset.setdefault(f, f)
1457 msng_mnfst_set.setdefault(c[0], clnode)
1469 msng_mnfst_set.setdefault(c[0], clnode)
1458 return collect_manifests_and_files
1470 return collect_manifests_and_files
1459
1471
1460 # Figure out which manifest nodes (of the ones we think might be part
1472 # Figure out which manifest nodes (of the ones we think might be part
1461 # of the changegroup) the recipient must know about and remove them
1473 # of the changegroup) the recipient must know about and remove them
1462 # from the changegroup.
1474 # from the changegroup.
1463 def prune_manifests():
1475 def prune_manifests():
1464 has_mnfst_set = {}
1476 has_mnfst_set = {}
1465 for n in msng_mnfst_set:
1477 for n in msng_mnfst_set:
1466 # If a 'missing' manifest thinks it belongs to a changenode
1478 # If a 'missing' manifest thinks it belongs to a changenode
1467 # the recipient is assumed to have, obviously the recipient
1479 # the recipient is assumed to have, obviously the recipient
1468 # must have that manifest.
1480 # must have that manifest.
1469 linknode = cl.node(mnfst.linkrev(n))
1481 linknode = cl.node(mnfst.linkrev(n))
1470 if linknode in has_cl_set:
1482 if linknode in has_cl_set:
1471 has_mnfst_set[n] = 1
1483 has_mnfst_set[n] = 1
1472 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1484 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1473
1485
1474 # Use the information collected in collect_manifests_and_files to say
1486 # Use the information collected in collect_manifests_and_files to say
1475 # which changenode any manifestnode belongs to.
1487 # which changenode any manifestnode belongs to.
1476 def lookup_manifest_link(mnfstnode):
1488 def lookup_manifest_link(mnfstnode):
1477 return msng_mnfst_set[mnfstnode]
1489 return msng_mnfst_set[mnfstnode]
1478
1490
1479 # A function generating function that sets up the initial environment
1491 # A function generating function that sets up the initial environment
1480 # for the inner function.
1492 # for the inner function.
1481 def filenode_collector(changedfiles):
1493 def filenode_collector(changedfiles):
1482 next_rev = [0]
1494 next_rev = [0]
1483 # This gathers information from each manifestnode included in the
1495 # This gathers information from each manifestnode included in the
1484 # changegroup about which filenodes the manifest node references
1496 # changegroup about which filenodes the manifest node references
1485 # so we can include those in the changegroup too.
1497 # so we can include those in the changegroup too.
1486 #
1498 #
1487 # It also remembers which changenode each filenode belongs to. It
1499 # It also remembers which changenode each filenode belongs to. It
1488 # does this by assuming that a filenode belongs to the same changenode
1500 # does this by assuming that a filenode belongs to the same changenode
1489 # as the first manifest that references it.
1501 # as the first manifest that references it.
1490 def collect_msng_filenodes(mnfstnode):
1502 def collect_msng_filenodes(mnfstnode):
1491 r = mnfst.rev(mnfstnode)
1503 r = mnfst.rev(mnfstnode)
1492 if r == next_rev[0]:
1504 if r == next_rev[0]:
1493 # If the last rev we looked at was the one just previous,
1505 # If the last rev we looked at was the one just previous,
1494 # we only need to see a diff.
1506 # we only need to see a diff.
1495 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1507 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1496 # For each line in the delta
1508 # For each line in the delta
1497 for dline in delta.splitlines():
1509 for dline in delta.splitlines():
1498 # get the filename and filenode for that line
1510 # get the filename and filenode for that line
1499 f, fnode = dline.split('\0')
1511 f, fnode = dline.split('\0')
1500 fnode = bin(fnode[:40])
1512 fnode = bin(fnode[:40])
1501 f = changedfiles.get(f, None)
1513 f = changedfiles.get(f, None)
1502 # And if the file is in the list of files we care
1514 # And if the file is in the list of files we care
1503 # about.
1515 # about.
1504 if f is not None:
1516 if f is not None:
1505 # Get the changenode this manifest belongs to
1517 # Get the changenode this manifest belongs to
1506 clnode = msng_mnfst_set[mnfstnode]
1518 clnode = msng_mnfst_set[mnfstnode]
1507 # Create the set of filenodes for the file if
1519 # Create the set of filenodes for the file if
1508 # there isn't one already.
1520 # there isn't one already.
1509 ndset = msng_filenode_set.setdefault(f, {})
1521 ndset = msng_filenode_set.setdefault(f, {})
1510 # And set the filenode's changelog node to the
1522 # And set the filenode's changelog node to the
1511 # manifest's if it hasn't been set already.
1523 # manifest's if it hasn't been set already.
1512 ndset.setdefault(fnode, clnode)
1524 ndset.setdefault(fnode, clnode)
1513 else:
1525 else:
1514 # Otherwise we need a full manifest.
1526 # Otherwise we need a full manifest.
1515 m = mnfst.read(mnfstnode)
1527 m = mnfst.read(mnfstnode)
1516 # For every file we care about.
1528 # For every file we care about.
1517 for f in changedfiles:
1529 for f in changedfiles:
1518 fnode = m.get(f, None)
1530 fnode = m.get(f, None)
1519 # If it's in the manifest
1531 # If it's in the manifest
1520 if fnode is not None:
1532 if fnode is not None:
1521 # See comments above.
1533 # See comments above.
1522 clnode = msng_mnfst_set[mnfstnode]
1534 clnode = msng_mnfst_set[mnfstnode]
1523 ndset = msng_filenode_set.setdefault(f, {})
1535 ndset = msng_filenode_set.setdefault(f, {})
1524 ndset.setdefault(fnode, clnode)
1536 ndset.setdefault(fnode, clnode)
1525 # Remember the revision we hope to see next.
1537 # Remember the revision we hope to see next.
1526 next_rev[0] = r + 1
1538 next_rev[0] = r + 1
1527 return collect_msng_filenodes
1539 return collect_msng_filenodes
1528
1540
1529 # We have a list of filenodes we think we need for a file; let's remove
1541 # We have a list of filenodes we think we need for a file; let's remove
1530 # all those we know the recipient must have.
1542 # all those we know the recipient must have.
1531 def prune_filenodes(f, filerevlog):
1543 def prune_filenodes(f, filerevlog):
1532 msngset = msng_filenode_set[f]
1544 msngset = msng_filenode_set[f]
1533 hasset = {}
1545 hasset = {}
1534 # If a 'missing' filenode thinks it belongs to a changenode we
1546 # If a 'missing' filenode thinks it belongs to a changenode we
1535 # assume the recipient must have, then the recipient must have
1547 # assume the recipient must have, then the recipient must have
1536 # that filenode.
1548 # that filenode.
1537 for n in msngset:
1549 for n in msngset:
1538 clnode = cl.node(filerevlog.linkrev(n))
1550 clnode = cl.node(filerevlog.linkrev(n))
1539 if clnode in has_cl_set:
1551 if clnode in has_cl_set:
1540 hasset[n] = 1
1552 hasset[n] = 1
1541 prune_parents(filerevlog, hasset, msngset)
1553 prune_parents(filerevlog, hasset, msngset)
1542
1554
1543 # A function generating function that sets up a context for the
1555 # A function generating function that sets up a context for the
1544 # inner function.
1556 # inner function.
1545 def lookup_filenode_link_func(fname):
1557 def lookup_filenode_link_func(fname):
1546 msngset = msng_filenode_set[fname]
1558 msngset = msng_filenode_set[fname]
1547 # Lookup the changenode the filenode belongs to.
1559 # Lookup the changenode the filenode belongs to.
1548 def lookup_filenode_link(fnode):
1560 def lookup_filenode_link(fnode):
1549 return msngset[fnode]
1561 return msngset[fnode]
1550 return lookup_filenode_link
1562 return lookup_filenode_link
1551
1563
1552 # Now that we have all these utility functions to help out and
1564 # Now that we have all these utility functions to help out and
1553 # logically divide up the task, generate the group.
1565 # logically divide up the task, generate the group.
1554 def gengroup():
1566 def gengroup():
1555 # The set of changed files starts empty.
1567 # The set of changed files starts empty.
1556 changedfiles = {}
1568 changedfiles = {}
1557 # Create a changenode group generator that will call our functions
1569 # Create a changenode group generator that will call our functions
1558 # back to lookup the owning changenode and collect information.
1570 # back to lookup the owning changenode and collect information.
1559 group = cl.group(msng_cl_lst, identity,
1571 group = cl.group(msng_cl_lst, identity,
1560 manifest_and_file_collector(changedfiles))
1572 manifest_and_file_collector(changedfiles))
1561 for chnk in group:
1573 for chnk in group:
1562 yield chnk
1574 yield chnk
1563
1575
1564 # The list of manifests has been collected by the generator
1576 # The list of manifests has been collected by the generator
1565 # calling our functions back.
1577 # calling our functions back.
1566 prune_manifests()
1578 prune_manifests()
1567 msng_mnfst_lst = msng_mnfst_set.keys()
1579 msng_mnfst_lst = msng_mnfst_set.keys()
1568 # Sort the manifestnodes by revision number.
1580 # Sort the manifestnodes by revision number.
1569 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1581 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1570 # Create a generator for the manifestnodes that calls our lookup
1582 # Create a generator for the manifestnodes that calls our lookup
1571 # and data collection functions back.
1583 # and data collection functions back.
1572 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1584 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1573 filenode_collector(changedfiles))
1585 filenode_collector(changedfiles))
1574 for chnk in group:
1586 for chnk in group:
1575 yield chnk
1587 yield chnk
1576
1588
1577 # These are no longer needed, dereference and toss the memory for
1589 # These are no longer needed, dereference and toss the memory for
1578 # them.
1590 # them.
1579 msng_mnfst_lst = None
1591 msng_mnfst_lst = None
1580 msng_mnfst_set.clear()
1592 msng_mnfst_set.clear()
1581
1593
1582 changedfiles = changedfiles.keys()
1594 changedfiles = changedfiles.keys()
1583 changedfiles.sort()
1595 changedfiles.sort()
1584 # Go through all our files in order sorted by name.
1596 # Go through all our files in order sorted by name.
1585 for fname in changedfiles:
1597 for fname in changedfiles:
1586 filerevlog = self.file(fname)
1598 filerevlog = self.file(fname)
1587 # Toss out the filenodes that the recipient isn't really
1599 # Toss out the filenodes that the recipient isn't really
1588 # missing.
1600 # missing.
1589 if msng_filenode_set.has_key(fname):
1601 if msng_filenode_set.has_key(fname):
1590 prune_filenodes(fname, filerevlog)
1602 prune_filenodes(fname, filerevlog)
1591 msng_filenode_lst = msng_filenode_set[fname].keys()
1603 msng_filenode_lst = msng_filenode_set[fname].keys()
1592 else:
1604 else:
1593 msng_filenode_lst = []
1605 msng_filenode_lst = []
1594 # If any filenodes are left, generate the group for them,
1606 # If any filenodes are left, generate the group for them,
1595 # otherwise don't bother.
1607 # otherwise don't bother.
1596 if len(msng_filenode_lst) > 0:
1608 if len(msng_filenode_lst) > 0:
1597 yield changegroup.genchunk(fname)
1609 yield changegroup.genchunk(fname)
1598 # Sort the filenodes by their revision #
1610 # Sort the filenodes by their revision #
1599 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1611 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1600 # Create a group generator and only pass in a changenode
1612 # Create a group generator and only pass in a changenode
1601 # lookup function as we need to collect no information
1613 # lookup function as we need to collect no information
1602 # from filenodes.
1614 # from filenodes.
1603 group = filerevlog.group(msng_filenode_lst,
1615 group = filerevlog.group(msng_filenode_lst,
1604 lookup_filenode_link_func(fname))
1616 lookup_filenode_link_func(fname))
1605 for chnk in group:
1617 for chnk in group:
1606 yield chnk
1618 yield chnk
1607 if msng_filenode_set.has_key(fname):
1619 if msng_filenode_set.has_key(fname):
1608 # Don't need this anymore, toss it to free memory.
1620 # Don't need this anymore, toss it to free memory.
1609 del msng_filenode_set[fname]
1621 del msng_filenode_set[fname]
1610 # Signal that no more groups are left.
1622 # Signal that no more groups are left.
1611 yield changegroup.closechunk()
1623 yield changegroup.closechunk()
1612
1624
1613 if msng_cl_lst:
1625 if msng_cl_lst:
1614 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1626 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1615
1627
1616 return util.chunkbuffer(gengroup())
1628 return util.chunkbuffer(gengroup())
1617
1629
1618 def changegroup(self, basenodes, source):
1630 def changegroup(self, basenodes, source):
1619 """Generate a changegroup of all nodes that we have that a recipient
1631 """Generate a changegroup of all nodes that we have that a recipient
1620 doesn't.
1632 doesn't.
1621
1633
1622 This is much easier than the previous function as we can assume that
1634 This is much easier than the previous function as we can assume that
1623 the recipient has any changenode we aren't sending them."""
1635 the recipient has any changenode we aren't sending them."""
1624
1636
1625 self.hook('preoutgoing', throw=True, source=source)
1637 self.hook('preoutgoing', throw=True, source=source)
1626
1638
1627 cl = self.changelog
1639 cl = self.changelog
1628 nodes = cl.nodesbetween(basenodes, None)[0]
1640 nodes = cl.nodesbetween(basenodes, None)[0]
1629 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1641 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1630 self.changegroupinfo(nodes)
1642 self.changegroupinfo(nodes)
1631
1643
1632 def identity(x):
1644 def identity(x):
1633 return x
1645 return x
1634
1646
1635 def gennodelst(revlog):
1647 def gennodelst(revlog):
1636 for r in xrange(0, revlog.count()):
1648 for r in xrange(0, revlog.count()):
1637 n = revlog.node(r)
1649 n = revlog.node(r)
1638 if revlog.linkrev(n) in revset:
1650 if revlog.linkrev(n) in revset:
1639 yield n
1651 yield n
1640
1652
1641 def changed_file_collector(changedfileset):
1653 def changed_file_collector(changedfileset):
1642 def collect_changed_files(clnode):
1654 def collect_changed_files(clnode):
1643 c = cl.read(clnode)
1655 c = cl.read(clnode)
1644 for fname in c[3]:
1656 for fname in c[3]:
1645 changedfileset[fname] = 1
1657 changedfileset[fname] = 1
1646 return collect_changed_files
1658 return collect_changed_files
1647
1659
1648 def lookuprevlink_func(revlog):
1660 def lookuprevlink_func(revlog):
1649 def lookuprevlink(n):
1661 def lookuprevlink(n):
1650 return cl.node(revlog.linkrev(n))
1662 return cl.node(revlog.linkrev(n))
1651 return lookuprevlink
1663 return lookuprevlink
1652
1664
1653 def gengroup():
1665 def gengroup():
1654 # construct a list of all changed files
1666 # construct a list of all changed files
1655 changedfiles = {}
1667 changedfiles = {}
1656
1668
1657 for chnk in cl.group(nodes, identity,
1669 for chnk in cl.group(nodes, identity,
1658 changed_file_collector(changedfiles)):
1670 changed_file_collector(changedfiles)):
1659 yield chnk
1671 yield chnk
1660 changedfiles = changedfiles.keys()
1672 changedfiles = changedfiles.keys()
1661 changedfiles.sort()
1673 changedfiles.sort()
1662
1674
1663 mnfst = self.manifest
1675 mnfst = self.manifest
1664 nodeiter = gennodelst(mnfst)
1676 nodeiter = gennodelst(mnfst)
1665 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1677 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1666 yield chnk
1678 yield chnk
1667
1679
1668 for fname in changedfiles:
1680 for fname in changedfiles:
1669 filerevlog = self.file(fname)
1681 filerevlog = self.file(fname)
1670 nodeiter = gennodelst(filerevlog)
1682 nodeiter = gennodelst(filerevlog)
1671 nodeiter = list(nodeiter)
1683 nodeiter = list(nodeiter)
1672 if nodeiter:
1684 if nodeiter:
1673 yield changegroup.genchunk(fname)
1685 yield changegroup.genchunk(fname)
1674 lookup = lookuprevlink_func(filerevlog)
1686 lookup = lookuprevlink_func(filerevlog)
1675 for chnk in filerevlog.group(nodeiter, lookup):
1687 for chnk in filerevlog.group(nodeiter, lookup):
1676 yield chnk
1688 yield chnk
1677
1689
1678 yield changegroup.closechunk()
1690 yield changegroup.closechunk()
1679
1691
1680 if nodes:
1692 if nodes:
1681 self.hook('outgoing', node=hex(nodes[0]), source=source)
1693 self.hook('outgoing', node=hex(nodes[0]), source=source)
1682
1694
1683 return util.chunkbuffer(gengroup())
1695 return util.chunkbuffer(gengroup())
1684
1696
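The changegroup generators above frame their output with changegroup.genchunk and terminate each group with changegroup.closechunk; changegroup.chunkiter and getchunk on the receiving side undo that framing. A minimal, hypothetical sketch of such length-prefixed framing follows (the real helpers live in mercurial/changegroup.py and may differ in detail):

# Hypothetical sketch of length-prefixed chunk framing, for illustration only.
import struct
from cStringIO import StringIO

def genchunk(data):
    # 4-byte big-endian length that counts the header itself, then the payload
    return struct.pack(">l", len(data) + 4) + data

def closechunk():
    # a zero-length header marks the end of a group
    return struct.pack(">l", 0)

def chunkiter(fp):
    while 1:
        header = fp.read(4)
        if len(header) < 4:
            break
        l = struct.unpack(">l", header)[0]
        if l <= 4:
            break
        yield fp.read(l - 4)

stream = StringIO(genchunk("manifest data") + genchunk("file data") + closechunk())
assert list(chunkiter(stream)) == ["manifest data", "file data"]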
1685 def addchangegroup(self, source, srctype, url):
1697 def addchangegroup(self, source, srctype, url):
1686 """add changegroup to repo.
1698 """add changegroup to repo.
1687 returns number of heads modified or added + 1."""
1699 returns number of heads modified or added + 1."""
1688
1700
1689 def csmap(x):
1701 def csmap(x):
1690 self.ui.debug(_("add changeset %s\n") % short(x))
1702 self.ui.debug(_("add changeset %s\n") % short(x))
1691 return cl.count()
1703 return cl.count()
1692
1704
1693 def revmap(x):
1705 def revmap(x):
1694 return cl.rev(x)
1706 return cl.rev(x)
1695
1707
1696 if not source:
1708 if not source:
1697 return 0
1709 return 0
1698
1710
1699 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1711 self.hook('prechangegroup', throw=True, source=srctype, url=url)
1700
1712
1701 changesets = files = revisions = 0
1713 changesets = files = revisions = 0
1702
1714
1703 tr = self.transaction()
1715 tr = self.transaction()
1704
1716
1705 # write changelog data to temp files so concurrent readers will not see an
1717 # write changelog data to temp files so concurrent readers will not see an
1706 # inconsistent view
1718 # inconsistent view
1707 cl = None
1719 cl = None
1708 try:
1720 try:
1709 cl = appendfile.appendchangelog(self.sopener,
1721 cl = appendfile.appendchangelog(self.sopener,
1710 self.changelog.version)
1722 self.changelog.version)
1711
1723
1712 oldheads = len(cl.heads())
1724 oldheads = len(cl.heads())
1713
1725
1714 # pull off the changeset group
1726 # pull off the changeset group
1715 self.ui.status(_("adding changesets\n"))
1727 self.ui.status(_("adding changesets\n"))
1716 cor = cl.count() - 1
1728 cor = cl.count() - 1
1717 chunkiter = changegroup.chunkiter(source)
1729 chunkiter = changegroup.chunkiter(source)
1718 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1730 if cl.addgroup(chunkiter, csmap, tr, 1) is None:
1719 raise util.Abort(_("received changelog group is empty"))
1731 raise util.Abort(_("received changelog group is empty"))
1720 cnr = cl.count() - 1
1732 cnr = cl.count() - 1
1721 changesets = cnr - cor
1733 changesets = cnr - cor
1722
1734
1723 # pull off the manifest group
1735 # pull off the manifest group
1724 self.ui.status(_("adding manifests\n"))
1736 self.ui.status(_("adding manifests\n"))
1725 chunkiter = changegroup.chunkiter(source)
1737 chunkiter = changegroup.chunkiter(source)
1726 # no need to check for empty manifest group here:
1738 # no need to check for empty manifest group here:
1727 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1739 # if the result of the merge of 1 and 2 is the same in 3 and 4,
1728 # no new manifest will be created and the manifest group will
1740 # no new manifest will be created and the manifest group will
1729 # be empty during the pull
1741 # be empty during the pull
1730 self.manifest.addgroup(chunkiter, revmap, tr)
1742 self.manifest.addgroup(chunkiter, revmap, tr)
1731
1743
1732 # process the files
1744 # process the files
1733 self.ui.status(_("adding file changes\n"))
1745 self.ui.status(_("adding file changes\n"))
1734 while 1:
1746 while 1:
1735 f = changegroup.getchunk(source)
1747 f = changegroup.getchunk(source)
1736 if not f:
1748 if not f:
1737 break
1749 break
1738 self.ui.debug(_("adding %s revisions\n") % f)
1750 self.ui.debug(_("adding %s revisions\n") % f)
1739 fl = self.file(f)
1751 fl = self.file(f)
1740 o = fl.count()
1752 o = fl.count()
1741 chunkiter = changegroup.chunkiter(source)
1753 chunkiter = changegroup.chunkiter(source)
1742 if fl.addgroup(chunkiter, revmap, tr) is None:
1754 if fl.addgroup(chunkiter, revmap, tr) is None:
1743 raise util.Abort(_("received file revlog group is empty"))
1755 raise util.Abort(_("received file revlog group is empty"))
1744 revisions += fl.count() - o
1756 revisions += fl.count() - o
1745 files += 1
1757 files += 1
1746
1758
1747 cl.writedata()
1759 cl.writedata()
1748 finally:
1760 finally:
1749 if cl:
1761 if cl:
1750 cl.cleanup()
1762 cl.cleanup()
1751
1763
1752 # make changelog see real files again
1764 # make changelog see real files again
1753 self.changelog = changelog.changelog(self.sopener,
1765 self.changelog = changelog.changelog(self.sopener,
1754 self.changelog.version)
1766 self.changelog.version)
1755 self.changelog.checkinlinesize(tr)
1767 self.changelog.checkinlinesize(tr)
1756
1768
1757 newheads = len(self.changelog.heads())
1769 newheads = len(self.changelog.heads())
1758 heads = ""
1770 heads = ""
1759 if oldheads and newheads != oldheads:
1771 if oldheads and newheads != oldheads:
1760 heads = _(" (%+d heads)") % (newheads - oldheads)
1772 heads = _(" (%+d heads)") % (newheads - oldheads)
1761
1773
1762 self.ui.status(_("added %d changesets"
1774 self.ui.status(_("added %d changesets"
1763 " with %d changes to %d files%s\n")
1775 " with %d changes to %d files%s\n")
1764 % (changesets, revisions, files, heads))
1776 % (changesets, revisions, files, heads))
1765
1777
1766 if changesets > 0:
1778 if changesets > 0:
1767 self.hook('pretxnchangegroup', throw=True,
1779 self.hook('pretxnchangegroup', throw=True,
1768 node=hex(self.changelog.node(cor+1)), source=srctype,
1780 node=hex(self.changelog.node(cor+1)), source=srctype,
1769 url=url)
1781 url=url)
1770
1782
1771 tr.close()
1783 tr.close()
1772
1784
1773 if changesets > 0:
1785 if changesets > 0:
1774 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1786 self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
1775 source=srctype, url=url)
1787 source=srctype, url=url)
1776
1788
1777 for i in xrange(cor + 1, cnr + 1):
1789 for i in xrange(cor + 1, cnr + 1):
1778 self.hook("incoming", node=hex(self.changelog.node(i)),
1790 self.hook("incoming", node=hex(self.changelog.node(i)),
1779 source=srctype, url=url)
1791 source=srctype, url=url)
1780
1792
1781 return newheads - oldheads + 1
1793 return newheads - oldheads + 1
1782
1794
1783
1795
1784 def stream_in(self, remote):
1796 def stream_in(self, remote):
1785 fp = remote.stream_out()
1797 fp = remote.stream_out()
1786 l = fp.readline()
1798 l = fp.readline()
1787 try:
1799 try:
1788 resp = int(l)
1800 resp = int(l)
1789 except ValueError:
1801 except ValueError:
1790 raise util.UnexpectedOutput(
1802 raise util.UnexpectedOutput(
1791 _('Unexpected response from remote server:'), l)
1803 _('Unexpected response from remote server:'), l)
1792 if resp != 0:
1804 if resp != 0:
1793 raise util.Abort(_('operation forbidden by server'))
1805 raise util.Abort(_('operation forbidden by server'))
1794 self.ui.status(_('streaming all changes\n'))
1806 self.ui.status(_('streaming all changes\n'))
1795 l = fp.readline()
1807 l = fp.readline()
1796 try:
1808 try:
1797 total_files, total_bytes = map(int, l.split(' ', 1))
1809 total_files, total_bytes = map(int, l.split(' ', 1))
1798 except (ValueError, TypeError):
1810 except (ValueError, TypeError):
1799 raise util.UnexpectedOutput(
1811 raise util.UnexpectedOutput(
1800 _('Unexpected response from remote server:'), l)
1812 _('Unexpected response from remote server:'), l)
1801 self.ui.status(_('%d files to transfer, %s of data\n') %
1813 self.ui.status(_('%d files to transfer, %s of data\n') %
1802 (total_files, util.bytecount(total_bytes)))
1814 (total_files, util.bytecount(total_bytes)))
1803 start = time.time()
1815 start = time.time()
1804 for i in xrange(total_files):
1816 for i in xrange(total_files):
1805 l = fp.readline()
1817 l = fp.readline()
1806 try:
1818 try:
1807 name, size = l.split('\0', 1)
1819 name, size = l.split('\0', 1)
1808 size = int(size)
1820 size = int(size)
1809 except (ValueError, TypeError):
1821 except (ValueError, TypeError):
1810 raise util.UnexpectedOutput(
1822 raise util.UnexpectedOutput(
1811 _('Unexpected response from remote server:'), l)
1823 _('Unexpected response from remote server:'), l)
1812 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1824 self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
1813 ofp = self.sopener(name, 'w')
1825 ofp = self.sopener(name, 'w')
1814 for chunk in util.filechunkiter(fp, limit=size):
1826 for chunk in util.filechunkiter(fp, limit=size):
1815 ofp.write(chunk)
1827 ofp.write(chunk)
1816 ofp.close()
1828 ofp.close()
1817 elapsed = time.time() - start
1829 elapsed = time.time() - start
1818 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1830 self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
1819 (util.bytecount(total_bytes), elapsed,
1831 (util.bytecount(total_bytes), elapsed,
1820 util.bytecount(total_bytes / elapsed)))
1832 util.bytecount(total_bytes / elapsed)))
1821 self.reload()
1833 self.reload()
1822 return len(self.heads()) + 1
1834 return len(self.heads()) + 1
1823
1835
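stream_in consumes a simple line-oriented protocol from the remote: a numeric status line, a "total_files total_bytes" line, then for each file a "name\0size" header followed by exactly size bytes of data. A standalone sketch of a reader for that layout (the sample payload below is made up for illustration):

# Sketch of the stream_out wire format consumed by stream_in above.
from cStringIO import StringIO

def parse_stream(fp):
    if int(fp.readline()) != 0:
        raise ValueError('operation forbidden by server')
    total_files, total_bytes = map(int, fp.readline().split(' ', 1))
    for i in xrange(total_files):
        name, size = fp.readline().split('\0', 1)
        yield name, fp.read(int(size))

sample = StringIO('0\n'
                  '2 13\n'
                  'data/a.i\x006\n' 'foobar'
                  '00changelog.i\x007\n' 'bazquux')
print list(parse_stream(sample))
# [('data/a.i', 'foobar'), ('00changelog.i', 'bazquux')]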
1824 def clone(self, remote, heads=[], stream=False):
1836 def clone(self, remote, heads=[], stream=False):
1825 '''clone remote repository.
1837 '''clone remote repository.
1826
1838
1827 keyword arguments:
1839 keyword arguments:
1828 heads: list of revs to clone (forces use of pull)
1840 heads: list of revs to clone (forces use of pull)
1829 stream: use streaming clone if possible'''
1841 stream: use streaming clone if possible'''
1830
1842
1831 # now, all clients that can request uncompressed clones can
1843 # now, all clients that can request uncompressed clones can
1832 # read repo formats supported by all servers that can serve
1844 # read repo formats supported by all servers that can serve
1833 # them.
1845 # them.
1834
1846
1835 # if revlog format changes, client will have to check version
1847 # if revlog format changes, client will have to check version
1836 # and format flags on "stream" capability, and use
1848 # and format flags on "stream" capability, and use
1837 # uncompressed only if compatible.
1849 # uncompressed only if compatible.
1838
1850
1839 if stream and not heads and remote.capable('stream'):
1851 if stream and not heads and remote.capable('stream'):
1840 return self.stream_in(remote)
1852 return self.stream_in(remote)
1841 return self.pull(remote, heads)
1853 return self.pull(remote, heads)
1842
1854
1843 # used to avoid circular references so destructors work
1855 # used to avoid circular references so destructors work
1844 def aftertrans(base):
1856 def aftertrans(base):
1845 p = base
1857 p = base
1846 def a():
1858 def a():
1847 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1859 util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
1848 util.rename(os.path.join(p, "journal.dirstate"),
1860 util.rename(os.path.join(p, "journal.dirstate"),
1849 os.path.join(p, "undo.dirstate"))
1861 os.path.join(p, "undo.dirstate"))
1850 return a
1862 return a
1851
1863
1852 def instance(ui, path, create):
1864 def instance(ui, path, create):
1853 return localrepository(ui, util.drop_scheme('file', path), create)
1865 return localrepository(ui, util.drop_scheme('file', path), create)
1854
1866
1855 def islocal(path):
1867 def islocal(path):
1856 return True
1868 return True
@@ -1,1054 +1,1059 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import gettext as _
15 from i18n import gettext as _
16 from demandload import *
16 from demandload import *
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
17 demandload(globals(), "cStringIO errno getpass popen2 re shutil sys tempfile")
18 demandload(globals(), "os threading time calendar ConfigParser")
18 demandload(globals(), "os threading time calendar ConfigParser")
19
19
20 # used by parsedate
20 # used by parsedate
21 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
21 defaultdateformats = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M',
22 '%a %b %d %H:%M:%S %Y')
22 '%a %b %d %H:%M:%S %Y')
23
23
24 class SignalInterrupt(Exception):
24 class SignalInterrupt(Exception):
25 """Exception raised on SIGTERM and SIGHUP."""
25 """Exception raised on SIGTERM and SIGHUP."""
26
26
27 # like SafeConfigParser but with case-sensitive keys
27 # like SafeConfigParser but with case-sensitive keys
28 class configparser(ConfigParser.SafeConfigParser):
28 class configparser(ConfigParser.SafeConfigParser):
29 def optionxform(self, optionstr):
29 def optionxform(self, optionstr):
30 return optionstr
30 return optionstr
31
31
32 def cachefunc(func):
32 def cachefunc(func):
33 '''cache the result of function calls'''
33 '''cache the result of function calls'''
34 # XXX doesn't handle keyword args
34 # XXX doesn't handle keyword args
35 cache = {}
35 cache = {}
36 if func.func_code.co_argcount == 1:
36 if func.func_code.co_argcount == 1:
37 # we gain a small amount of time because
37 # we gain a small amount of time because
38 # we don't need to pack/unpack the list
38 # we don't need to pack/unpack the list
39 def f(arg):
39 def f(arg):
40 if arg not in cache:
40 if arg not in cache:
41 cache[arg] = func(arg)
41 cache[arg] = func(arg)
42 return cache[arg]
42 return cache[arg]
43 else:
43 else:
44 def f(*args):
44 def f(*args):
45 if args not in cache:
45 if args not in cache:
46 cache[args] = func(*args)
46 cache[args] = func(*args)
47 return cache[args]
47 return cache[args]
48
48
49 return f
49 return f
50
50
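cachefunc is a plain memoizer: the wrapped function runs at most once per distinct argument (or argument tuple) and later calls return the cached result. A usage sketch, assuming this module is importable as mercurial.util:

# cachefunc usage: square only runs once per distinct argument.
from mercurial import util

calls = []
def square(x):
    calls.append(x)
    return x * x

fast = util.cachefunc(square)
print fast(3), fast(3), fast(4)   # 9 9 16
print calls                       # [3, 4] - square only ran twice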
51 def pipefilter(s, cmd):
51 def pipefilter(s, cmd):
52 '''filter string S through command CMD, returning its output'''
52 '''filter string S through command CMD, returning its output'''
53 (pout, pin) = popen2.popen2(cmd, -1, 'b')
53 (pout, pin) = popen2.popen2(cmd, -1, 'b')
54 def writer():
54 def writer():
55 try:
55 try:
56 pin.write(s)
56 pin.write(s)
57 pin.close()
57 pin.close()
58 except IOError, inst:
58 except IOError, inst:
59 if inst.errno != errno.EPIPE:
59 if inst.errno != errno.EPIPE:
60 raise
60 raise
61
61
62 # we should use select instead on UNIX, but this will work on most
62 # we should use select instead on UNIX, but this will work on most
63 # systems, including Windows
63 # systems, including Windows
64 w = threading.Thread(target=writer)
64 w = threading.Thread(target=writer)
65 w.start()
65 w.start()
66 f = pout.read()
66 f = pout.read()
67 pout.close()
67 pout.close()
68 w.join()
68 w.join()
69 return f
69 return f
70
70
71 def tempfilter(s, cmd):
71 def tempfilter(s, cmd):
72 '''filter string S through a pair of temporary files with CMD.
72 '''filter string S through a pair of temporary files with CMD.
73 CMD is used as a template to create the real command to be run,
73 CMD is used as a template to create the real command to be run,
74 with the strings INFILE and OUTFILE replaced by the real names of
74 with the strings INFILE and OUTFILE replaced by the real names of
75 the temporary files generated.'''
75 the temporary files generated.'''
76 inname, outname = None, None
76 inname, outname = None, None
77 try:
77 try:
78 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
78 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
79 fp = os.fdopen(infd, 'wb')
79 fp = os.fdopen(infd, 'wb')
80 fp.write(s)
80 fp.write(s)
81 fp.close()
81 fp.close()
82 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
82 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
83 os.close(outfd)
83 os.close(outfd)
84 cmd = cmd.replace('INFILE', inname)
84 cmd = cmd.replace('INFILE', inname)
85 cmd = cmd.replace('OUTFILE', outname)
85 cmd = cmd.replace('OUTFILE', outname)
86 code = os.system(cmd)
86 code = os.system(cmd)
87 if code: raise Abort(_("command '%s' failed: %s") %
87 if code: raise Abort(_("command '%s' failed: %s") %
88 (cmd, explain_exit(code)))
88 (cmd, explain_exit(code)))
89 return open(outname, 'rb').read()
89 return open(outname, 'rb').read()
90 finally:
90 finally:
91 try:
91 try:
92 if inname: os.unlink(inname)
92 if inname: os.unlink(inname)
93 except: pass
93 except: pass
94 try:
94 try:
95 if outname: os.unlink(outname)
95 if outname: os.unlink(outname)
96 except: pass
96 except: pass
97
97
98 filtertable = {
98 filtertable = {
99 'tempfile:': tempfilter,
99 'tempfile:': tempfilter,
100 'pipe:': pipefilter,
100 'pipe:': pipefilter,
101 }
101 }
102
102
103 def filter(s, cmd):
103 def filter(s, cmd):
104 "filter a string through a command that transforms its input to its output"
104 "filter a string through a command that transforms its input to its output"
105 for name, fn in filtertable.iteritems():
105 for name, fn in filtertable.iteritems():
106 if cmd.startswith(name):
106 if cmd.startswith(name):
107 return fn(s, cmd[len(name):].lstrip())
107 return fn(s, cmd[len(name):].lstrip())
108 return pipefilter(s, cmd)
108 return pipefilter(s, cmd)
109
109
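filter dispatches on an optional command prefix: 'tempfile:' routes the data through a pair of temporary files, 'pipe:' through a pipe, and anything without a prefix defaults to pipefilter. A usage sketch on a POSIX system, assuming mercurial.util is importable:

# Each call below returns "a\nb\nc\n"; only the transport differs.
from mercurial import util

s = "b\na\nc\n"
print util.filter(s, "pipe:sort")                        # explicit pipe filter
print util.filter(s, "tempfile:sort INFILE -o OUTFILE")  # via temporary files
print util.filter(s, "sort")                             # no prefix: pipefilter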
110 def find_in_path(name, path, default=None):
110 def find_in_path(name, path, default=None):
111 '''find name in search path. path can be string (will be split
111 '''find name in search path. path can be string (will be split
112 with os.pathsep), or iterable thing that returns strings. if name
112 with os.pathsep), or iterable thing that returns strings. if name
113 found, return path to name. else return default.'''
113 found, return path to name. else return default.'''
114 if isinstance(path, str):
114 if isinstance(path, str):
115 path = path.split(os.pathsep)
115 path = path.split(os.pathsep)
116 for p in path:
116 for p in path:
117 p_name = os.path.join(p, name)
117 p_name = os.path.join(p, name)
118 if os.path.exists(p_name):
118 if os.path.exists(p_name):
119 return p_name
119 return p_name
120 return default
120 return default
121
121
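find_in_path is a minimal which(1): it returns the first match on the search path or the default; note that it only checks that the file exists, not that it is executable. Example on a POSIX system, assuming mercurial.util is importable:

# Locate a program on the search path, with a fallback default.
import os
from mercurial import util

print util.find_in_path('sh', os.environ.get('PATH', ''))            # e.g. /bin/sh
print util.find_in_path('no-such-tool', '/bin:/usr/bin', 'missing')  # missing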
122 def binary(s):
122 def binary(s):
123 """return true if a string is binary data using diff's heuristic"""
123 """return true if a string is binary data using diff's heuristic"""
124 if s and '\0' in s[:4096]:
124 if s and '\0' in s[:4096]:
125 return True
125 return True
126 return False
126 return False
127
127
128 def unique(g):
128 def unique(g):
129 """return the uniq elements of iterable g"""
129 """return the uniq elements of iterable g"""
130 seen = {}
130 seen = {}
131 l = []
131 for f in g:
132 for f in g:
132 if f not in seen:
133 if f not in seen:
133 seen[f] = 1
134 seen[f] = 1
134 yield f
135 l.append(f)
136 return l
135
137
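With this change unique collects its results into a list instead of yielding them lazily; order is preserved and duplicates are dropped. A quick check, assuming mercurial.util is importable:

# unique now returns a list rather than a generator.
from mercurial import util

assert util.unique([1, 2, 2, 3, 1]) == [1, 2, 3]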
136 class Abort(Exception):
138 class Abort(Exception):
137 """Raised if a command needs to print an error and exit."""
139 """Raised if a command needs to print an error and exit."""
138
140
139 class UnexpectedOutput(Abort):
141 class UnexpectedOutput(Abort):
140 """Raised to print an error with part of output and exit."""
142 """Raised to print an error with part of output and exit."""
141
143
142 def always(fn): return True
144 def always(fn): return True
143 def never(fn): return False
145 def never(fn): return False
144
146
145 def patkind(name, dflt_pat='glob'):
147 def patkind(name, dflt_pat='glob'):
146 """Split a string into an optional pattern kind prefix and the
148 """Split a string into an optional pattern kind prefix and the
147 actual pattern."""
149 actual pattern."""
148 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
150 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
149 if name.startswith(prefix + ':'): return name.split(':', 1)
151 if name.startswith(prefix + ':'): return name.split(':', 1)
150 return dflt_pat, name
152 return dflt_pat, name
151
153
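patkind splits an optional "kind:" prefix off a pattern and falls back to the supplied default kind. For example, assuming mercurial.util is importable:

# Split pattern-kind prefixes off user-supplied patterns.
from mercurial import util

kind, pat = util.patkind('re:.*\\.py$')       # 're', '.*\\.py$'
kind, pat = util.patkind('glob:*.c')          # 'glob', '*.c'
kind, pat = util.patkind('src/foo.c')         # default kind: 'glob'
kind, pat = util.patkind('docs', 'relpath')   # caller-supplied default: 'relpath'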
152 def globre(pat, head='^', tail='$'):
154 def globre(pat, head='^', tail='$'):
153 "convert a glob pattern into a regexp"
155 "convert a glob pattern into a regexp"
154 i, n = 0, len(pat)
156 i, n = 0, len(pat)
155 res = ''
157 res = ''
156 group = False
158 group = False
157 def peek(): return i < n and pat[i]
159 def peek(): return i < n and pat[i]
158 while i < n:
160 while i < n:
159 c = pat[i]
161 c = pat[i]
160 i = i+1
162 i = i+1
161 if c == '*':
163 if c == '*':
162 if peek() == '*':
164 if peek() == '*':
163 i += 1
165 i += 1
164 res += '.*'
166 res += '.*'
165 else:
167 else:
166 res += '[^/]*'
168 res += '[^/]*'
167 elif c == '?':
169 elif c == '?':
168 res += '.'
170 res += '.'
169 elif c == '[':
171 elif c == '[':
170 j = i
172 j = i
171 if j < n and pat[j] in '!]':
173 if j < n and pat[j] in '!]':
172 j += 1
174 j += 1
173 while j < n and pat[j] != ']':
175 while j < n and pat[j] != ']':
174 j += 1
176 j += 1
175 if j >= n:
177 if j >= n:
176 res += '\\['
178 res += '\\['
177 else:
179 else:
178 stuff = pat[i:j].replace('\\','\\\\')
180 stuff = pat[i:j].replace('\\','\\\\')
179 i = j + 1
181 i = j + 1
180 if stuff[0] == '!':
182 if stuff[0] == '!':
181 stuff = '^' + stuff[1:]
183 stuff = '^' + stuff[1:]
182 elif stuff[0] == '^':
184 elif stuff[0] == '^':
183 stuff = '\\' + stuff
185 stuff = '\\' + stuff
184 res = '%s[%s]' % (res, stuff)
186 res = '%s[%s]' % (res, stuff)
185 elif c == '{':
187 elif c == '{':
186 group = True
188 group = True
187 res += '(?:'
189 res += '(?:'
188 elif c == '}' and group:
190 elif c == '}' and group:
189 res += ')'
191 res += ')'
190 group = False
192 group = False
191 elif c == ',' and group:
193 elif c == ',' and group:
192 res += '|'
194 res += '|'
193 elif c == '\\':
195 elif c == '\\':
194 p = peek()
196 p = peek()
195 if p:
197 if p:
196 i += 1
198 i += 1
197 res += re.escape(p)
199 res += re.escape(p)
198 else:
200 else:
199 res += re.escape(c)
201 res += re.escape(c)
200 else:
202 else:
201 res += re.escape(c)
203 res += re.escape(c)
202 return head + res + tail
204 return head + res + tail
203
205
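A few example translations from globre, with checks that '*' stops at slashes, '**' does not, and '{a,b}' becomes an alternation; assumes mercurial.util is importable:

# Glob-to-regexp translation examples.
import re
from mercurial import util

print util.globre('*.py')        # ^[^/]*\.py$
print util.globre('{a,b}.txt')   # ^(?:a|b)\.txt$

assert re.match(util.globre('*.py'), 'foo.py')
assert not re.match(util.globre('*.py'), 'sub/foo.py')   # '*' does not cross '/'
assert re.match(util.globre('**.py'), 'sub/foo.py')      # '**' does
assert re.match(util.globre('{a,b}.txt'), 'b.txt')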
204 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
206 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
205
207
206 def pathto(n1, n2):
208 def pathto(n1, n2):
207 '''return the relative path from one place to another.
209 '''return the relative path from one place to another.
208 this returns a path in the form used by the local filesystem, not hg.'''
210 this returns a path in the form used by the local filesystem, not hg.'''
209 if not n1: return localpath(n2)
211 if not n1: return localpath(n2)
210 a, b = n1.split('/'), n2.split('/')
212 a, b = n1.split('/'), n2.split('/')
211 a.reverse()
213 a.reverse()
212 b.reverse()
214 b.reverse()
213 while a and b and a[-1] == b[-1]:
215 while a and b and a[-1] == b[-1]:
214 a.pop()
216 a.pop()
215 b.pop()
217 b.pop()
216 b.reverse()
218 b.reverse()
217 return os.sep.join((['..'] * len(a)) + b)
219 return os.sep.join((['..'] * len(a)) + b)
218
220
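pathto climbs out of the common prefix and back down into the target, returning a path in local-filesystem form. On a POSIX system, assuming mercurial.util is importable:

# Relative paths between two repository-relative locations.
from mercurial import util

print util.pathto('foo/bar', 'foo/baz/quux')   # ../baz/quux
print util.pathto('a/b/c', 'a/readme')         # ../../readme
print util.pathto('', 'docs/index.txt')        # docs/index.txt (empty source)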
219 def canonpath(root, cwd, myname):
221 def canonpath(root, cwd, myname):
220 """return the canonical path of myname, given cwd and root"""
222 """return the canonical path of myname, given cwd and root"""
221 if root == os.sep:
223 if root == os.sep:
222 rootsep = os.sep
224 rootsep = os.sep
223 elif root.endswith(os.sep):
225 elif root.endswith(os.sep):
224 rootsep = root
226 rootsep = root
225 else:
227 else:
226 rootsep = root + os.sep
228 rootsep = root + os.sep
227 name = myname
229 name = myname
228 if not os.path.isabs(name):
230 if not os.path.isabs(name):
229 name = os.path.join(root, cwd, name)
231 name = os.path.join(root, cwd, name)
230 name = os.path.normpath(name)
232 name = os.path.normpath(name)
231 if name != rootsep and name.startswith(rootsep):
233 if name != rootsep and name.startswith(rootsep):
232 name = name[len(rootsep):]
234 name = name[len(rootsep):]
233 audit_path(name)
235 audit_path(name)
234 return pconvert(name)
236 return pconvert(name)
235 elif name == root:
237 elif name == root:
236 return ''
238 return ''
237 else:
239 else:
238 # Determine whether `name' is in the hierarchy at or beneath `root',
240 # Determine whether `name' is in the hierarchy at or beneath `root',
239 # by iterating name=dirname(name) until that causes no change (can't
241 # by iterating name=dirname(name) until that causes no change (can't
240 # check name == '/', because that doesn't work on windows). For each
242 # check name == '/', because that doesn't work on windows). For each
241 # `name', compare dev/inode numbers. If they match, the list `rel'
243 # `name', compare dev/inode numbers. If they match, the list `rel'
242 # holds the reversed list of components making up the relative file
244 # holds the reversed list of components making up the relative file
243 # name we want.
245 # name we want.
244 root_st = os.stat(root)
246 root_st = os.stat(root)
245 rel = []
247 rel = []
246 while True:
248 while True:
247 try:
249 try:
248 name_st = os.stat(name)
250 name_st = os.stat(name)
249 except OSError:
251 except OSError:
250 break
252 break
251 if samestat(name_st, root_st):
253 if samestat(name_st, root_st):
252 rel.reverse()
254 rel.reverse()
253 name = os.path.join(*rel)
255 name = os.path.join(*rel)
254 audit_path(name)
256 audit_path(name)
255 return pconvert(name)
257 return pconvert(name)
256 dirname, basename = os.path.split(name)
258 dirname, basename = os.path.split(name)
257 rel.append(basename)
259 rel.append(basename)
258 if dirname == name:
260 if dirname == name:
259 break
261 break
260 name = dirname
262 name = dirname
261
263
262 raise Abort('%s not under root' % myname)
264 raise Abort('%s not under root' % myname)
263
265
264 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
266 def matcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
265 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
267 return _matcher(canonroot, cwd, names, inc, exc, head, 'glob', src)
266
268
267 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
269 def cmdmatcher(canonroot, cwd='', names=['.'], inc=[], exc=[], head='', src=None):
268 if os.name == 'nt':
270 if os.name == 'nt':
269 dflt_pat = 'glob'
271 dflt_pat = 'glob'
270 else:
272 else:
271 dflt_pat = 'relpath'
273 dflt_pat = 'relpath'
272 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
274 return _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src)
273
275
274 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
276 def _matcher(canonroot, cwd, names, inc, exc, head, dflt_pat, src):
275 """build a function to match a set of file patterns
277 """build a function to match a set of file patterns
276
278
277 arguments:
279 arguments:
278 canonroot - the canonical root of the tree you're matching against
280 canonroot - the canonical root of the tree you're matching against
279 cwd - the current working directory, if relevant
281 cwd - the current working directory, if relevant
280 names - patterns to find
282 names - patterns to find
281 inc - patterns to include
283 inc - patterns to include
282 exc - patterns to exclude
284 exc - patterns to exclude
283 head - a regex to prepend to patterns to control whether a match is rooted
285 head - a regex to prepend to patterns to control whether a match is rooted
284
286
285 a pattern is one of:
287 a pattern is one of:
286 'glob:<rooted glob>'
288 'glob:<rooted glob>'
287 're:<rooted regexp>'
289 're:<rooted regexp>'
288 'path:<rooted path>'
290 'path:<rooted path>'
289 'relglob:<relative glob>'
291 'relglob:<relative glob>'
290 'relpath:<relative path>'
292 'relpath:<relative path>'
291 'relre:<relative regexp>'
293 'relre:<relative regexp>'
292 '<rooted path or regexp>'
294 '<rooted path or regexp>'
293
295
294 returns:
296 returns:
295 a 3-tuple containing
297 a 3-tuple containing
296 - list of explicit non-pattern names passed in
298 - list of explicit non-pattern names passed in
297 - a bool match(filename) function
299 - a bool match(filename) function
298 - a bool indicating if any patterns were passed in
300 - a bool indicating if any patterns were passed in
299
301
300 todo:
302 todo:
301 make head regex a rooted bool
303 make head regex a rooted bool
302 """
304 """
303
305
304 def contains_glob(name):
306 def contains_glob(name):
305 for c in name:
307 for c in name:
306 if c in _globchars: return True
308 if c in _globchars: return True
307 return False
309 return False
308
310
309 def regex(kind, name, tail):
311 def regex(kind, name, tail):
310 '''convert a pattern into a regular expression'''
312 '''convert a pattern into a regular expression'''
311 if kind == 're':
313 if kind == 're':
312 return name
314 return name
313 elif kind == 'path':
315 elif kind == 'path':
314 return '^' + re.escape(name) + '(?:/|$)'
316 return '^' + re.escape(name) + '(?:/|$)'
315 elif kind == 'relglob':
317 elif kind == 'relglob':
316 return head + globre(name, '(?:|.*/)', tail)
318 return head + globre(name, '(?:|.*/)', tail)
317 elif kind == 'relpath':
319 elif kind == 'relpath':
318 return head + re.escape(name) + tail
320 return head + re.escape(name) + tail
319 elif kind == 'relre':
321 elif kind == 'relre':
320 if name.startswith('^'):
322 if name.startswith('^'):
321 return name
323 return name
322 return '.*' + name
324 return '.*' + name
323 return head + globre(name, '', tail)
325 return head + globre(name, '', tail)
324
326
325 def matchfn(pats, tail):
327 def matchfn(pats, tail):
326 """build a matching function from a set of patterns"""
328 """build a matching function from a set of patterns"""
327 if not pats:
329 if not pats:
328 return
330 return
329 matches = []
331 matches = []
330 for k, p in pats:
332 for k, p in pats:
331 try:
333 try:
332 pat = '(?:%s)' % regex(k, p, tail)
334 pat = '(?:%s)' % regex(k, p, tail)
333 matches.append(re.compile(pat).match)
335 matches.append(re.compile(pat).match)
334 except re.error:
336 except re.error:
335 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
337 if src: raise Abort("%s: invalid pattern (%s): %s" % (src, k, p))
336 else: raise Abort("invalid pattern (%s): %s" % (k, p))
338 else: raise Abort("invalid pattern (%s): %s" % (k, p))
337
339
338 def buildfn(text):
340 def buildfn(text):
339 for m in matches:
341 for m in matches:
340 r = m(text)
342 r = m(text)
341 if r:
343 if r:
342 return r
344 return r
343
345
344 return buildfn
346 return buildfn
345
347
346 def globprefix(pat):
348 def globprefix(pat):
347 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
349 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
348 root = []
350 root = []
349 for p in pat.split(os.sep):
351 for p in pat.split(os.sep):
350 if contains_glob(p): break
352 if contains_glob(p): break
351 root.append(p)
353 root.append(p)
352 return '/'.join(root)
354 return '/'.join(root)
353
355
354 pats = []
356 pats = []
355 files = []
357 files = []
356 roots = []
358 roots = []
357 for kind, name in [patkind(p, dflt_pat) for p in names]:
359 for kind, name in [patkind(p, dflt_pat) for p in names]:
358 if kind in ('glob', 'relpath'):
360 if kind in ('glob', 'relpath'):
359 name = canonpath(canonroot, cwd, name)
361 name = canonpath(canonroot, cwd, name)
360 if name == '':
362 if name == '':
361 kind, name = 'glob', '**'
363 kind, name = 'glob', '**'
362 if kind in ('glob', 'path', 're'):
364 if kind in ('glob', 'path', 're'):
363 pats.append((kind, name))
365 pats.append((kind, name))
364 if kind == 'glob':
366 if kind == 'glob':
365 root = globprefix(name)
367 root = globprefix(name)
366 if root: roots.append(root)
368 if root: roots.append(root)
367 elif kind == 'relpath':
369 elif kind == 'relpath':
368 files.append((kind, name))
370 files.append((kind, name))
369 roots.append(name)
371 roots.append(name)
370
372
371 patmatch = matchfn(pats, '$') or always
373 patmatch = matchfn(pats, '$') or always
372 filematch = matchfn(files, '(?:/|$)') or always
374 filematch = matchfn(files, '(?:/|$)') or always
373 incmatch = always
375 incmatch = always
374 if inc:
376 if inc:
375 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
377 inckinds = [patkind(canonpath(canonroot, cwd, i)) for i in inc]
376 incmatch = matchfn(inckinds, '(?:/|$)')
378 incmatch = matchfn(inckinds, '(?:/|$)')
377 excmatch = lambda fn: False
379 excmatch = lambda fn: False
378 if exc:
380 if exc:
379 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
381 exckinds = [patkind(canonpath(canonroot, cwd, x)) for x in exc]
380 excmatch = matchfn(exckinds, '(?:/|$)')
382 excmatch = matchfn(exckinds, '(?:/|$)')
381
383
382 return (roots,
384 return (roots,
383 lambda fn: (incmatch(fn) and not excmatch(fn) and
385 lambda fn: (incmatch(fn) and not excmatch(fn) and
384 (fn.endswith('/') or
386 (fn.endswith('/') or
385 (not pats and not files) or
387 (not pats and not files) or
386 (pats and patmatch(fn)) or
388 (pats and patmatch(fn)) or
387 (files and filematch(fn)))),
389 (files and filematch(fn)))),
388 (inc or exc or (pats and pats != [('glob', '**')])) and True)
390 (inc or exc or (pats and pats != [('glob', '**')])) and True)
389
391
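matcher wraps _matcher and returns the (roots, match, anypats) triple described in the docstring above. A small usage sketch on a POSIX system (the repository root here is made up and does not need to exist for glob patterns):

# Build a match function for a glob pattern and probe it.
from mercurial import util

roots, match, anypats = util.matcher('/repo', '', ['glob:src/*.py'], [], [])
print roots                          # ['src'] - non-glob prefix of the pattern
print bool(match('src/foo.py'))      # True
print bool(match('src/sub/foo.py'))  # False - '*' does not cross directories
print anypats                        # True - a real pattern was supplied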
390 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
392 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
391 '''enhanced shell command execution.
393 '''enhanced shell command execution.
392 run with environment maybe modified, maybe in different dir.
394 run with environment maybe modified, maybe in different dir.
393
395
394 if command fails and onerr is None, return status. if ui object,
396 if command fails and onerr is None, return status. if ui object,
395 print error message and return status, else raise onerr object as
397 print error message and return status, else raise onerr object as
396 exception.'''
398 exception.'''
397 def py2shell(val):
399 def py2shell(val):
398 'convert python object into string that is useful to shell'
400 'convert python object into string that is useful to shell'
399 if val in (None, False):
401 if val in (None, False):
400 return '0'
402 return '0'
401 if val == True:
403 if val == True:
402 return '1'
404 return '1'
403 return str(val)
405 return str(val)
404 oldenv = {}
406 oldenv = {}
405 for k in environ:
407 for k in environ:
406 oldenv[k] = os.environ.get(k)
408 oldenv[k] = os.environ.get(k)
407 if cwd is not None:
409 if cwd is not None:
408 oldcwd = os.getcwd()
410 oldcwd = os.getcwd()
409 try:
411 try:
410 for k, v in environ.iteritems():
412 for k, v in environ.iteritems():
411 os.environ[k] = py2shell(v)
413 os.environ[k] = py2shell(v)
412 if cwd is not None and oldcwd != cwd:
414 if cwd is not None and oldcwd != cwd:
413 os.chdir(cwd)
415 os.chdir(cwd)
414 rc = os.system(cmd)
416 rc = os.system(cmd)
415 if rc and onerr:
417 if rc and onerr:
416 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
418 errmsg = '%s %s' % (os.path.basename(cmd.split(None, 1)[0]),
417 explain_exit(rc)[0])
419 explain_exit(rc)[0])
418 if errprefix:
420 if errprefix:
419 errmsg = '%s: %s' % (errprefix, errmsg)
421 errmsg = '%s: %s' % (errprefix, errmsg)
420 try:
422 try:
421 onerr.warn(errmsg + '\n')
423 onerr.warn(errmsg + '\n')
422 except AttributeError:
424 except AttributeError:
423 raise onerr(errmsg)
425 raise onerr(errmsg)
424 return rc
426 return rc
425 finally:
427 finally:
426 for k, v in oldenv.iteritems():
428 for k, v in oldenv.iteritems():
427 if v is None:
429 if v is None:
428 del os.environ[k]
430 del os.environ[k]
429 else:
431 else:
430 os.environ[k] = v
432 os.environ[k] = v
431 if cwd is not None and oldcwd != cwd:
433 if cwd is not None and oldcwd != cwd:
432 os.chdir(oldcwd)
434 os.chdir(oldcwd)
433
435
434 def rename(src, dst):
436 def rename(src, dst):
435 """forcibly rename a file"""
437 """forcibly rename a file"""
436 try:
438 try:
437 os.rename(src, dst)
439 os.rename(src, dst)
438 except OSError, err:
440 except OSError, err:
439 # on windows, rename to existing file is not allowed, so we
441 # on windows, rename to existing file is not allowed, so we
440 # must delete destination first. but if file is open, unlink
442 # must delete destination first. but if file is open, unlink
441 # schedules it for delete but does not delete it. rename
443 # schedules it for delete but does not delete it. rename
442 # happens immediately even for open files, so we create
444 # happens immediately even for open files, so we create
443 # temporary file, delete it, rename destination to that name,
445 # temporary file, delete it, rename destination to that name,
444 # then delete that. then rename is safe to do.
446 # then delete that. then rename is safe to do.
445 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
447 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
446 os.close(fd)
448 os.close(fd)
447 os.unlink(temp)
449 os.unlink(temp)
448 os.rename(dst, temp)
450 os.rename(dst, temp)
449 os.unlink(temp)
451 os.unlink(temp)
450 os.rename(src, dst)
452 os.rename(src, dst)
451
453
452 def unlink(f):
454 def unlink(f):
453 """unlink and remove the directory if it is empty"""
455 """unlink and remove the directory if it is empty"""
454 os.unlink(f)
456 os.unlink(f)
455 # try removing directories that might now be empty
457 # try removing directories that might now be empty
456 try:
458 try:
457 os.removedirs(os.path.dirname(f))
459 os.removedirs(os.path.dirname(f))
458 except OSError:
460 except OSError:
459 pass
461 pass
460
462
461 def copyfiles(src, dst, hardlink=None):
463 def copyfiles(src, dst, hardlink=None):
462 """Copy a directory tree using hardlinks if possible"""
464 """Copy a directory tree using hardlinks if possible"""
463
465
464 if hardlink is None:
466 if hardlink is None:
465 hardlink = (os.stat(src).st_dev ==
467 hardlink = (os.stat(src).st_dev ==
466 os.stat(os.path.dirname(dst)).st_dev)
468 os.stat(os.path.dirname(dst)).st_dev)
467
469
468 if os.path.isdir(src):
470 if os.path.isdir(src):
469 os.mkdir(dst)
471 os.mkdir(dst)
470 for name in os.listdir(src):
472 for name in os.listdir(src):
471 srcname = os.path.join(src, name)
473 srcname = os.path.join(src, name)
472 dstname = os.path.join(dst, name)
474 dstname = os.path.join(dst, name)
473 copyfiles(srcname, dstname, hardlink)
475 copyfiles(srcname, dstname, hardlink)
474 else:
476 else:
475 if hardlink:
477 if hardlink:
476 try:
478 try:
477 os_link(src, dst)
479 os_link(src, dst)
478 except (IOError, OSError):
480 except (IOError, OSError):
479 hardlink = False
481 hardlink = False
480 shutil.copy(src, dst)
482 shutil.copy(src, dst)
481 else:
483 else:
482 shutil.copy(src, dst)
484 shutil.copy(src, dst)
483
485
484 def audit_path(path):
486 def audit_path(path):
485 """Abort if path contains dangerous components"""
487 """Abort if path contains dangerous components"""
486 parts = os.path.normcase(path).split(os.sep)
488 parts = os.path.normcase(path).split(os.sep)
487 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
489 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
488 or os.pardir in parts):
490 or os.pardir in parts):
489 raise Abort(_("path contains illegal component: %s\n") % path)
491 raise Abort(_("path contains illegal component: %s\n") % path)
490
492
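audit_path rejects anything that escapes the working directory via '..', is absolute, or reaches into .hg. A quick POSIX check, assuming mercurial.util is importable:

# Safe paths pass silently; dangerous ones raise util.Abort.
from mercurial import util

util.audit_path('src/foo.py')        # fine, returns None
for bad in ('../escape', '/etc/passwd', '.hg/hgrc'):
    try:
        util.audit_path(bad)
    except util.Abort:
        print 'rejected:', bad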
491 def _makelock_file(info, pathname):
493 def _makelock_file(info, pathname):
492 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
494 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
493 os.write(ld, info)
495 os.write(ld, info)
494 os.close(ld)
496 os.close(ld)
495
497
496 def _readlock_file(pathname):
498 def _readlock_file(pathname):
497 return posixfile(pathname).read()
499 return posixfile(pathname).read()
498
500
499 def nlinks(pathname):
501 def nlinks(pathname):
500 """Return number of hardlinks for the given file."""
502 """Return number of hardlinks for the given file."""
501 return os.lstat(pathname).st_nlink
503 return os.lstat(pathname).st_nlink
502
504
503 if hasattr(os, 'link'):
505 if hasattr(os, 'link'):
504 os_link = os.link
506 os_link = os.link
505 else:
507 else:
506 def os_link(src, dst):
508 def os_link(src, dst):
507 raise OSError(0, _("Hardlinks not supported"))
509 raise OSError(0, _("Hardlinks not supported"))
508
510
509 def fstat(fp):
511 def fstat(fp):
510 '''stat file object that may not have fileno method.'''
512 '''stat file object that may not have fileno method.'''
511 try:
513 try:
512 return os.fstat(fp.fileno())
514 return os.fstat(fp.fileno())
513 except AttributeError:
515 except AttributeError:
514 return os.stat(fp.name)
516 return os.stat(fp.name)
515
517
516 posixfile = file
518 posixfile = file
517
519
518 def is_win_9x():
520 def is_win_9x():
519 '''return true if run on windows 95, 98 or me.'''
521 '''return true if run on windows 95, 98 or me.'''
520 try:
522 try:
521 return sys.getwindowsversion()[3] == 1
523 return sys.getwindowsversion()[3] == 1
522 except AttributeError:
524 except AttributeError:
523 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
525 return os.name == 'nt' and 'command' in os.environ.get('comspec', '')
524
526
525 def username(uid=None):
527 def username(uid=None):
526 """Return the name of the user with the given uid.
528 """Return the name of the user with the given uid.
527
529
528 If uid is None, return the name of the current user."""
530 If uid is None, return the name of the current user."""
529 try:
531 try:
530 import pwd
532 import pwd
531 if uid is None:
533 if uid is None:
532 uid = os.getuid()
534 uid = os.getuid()
533 try:
535 try:
534 return pwd.getpwuid(uid)[0]
536 return pwd.getpwuid(uid)[0]
535 except KeyError:
537 except KeyError:
536 return str(uid)
538 return str(uid)
537 except ImportError:
539 except ImportError:
538 return None
540 return None
539
541
540 def groupname(gid=None):
542 def groupname(gid=None):
541 """Return the name of the group with the given gid.
543 """Return the name of the group with the given gid.
542
544
543 If gid is None, return the name of the current group."""
545 If gid is None, return the name of the current group."""
544 try:
546 try:
545 import grp
547 import grp
546 if gid is None:
548 if gid is None:
547 gid = os.getgid()
549 gid = os.getgid()
548 try:
550 try:
549 return grp.getgrgid(gid)[0]
551 return grp.getgrgid(gid)[0]
550 except KeyError:
552 except KeyError:
551 return str(gid)
553 return str(gid)
552 except ImportError:
554 except ImportError:
553 return None
555 return None
554
556
555 # Platform specific variants
557 # Platform specific variants
556 if os.name == 'nt':
558 if os.name == 'nt':
557 demandload(globals(), "msvcrt")
559 demandload(globals(), "msvcrt")
558 nulldev = 'NUL:'
560 nulldev = 'NUL:'
559
561
560 class winstdout:
562 class winstdout:
561 '''stdout on windows misbehaves if sent through a pipe'''
563 '''stdout on windows misbehaves if sent through a pipe'''
562
564
563 def __init__(self, fp):
565 def __init__(self, fp):
564 self.fp = fp
566 self.fp = fp
565
567
566 def __getattr__(self, key):
568 def __getattr__(self, key):
567 return getattr(self.fp, key)
569 return getattr(self.fp, key)
568
570
569 def close(self):
571 def close(self):
570 try:
572 try:
571 self.fp.close()
573 self.fp.close()
572 except: pass
574 except: pass
573
575
574 def write(self, s):
576 def write(self, s):
575 try:
577 try:
576 return self.fp.write(s)
578 return self.fp.write(s)
577 except IOError, inst:
579 except IOError, inst:
578 if inst.errno != 0: raise
580 if inst.errno != 0: raise
579 self.close()
581 self.close()
580 raise IOError(errno.EPIPE, 'Broken pipe')
582 raise IOError(errno.EPIPE, 'Broken pipe')
581
583
582 sys.stdout = winstdout(sys.stdout)
584 sys.stdout = winstdout(sys.stdout)
583
585
584 def system_rcpath():
586 def system_rcpath():
585 try:
587 try:
586 return system_rcpath_win32()
588 return system_rcpath_win32()
587 except:
589 except:
588 return [r'c:\mercurial\mercurial.ini']
590 return [r'c:\mercurial\mercurial.ini']
589
591
590 def os_rcpath():
592 def os_rcpath():
591 '''return default os-specific hgrc search path'''
593 '''return default os-specific hgrc search path'''
592 path = system_rcpath()
594 path = system_rcpath()
593 path.append(user_rcpath())
595 path.append(user_rcpath())
594 userprofile = os.environ.get('USERPROFILE')
596 userprofile = os.environ.get('USERPROFILE')
595 if userprofile:
597 if userprofile:
596 path.append(os.path.join(userprofile, 'mercurial.ini'))
598 path.append(os.path.join(userprofile, 'mercurial.ini'))
597 return path
599 return path
598
600
599 def user_rcpath():
601 def user_rcpath():
600 '''return os-specific hgrc search path to the user dir'''
602 '''return os-specific hgrc search path to the user dir'''
601 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
603 return os.path.join(os.path.expanduser('~'), 'mercurial.ini')
602
604
603 def parse_patch_output(output_line):
605 def parse_patch_output(output_line):
604 """parses the output produced by patch and returns the file name"""
606 """parses the output produced by patch and returns the file name"""
605 pf = output_line[14:]
607 pf = output_line[14:]
606 if pf[0] == '`':
608 if pf[0] == '`':
607 pf = pf[1:-1] # Remove the quotes
609 pf = pf[1:-1] # Remove the quotes
608 return pf
610 return pf
609
611
610 def testpid(pid):
612 def testpid(pid):
611 '''return False if pid dead, True if running or not known'''
613 '''return False if pid dead, True if running or not known'''
612 return True
614 return True
613
615
614 def is_exec(f, last):
616 def is_exec(f, last):
615 return last
617 return last
616
618
617 def set_exec(f, mode):
619 def set_exec(f, mode):
618 pass
620 pass
619
621
620 def set_binary(fd):
622 def set_binary(fd):
621 msvcrt.setmode(fd.fileno(), os.O_BINARY)
623 msvcrt.setmode(fd.fileno(), os.O_BINARY)
622
624
623 def pconvert(path):
625 def pconvert(path):
624 return path.replace("\\", "/")
626 return path.replace("\\", "/")
625
627
626 def localpath(path):
628 def localpath(path):
627 return path.replace('/', '\\')
629 return path.replace('/', '\\')
628
630
629 def normpath(path):
631 def normpath(path):
630 return pconvert(os.path.normpath(path))
632 return pconvert(os.path.normpath(path))
631
633
632 makelock = _makelock_file
634 makelock = _makelock_file
633 readlock = _readlock_file
635 readlock = _readlock_file
634
636
635 def samestat(s1, s2):
637 def samestat(s1, s2):
636 return False
638 return False
637
639
638 def shellquote(s):
640 def shellquote(s):
639 return '"%s"' % s.replace('"', '\\"')
641 return '"%s"' % s.replace('"', '\\"')
640
642
641 def explain_exit(code):
643 def explain_exit(code):
642 return _("exited with status %d") % code, code
644 return _("exited with status %d") % code, code
643
645
644 try:
646 try:
645 # override functions with win32 versions if possible
647 # override functions with win32 versions if possible
646 from util_win32 import *
648 from util_win32 import *
647 if not is_win_9x():
649 if not is_win_9x():
648 posixfile = posixfile_nt
650 posixfile = posixfile_nt
649 except ImportError:
651 except ImportError:
650 pass
652 pass
651
653
652 else:
654 else:
653 nulldev = '/dev/null'
655 nulldev = '/dev/null'
654
656
655 def rcfiles(path):
657 def rcfiles(path):
656 rcs = [os.path.join(path, 'hgrc')]
658 rcs = [os.path.join(path, 'hgrc')]
657 rcdir = os.path.join(path, 'hgrc.d')
659 rcdir = os.path.join(path, 'hgrc.d')
658 try:
660 try:
659 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
661 rcs.extend([os.path.join(rcdir, f) for f in os.listdir(rcdir)
660 if f.endswith(".rc")])
662 if f.endswith(".rc")])
661 except OSError:
663 except OSError:
662 pass
664 pass
663 return rcs
665 return rcs
664
666
665 def os_rcpath():
667 def os_rcpath():
666 '''return default os-specific hgrc search path'''
668 '''return default os-specific hgrc search path'''
667 path = []
669 path = []
668 # old mod_python does not set sys.argv
670 # old mod_python does not set sys.argv
669 if len(getattr(sys, 'argv', [])) > 0:
671 if len(getattr(sys, 'argv', [])) > 0:
670 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
672 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
671 '/../etc/mercurial'))
673 '/../etc/mercurial'))
672 path.extend(rcfiles('/etc/mercurial'))
674 path.extend(rcfiles('/etc/mercurial'))
673 path.append(os.path.expanduser('~/.hgrc'))
675 path.append(os.path.expanduser('~/.hgrc'))
674 path = [os.path.normpath(f) for f in path]
676 path = [os.path.normpath(f) for f in path]
675 return path
677 return path
676
678
677 def parse_patch_output(output_line):
679 def parse_patch_output(output_line):
678 """parses the output produced by patch and returns the file name"""
680 """parses the output produced by patch and returns the file name"""
679 pf = output_line[14:]
681 pf = output_line[14:]
680 if pf.startswith("'") and pf.endswith("'") and " " in pf:
682 if pf.startswith("'") and pf.endswith("'") and " " in pf:
681 pf = pf[1:-1] # Remove the quotes
683 pf = pf[1:-1] # Remove the quotes
682 return pf
684 return pf
683
685
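A brief sketch of what the slicing above relies on: GNU patch reports each file as "patching file NAME", and that prefix is exactly 14 characters, so everything after it is the file name (quoted when it contains spaces). The sample lines are hypothetical.

# illustration only
assert parse_patch_output("patching file hgext/mq.py") == "hgext/mq.py"
assert parse_patch_output("patching file 'name with spaces'") == "name with spaces"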
684 def is_exec(f, last):
686 def is_exec(f, last):
685 """check whether a file is executable"""
687 """check whether a file is executable"""
686 return (os.lstat(f).st_mode & 0100 != 0)
688 return (os.lstat(f).st_mode & 0100 != 0)
687
689
688 def set_exec(f, mode):
690 def set_exec(f, mode):
689 s = os.lstat(f).st_mode
691 s = os.lstat(f).st_mode
690 if (s & 0100 != 0) == mode:
692 if (s & 0100 != 0) == mode:
691 return
693 return
692 if mode:
694 if mode:
693 # Turn on +x for every +r bit when making a file executable
695 # Turn on +x for every +r bit when making a file executable
694 # and obey umask.
696 # and obey umask.
695 umask = os.umask(0)
697 umask = os.umask(0)
696 os.umask(umask)
698 os.umask(umask)
697 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
699 os.chmod(f, s | (s & 0444) >> 2 & ~umask)
698 else:
700 else:
699 os.chmod(f, s & 0666)
701 os.chmod(f, s & 0666)
700
702
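A worked example of the mode arithmetic above (a sketch, not part of the original source): every +r bit is copied down to the matching +x position, then the umask is honoured.

# hypothetical walk-through: file mode 0644, umask 022
#   s & 0444          -> 0444   (the read bits)
#   (s & 0444) >> 2   -> 0111   (candidate execute bits)
#   0111 & ~022       -> 0111   (this umask removes none of them)
#   s | 0111          -> 0755   (mode handed to os.chmod)
# with umask 027 the other-execute bit is masked off, giving 0754 instead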
701 def set_binary(fd):
703 def set_binary(fd):
702 pass
704 pass
703
705
704 def pconvert(path):
706 def pconvert(path):
705 return path
707 return path
706
708
707 def localpath(path):
709 def localpath(path):
708 return path
710 return path
709
711
710 normpath = os.path.normpath
712 normpath = os.path.normpath
711 samestat = os.path.samestat
713 samestat = os.path.samestat
712
714
713 def makelock(info, pathname):
715 def makelock(info, pathname):
714 try:
716 try:
715 os.symlink(info, pathname)
717 os.symlink(info, pathname)
716 except OSError, why:
718 except OSError, why:
717 if why.errno == errno.EEXIST:
719 if why.errno == errno.EEXIST:
718 raise
720 raise
719 else:
721 else:
720 _makelock_file(info, pathname)
722 _makelock_file(info, pathname)
721
723
722 def readlock(pathname):
724 def readlock(pathname):
723 try:
725 try:
724 return os.readlink(pathname)
726 return os.readlink(pathname)
725 except OSError, why:
727 except OSError, why:
726 if why.errno == errno.EINVAL:
728 if why.errno == errno.EINVAL:
727 return _readlock_file(pathname)
729 return _readlock_file(pathname)
728 else:
730 else:
729 raise
731 raise
730
732
731 def shellquote(s):
733 def shellquote(s):
732 return "'%s'" % s.replace("'", "'\\''")
734 return "'%s'" % s.replace("'", "'\\''")
733
735
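A small sketch of the quoting rule above: the argument is wrapped in single quotes and each embedded single quote is closed, escaped, and reopened.

# illustration only
assert shellquote("plain") == "'plain'"
assert shellquote("it's") == "'it'\\''s'"   # i.e. 'it'\''s' on the command line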
734 def testpid(pid):
736 def testpid(pid):
735 '''return False if pid dead, True if running or not sure'''
737 '''return False if pid dead, True if running or not sure'''
736 try:
738 try:
737 os.kill(pid, 0)
739 os.kill(pid, 0)
738 return True
740 return True
739 except OSError, inst:
741 except OSError, inst:
740 return inst.errno != errno.ESRCH
742 return inst.errno != errno.ESRCH
741
743
742 def explain_exit(code):
744 def explain_exit(code):
743 """return a 2-tuple (desc, code) describing a process's status"""
745 """return a 2-tuple (desc, code) describing a process's status"""
744 if os.WIFEXITED(code):
746 if os.WIFEXITED(code):
745 val = os.WEXITSTATUS(code)
747 val = os.WEXITSTATUS(code)
746 return _("exited with status %d") % val, val
748 return _("exited with status %d") % val, val
747 elif os.WIFSIGNALED(code):
749 elif os.WIFSIGNALED(code):
748 val = os.WTERMSIG(code)
750 val = os.WTERMSIG(code)
749 return _("killed by signal %d") % val, val
751 return _("killed by signal %d") % val, val
750 elif os.WIFSTOPPED(code):
752 elif os.WIFSTOPPED(code):
751 val = os.WSTOPSIG(code)
753 val = os.WSTOPSIG(code)
752 return _("stopped by signal %d") % val, val
754 return _("stopped by signal %d") % val, val
753 raise ValueError(_("invalid exit code"))
755 raise ValueError(_("invalid exit code"))
754
756
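A hedged example of the wait-status decoding above, assuming the usual POSIX encoding where the high byte holds the exit code and the low byte the terminating signal:

# illustration only
# explain_exit(0x0100) -> ('exited with status 1', 1)
# explain_exit(0x0009) -> ('killed by signal 9', 9)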
755 def opener(base, audit=True):
757 def opener(base, audit=True):
756 """
758 """
757 return a function that opens files relative to base
759 return a function that opens files relative to base
758
760
759 this function is used to hide the details of COW semantics and
761 this function is used to hide the details of COW semantics and
760 remote file access from higher level code.
762 remote file access from higher level code.
761 """
763 """
762 p = base
764 p = base
763 audit_p = audit
765 audit_p = audit
764
766
765 def mktempcopy(name):
767 def mktempcopy(name):
766 d, fn = os.path.split(name)
768 d, fn = os.path.split(name)
767 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
769 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
768 os.close(fd)
770 os.close(fd)
769 ofp = posixfile(temp, "wb")
771 ofp = posixfile(temp, "wb")
770 try:
772 try:
771 try:
773 try:
772 ifp = posixfile(name, "rb")
774 ifp = posixfile(name, "rb")
773 except IOError, inst:
775 except IOError, inst:
774 if not getattr(inst, 'filename', None):
776 if not getattr(inst, 'filename', None):
775 inst.filename = name
777 inst.filename = name
776 raise
778 raise
777 for chunk in filechunkiter(ifp):
779 for chunk in filechunkiter(ifp):
778 ofp.write(chunk)
780 ofp.write(chunk)
779 ifp.close()
781 ifp.close()
780 ofp.close()
782 ofp.close()
781 except:
783 except:
782 try: os.unlink(temp)
784 try: os.unlink(temp)
783 except: pass
785 except: pass
784 raise
786 raise
785 st = os.lstat(name)
787 st = os.lstat(name)
786 os.chmod(temp, st.st_mode)
788 os.chmod(temp, st.st_mode)
787 return temp
789 return temp
788
790
789 class atomictempfile(posixfile):
791 class atomictempfile(posixfile):
790 """the file will only be copied when rename is called"""
792 """the file will only be copied when rename is called"""
791 def __init__(self, name, mode):
793 def __init__(self, name, mode):
792 self.__name = name
794 self.__name = name
793 self.temp = mktempcopy(name)
795 self.temp = mktempcopy(name)
794 posixfile.__init__(self, self.temp, mode)
796 posixfile.__init__(self, self.temp, mode)
795 def rename(self):
797 def rename(self):
796 if not self.closed:
798 if not self.closed:
797 posixfile.close(self)
799 posixfile.close(self)
798 rename(self.temp, localpath(self.__name))
800 rename(self.temp, localpath(self.__name))
799 def __del__(self):
801 def __del__(self):
800 if not self.closed:
802 if not self.closed:
801 try:
803 try:
802 os.unlink(self.temp)
804 os.unlink(self.temp)
803 except: pass
805 except: pass
804 posixfile.close(self)
806 posixfile.close(self)
805
807
806 class atomicfile(atomictempfile):
808 class atomicfile(atomictempfile):
807 """the file will only be copied on close"""
809 """the file will only be copied on close"""
808 def __init__(self, name, mode):
810 def __init__(self, name, mode):
809 atomictempfile.__init__(self, name, mode)
811 atomictempfile.__init__(self, name, mode)
810 def close(self):
812 def close(self):
811 self.rename()
813 self.rename()
812 def __del__(self):
814 def __del__(self):
813 self.rename()
815 self.rename()
814
816
815 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
817 def o(path, mode="r", text=False, atomic=False, atomictemp=False):
816 if audit_p:
818 if audit_p:
817 audit_path(path)
819 audit_path(path)
818 f = os.path.join(p, path)
820 f = os.path.join(p, path)
819
821
820 if not text:
822 if not text:
821 mode += "b" # for that other OS
823 mode += "b" # for that other OS
822
824
823 if mode[0] != "r":
825 if mode[0] != "r":
824 try:
826 try:
825 nlink = nlinks(f)
827 nlink = nlinks(f)
826 except OSError:
828 except OSError:
827 d = os.path.dirname(f)
829 d = os.path.dirname(f)
828 if not os.path.isdir(d):
830 if not os.path.isdir(d):
829 os.makedirs(d)
831 os.makedirs(d)
830 else:
832 else:
831 if atomic:
833 if atomic:
832 return atomicfile(f, mode)
834 return atomicfile(f, mode)
833 elif atomictemp:
835 elif atomictemp:
834 return atomictempfile(f, mode)
836 return atomictempfile(f, mode)
835 if nlink > 1:
837 if nlink > 1:
836 rename(mktempcopy(f), f)
838 rename(mktempcopy(f), f)
837 return posixfile(f, mode)
839 return posixfile(f, mode)
838
840
839 return o
841 return o
840
842
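A usage sketch for opener() with hypothetical paths (the repository location and file names below are assumptions): callers receive the inner o() function and treat it as a constrained open().

# hypothetical usage
opn = opener('/tmp/repo/.hg')        # paths are audited and joined onto .hg
f = opn('store/newfile', 'w')        # missing parent directories are created
f.write('data')
f.close()
# for a file that already exists, atomictemp=True writes to a temporary copy
# and only moves it into place when rename() is called on the returned object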
841 class chunkbuffer(object):
843 class chunkbuffer(object):
842 """Allow arbitrary sized chunks of data to be efficiently read from an
844 """Allow arbitrary sized chunks of data to be efficiently read from an
843 iterator over chunks of arbitrary size."""
845 iterator over chunks of arbitrary size."""
844
846
845 def __init__(self, in_iter, targetsize = 2**16):
847 def __init__(self, in_iter, targetsize = 2**16):
846 """in_iter is the iterator that's iterating over the input chunks.
848 """in_iter is the iterator that's iterating over the input chunks.
847 targetsize is how big a buffer to try to maintain."""
849 targetsize is how big a buffer to try to maintain."""
848 self.in_iter = iter(in_iter)
850 self.in_iter = iter(in_iter)
849 self.buf = ''
851 self.buf = ''
850 self.targetsize = int(targetsize)
852 self.targetsize = int(targetsize)
851 if self.targetsize <= 0:
853 if self.targetsize <= 0:
852 raise ValueError(_("targetsize must be greater than 0, was %d") %
854 raise ValueError(_("targetsize must be greater than 0, was %d") %
853 targetsize)
855 targetsize)
854 self.iterempty = False
856 self.iterempty = False
855
857
856 def fillbuf(self):
858 def fillbuf(self):
857 """Ignore target size; read every chunk from iterator until empty."""
859 """Ignore target size; read every chunk from iterator until empty."""
858 if not self.iterempty:
860 if not self.iterempty:
859 collector = cStringIO.StringIO()
861 collector = cStringIO.StringIO()
860 collector.write(self.buf)
862 collector.write(self.buf)
861 for ch in self.in_iter:
863 for ch in self.in_iter:
862 collector.write(ch)
864 collector.write(ch)
863 self.buf = collector.getvalue()
865 self.buf = collector.getvalue()
864 self.iterempty = True
866 self.iterempty = True
865
867
866 def read(self, l):
868 def read(self, l):
867 """Read L bytes of data from the iterator of chunks of data.
869 """Read L bytes of data from the iterator of chunks of data.
868 Returns less than L bytes if the iterator runs dry."""
870 Returns less than L bytes if the iterator runs dry."""
869 if l > len(self.buf) and not self.iterempty:
871 if l > len(self.buf) and not self.iterempty:
870 # Clamp to a multiple of self.targetsize
872 # Clamp to a multiple of self.targetsize
871 targetsize = self.targetsize * ((l // self.targetsize) + 1)
873 targetsize = self.targetsize * ((l // self.targetsize) + 1)
872 collector = cStringIO.StringIO()
874 collector = cStringIO.StringIO()
873 collector.write(self.buf)
875 collector.write(self.buf)
874 collected = len(self.buf)
876 collected = len(self.buf)
875 for chunk in self.in_iter:
877 for chunk in self.in_iter:
876 collector.write(chunk)
878 collector.write(chunk)
877 collected += len(chunk)
879 collected += len(chunk)
878 if collected >= targetsize:
880 if collected >= targetsize:
879 break
881 break
880 if collected < targetsize:
882 if collected < targetsize:
881 self.iterempty = True
883 self.iterempty = True
882 self.buf = collector.getvalue()
884 self.buf = collector.getvalue()
883 s, self.buf = self.buf[:l], buffer(self.buf, l)
885 s, self.buf = self.buf[:l], buffer(self.buf, l)
884 return s
886 return s
885
887
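A short usage sketch with hypothetical data: chunkbuffer turns an iterator of unevenly sized strings into something readable in fixed-size records.

# illustration only
buf = chunkbuffer(iter(['abc', 'defgh', 'ij']), targetsize=4)
assert buf.read(4) == 'abcd'
assert buf.read(4) == 'efgh'
assert buf.read(4) == 'ij'    # iterator ran dry, so the last read is short
assert buf.read(4) == ''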
886 def filechunkiter(f, size=65536, limit=None):
888 def filechunkiter(f, size=65536, limit=None):
887 """Create a generator that produces the data in the file size
889 """Create a generator that produces the data in the file size
888 (default 65536) bytes at a time, up to optional limit (default is
890 (default 65536) bytes at a time, up to optional limit (default is
889 to read all data). Chunks may be less than size bytes if the
891 to read all data). Chunks may be less than size bytes if the
890 chunk is the last chunk in the file, or the file is a socket or
892 chunk is the last chunk in the file, or the file is a socket or
891 some other type of file that sometimes reads less data than is
893 some other type of file that sometimes reads less data than is
892 requested."""
894 requested."""
893 assert size >= 0
895 assert size >= 0
894 assert limit is None or limit >= 0
896 assert limit is None or limit >= 0
895 while True:
897 while True:
896 if limit is None: nbytes = size
898 if limit is None: nbytes = size
897 else: nbytes = min(limit, size)
899 else: nbytes = min(limit, size)
898 s = nbytes and f.read(nbytes)
900 s = nbytes and f.read(nbytes)
899 if not s: break
901 if not s: break
900 if limit: limit -= len(s)
902 if limit: limit -= len(s)
901 yield s
903 yield s
902
904
903 def makedate():
905 def makedate():
904 lt = time.localtime()
906 lt = time.localtime()
905 if lt[8] == 1 and time.daylight:
907 if lt[8] == 1 and time.daylight:
906 tz = time.altzone
908 tz = time.altzone
907 else:
909 else:
908 tz = time.timezone
910 tz = time.timezone
909 return time.mktime(lt), tz
911 return time.mktime(lt), tz
910
912
911 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
913 def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True):
912 """represent a (unixtime, offset) tuple as a localized time.
914 """represent a (unixtime, offset) tuple as a localized time.
913 unixtime is seconds since the epoch, and offset is the time zone's
915 unixtime is seconds since the epoch, and offset is the time zone's
914 number of seconds away from UTC. if timezone is false, do not
916 number of seconds away from UTC. if timezone is false, do not
915 append time zone to string."""
917 append time zone to string."""
916 t, tz = date or makedate()
918 t, tz = date or makedate()
917 s = time.strftime(format, time.gmtime(float(t) - tz))
919 s = time.strftime(format, time.gmtime(float(t) - tz))
918 if timezone:
920 if timezone:
919 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
921 s += " %+03d%02d" % (-tz / 3600, ((-tz % 3600) / 60))
920 return s
922 return s
921
923
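A hedged example of the (unixtime, offset) convention used above, where offset is the zone's distance from UTC in seconds:

# illustration only
assert datestr((0, 0)) == 'Thu Jan 01 00:00:00 1970 +0000'
assert datestr((0, -7200)) == 'Thu Jan 01 02:00:00 1970 +0200'   # two hours east of UTC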
922 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
924 def strdate(string, format='%a %b %d %H:%M:%S %Y'):
923 """parse a localized time string and return a (unixtime, offset) tuple.
925 """parse a localized time string and return a (unixtime, offset) tuple.
924 if the string cannot be parsed, ValueError is raised."""
926 if the string cannot be parsed, ValueError is raised."""
925 def hastimezone(string):
927 def hastimezone(string):
926 return (string[-4:].isdigit() and
928 return (string[-4:].isdigit() and
927 (string[-5] == '+' or string[-5] == '-') and
929 (string[-5] == '+' or string[-5] == '-') and
928 string[-6].isspace())
930 string[-6].isspace())
929
931
930 # NOTE: unixtime = localunixtime + offset
932 # NOTE: unixtime = localunixtime + offset
931 if hastimezone(string):
933 if hastimezone(string):
932 date, tz = string[:-6], string[-5:]
934 date, tz = string[:-6], string[-5:]
933 tz = int(tz)
935 tz = int(tz)
934 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
936 offset = - 3600 * (tz / 100) - 60 * (tz % 100)
935 else:
937 else:
936 date, offset = string, None
938 date, offset = string, None
937 timetuple = time.strptime(date, format)
939 timetuple = time.strptime(date, format)
938 localunixtime = int(calendar.timegm(timetuple))
940 localunixtime = int(calendar.timegm(timetuple))
939 if offset is None:
941 if offset is None:
940 # local timezone
942 # local timezone
941 unixtime = int(time.mktime(timetuple))
943 unixtime = int(time.mktime(timetuple))
942 offset = unixtime - localunixtime
944 offset = unixtime - localunixtime
943 else:
945 else:
944 unixtime = localunixtime + offset
946 unixtime = localunixtime + offset
945 return unixtime, offset
947 return unixtime, offset
946
948
947 def parsedate(string, formats=None):
949 def parsedate(string, formats=None):
948 """parse a localized time string and return a (unixtime, offset) tuple.
950 """parse a localized time string and return a (unixtime, offset) tuple.
949 The date may be a "unixtime offset" string or in one of the specified
951 The date may be a "unixtime offset" string or in one of the specified
950 formats."""
952 formats."""
951 if not formats:
953 if not formats:
952 formats = defaultdateformats
954 formats = defaultdateformats
953 try:
955 try:
954 when, offset = map(int, string.split(' '))
956 when, offset = map(int, string.split(' '))
955 except ValueError:
957 except ValueError:
956 for format in formats:
958 for format in formats:
957 try:
959 try:
958 when, offset = strdate(string, format)
960 when, offset = strdate(string, format)
959 except ValueError:
961 except ValueError:
960 pass
962 pass
961 else:
963 else:
962 break
964 break
963 else:
965 else:
964 raise ValueError(_('invalid date: %r '
966 raise ValueError(_('invalid date: %r '
965 'see hg(1) manual page for details')
967 'see hg(1) manual page for details')
966 % string)
968 % string)
967 # validate explicit (probably user-specified) date and
969 # validate explicit (probably user-specified) date and
968 # time zone offset. values must fit in signed 32 bits for
970 # time zone offset. values must fit in signed 32 bits for
969 # current 32-bit linux runtimes. timezones go from UTC-12
971 # current 32-bit linux runtimes. timezones go from UTC-12
970 # to UTC+14
972 # to UTC+14
971 if abs(when) > 0x7fffffff:
973 if abs(when) > 0x7fffffff:
972 raise ValueError(_('date exceeds 32 bits: %d') % when)
974 raise ValueError(_('date exceeds 32 bits: %d') % when)
973 if offset < -50400 or offset > 43200:
975 if offset < -50400 or offset > 43200:
974 raise ValueError(_('impossible time zone offset: %d') % offset)
976 raise ValueError(_('impossible time zone offset: %d') % offset)
975 return when, offset
977 return when, offset
976
978
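A sketch of the two accepted spellings, assuming the ctime-style format used throughout this file is among defaultdateformats (defined elsewhere in the module):

# illustration only
assert parsedate('1000000000 -7200') == (1000000000, -7200)      # raw "unixtime offset"
assert parsedate('Thu Jan 01 00:00:00 1970 +0000') == (0, 0)     # falls back to strdate()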
977 def shortuser(user):
979 def shortuser(user):
978 """Return a short representation of a user name or email address."""
980 """Return a short representation of a user name or email address."""
979 f = user.find('@')
981 f = user.find('@')
980 if f >= 0:
982 if f >= 0:
981 user = user[:f]
983 user = user[:f]
982 f = user.find('<')
984 f = user.find('<')
983 if f >= 0:
985 if f >= 0:
984 user = user[f+1:]
986 user = user[f+1:]
985 f = user.find(' ')
987 f = user.find(' ')
986 if f >= 0:
988 if f >= 0:
987 user = user[:f]
989 user = user[:f]
990 f = user.find('.')
991 if f >= 0:
992 user = user[:f]
988 return user
993 return user
989
994
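A brief illustration of the change in this hunk: the added find('.') step also drops everything after the first dot, so a dotted local part now shortens further than it used to.

# illustration only
assert shortuser('Foo Bar <foo.bar@example.com>') == 'foo'   # previously returned 'foo.bar'
assert shortuser('foo@example.com') == 'foo'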
990 def walkrepos(path):
995 def walkrepos(path):
991 '''yield every hg repository under path, recursively.'''
996 '''yield every hg repository under path, recursively.'''
992 def errhandler(err):
997 def errhandler(err):
993 if err.filename == path:
998 if err.filename == path:
994 raise err
999 raise err
995
1000
996 for root, dirs, files in os.walk(path, onerror=errhandler):
1001 for root, dirs, files in os.walk(path, onerror=errhandler):
997 for d in dirs:
1002 for d in dirs:
998 if d == '.hg':
1003 if d == '.hg':
999 yield root
1004 yield root
1000 dirs[:] = []
1005 dirs[:] = []
1001 break
1006 break
1002
1007
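A small usage sketch; the publishing directory below is an assumption:

# hypothetical usage -- collect every repository root under /srv/hg
repos = list(walkrepos('/srv/hg'))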
1003 _rcpath = None
1008 _rcpath = None
1004
1009
1005 def rcpath():
1010 def rcpath():
1006 '''return hgrc search path. if env var HGRCPATH is set, use it.
1011 '''return hgrc search path. if env var HGRCPATH is set, use it.
1007 for each item in path, if directory, use files ending in .rc,
1012 for each item in path, if directory, use files ending in .rc,
1008 else use item.
1013 else use item.
1009 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1014 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1010 if no HGRCPATH, use default os-specific path.'''
1015 if no HGRCPATH, use default os-specific path.'''
1011 global _rcpath
1016 global _rcpath
1012 if _rcpath is None:
1017 if _rcpath is None:
1013 if 'HGRCPATH' in os.environ:
1018 if 'HGRCPATH' in os.environ:
1014 _rcpath = []
1019 _rcpath = []
1015 for p in os.environ['HGRCPATH'].split(os.pathsep):
1020 for p in os.environ['HGRCPATH'].split(os.pathsep):
1016 if not p: continue
1021 if not p: continue
1017 if os.path.isdir(p):
1022 if os.path.isdir(p):
1018 for f in os.listdir(p):
1023 for f in os.listdir(p):
1019 if f.endswith('.rc'):
1024 if f.endswith('.rc'):
1020 _rcpath.append(os.path.join(p, f))
1025 _rcpath.append(os.path.join(p, f))
1021 else:
1026 else:
1022 _rcpath.append(p)
1027 _rcpath.append(p)
1023 else:
1028 else:
1024 _rcpath = os_rcpath()
1029 _rcpath = os_rcpath()
1025 return _rcpath
1030 return _rcpath
1026
1031
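A hedged sketch of the HGRCPATH handling above, with hypothetical paths: directory entries contribute their *.rc files, plain entries are taken verbatim, and an empty variable yields an empty search path.

# illustration only, assuming /etc/custom-hg is a directory containing ui.rc
# HGRCPATH=/etc/custom-hg:/home/alice/extra.hgrc
#     -> ['/etc/custom-hg/ui.rc', '/home/alice/extra.hgrc']
# HGRCPATH= (set but empty)
#     -> []   (only the repository's own .hg/hgrc is read)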
1027 def bytecount(nbytes):
1032 def bytecount(nbytes):
1028 '''return byte count formatted as readable string, with units'''
1033 '''return byte count formatted as readable string, with units'''
1029
1034
1030 units = (
1035 units = (
1031 (100, 1<<30, _('%.0f GB')),
1036 (100, 1<<30, _('%.0f GB')),
1032 (10, 1<<30, _('%.1f GB')),
1037 (10, 1<<30, _('%.1f GB')),
1033 (1, 1<<30, _('%.2f GB')),
1038 (1, 1<<30, _('%.2f GB')),
1034 (100, 1<<20, _('%.0f MB')),
1039 (100, 1<<20, _('%.0f MB')),
1035 (10, 1<<20, _('%.1f MB')),
1040 (10, 1<<20, _('%.1f MB')),
1036 (1, 1<<20, _('%.2f MB')),
1041 (1, 1<<20, _('%.2f MB')),
1037 (100, 1<<10, _('%.0f KB')),
1042 (100, 1<<10, _('%.0f KB')),
1038 (10, 1<<10, _('%.1f KB')),
1043 (10, 1<<10, _('%.1f KB')),
1039 (1, 1<<10, _('%.2f KB')),
1044 (1, 1<<10, _('%.2f KB')),
1040 (1, 1, _('%.0f bytes')),
1045 (1, 1, _('%.0f bytes')),
1041 )
1046 )
1042
1047
1043 for multiplier, divisor, format in units:
1048 for multiplier, divisor, format in units:
1044 if nbytes >= divisor * multiplier:
1049 if nbytes >= divisor * multiplier:
1045 return format % (nbytes / float(divisor))
1050 return format % (nbytes / float(divisor))
1046 return units[-1][2] % nbytes
1051 return units[-1][2] % nbytes
1047
1052
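A few worked values for the table above (illustration only, assuming no message translations are active); each threshold keeps roughly three significant digits:

# illustration only
assert bytecount(123) == '123 bytes'
assert bytecount(1536) == '1.50 KB'
assert bytecount(15 * (1 << 20)) == '15.0 MB'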
1048 def drop_scheme(scheme, path):
1053 def drop_scheme(scheme, path):
1049 sc = scheme + ':'
1054 sc = scheme + ':'
1050 if path.startswith(sc):
1055 if path.startswith(sc):
1051 path = path[len(sc):]
1056 path = path[len(sc):]
1052 if path.startswith('//'):
1057 if path.startswith('//'):
1053 path = path[2:]
1058 path = path[2:]
1054 return path
1059 return path
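A brief sketch of the scheme stripping above, with hypothetical URLs:

# illustration only
assert drop_scheme('file', 'file:///tmp/repo') == '/tmp/repo'
assert drop_scheme('file', 'file:relative/path') == 'relative/path'
assert drop_scheme('file', '/already/local') == '/already/local'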
@@ -1,25 +1,25 b''
1 port:4:export
1 port:4:export
2 port:4:vaportight
2 port:4:vaportight
3 port:4:import/export
3 port:4:import/export
4 port:4:4:-:spam:import/export
4 port:4:4:-:spam:import/export
5 port:3:4:+:eggs:import/export
5 port:3:4:+:eggs:import/export
6 port:2:1:-:spam:import
6 port:2:1:-:spam:import
7 port:2:2:-:spam:export
7 port:2:2:-:spam:export
8 port:2:1:+:spam:export
8 port:2:1:+:spam:export
9 port:2:2:+:spam:vaportight
9 port:2:2:+:spam:vaportight
10 port:2:3:+:spam:import/export
10 port:2:3:+:spam:import/export
11 port:1:2:+:eggs:export
11 port:1:2:+:eggs:export
12 port:0:1:+:eggs:import
12 port:0:1:+:spam:import
13 port:4:import/export
13 port:4:import/export
14 % follow
14 % follow
15 port:0:import
15 port:0:import
16 port2:6:4:+:eggs:deport
16 port2:6:4:+:eggs:deport
17 port:4:4:-:spam:import/export
17 port:4:4:-:spam:import/export
18 port:3:4:+:eggs:import/export
18 port:3:4:+:eggs:import/export
19 port:2:1:-:spam:import
19 port:2:1:-:spam:import
20 port:2:2:-:spam:export
20 port:2:2:-:spam:export
21 port:2:1:+:spam:export
21 port:2:1:+:spam:export
22 port:2:2:+:spam:vaportight
22 port:2:2:+:spam:vaportight
23 port:2:3:+:spam:import/export
23 port:2:3:+:spam:import/export
24 port:1:2:+:eggs:export
24 port:1:2:+:eggs:export
25 port:0:1:+:eggs:import
25 port:0:1:+:spam:import