Never apply string formatting to generated errors with util.Abort....
Thomas Arendsen Hein
r3072:bc3fe3b5 default
@@ -1,2007 +1,2007 @@
# queue.py - patch queues for mercurial
#
# Copyright 2005, 2006 Chris Mason <mason@suse.com>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.

'''patch management and development

This extension lets you work with a stack of patches in a Mercurial
repository. It manages two stacks of patches - all known patches, and
applied patches (subset of known patches).

Known patches are represented as patch files in the .hg/patches
directory. Applied patches are both patch files and changesets.

Common tasks (use "hg help command" for more details):

prepare repository to work with patches   qinit
create new patch                          qnew
import existing patch                     qimport

print patch series                        qseries
print applied patches                     qapplied
print name of top applied patch           qtop

add known patch to applied stack          qpush
remove patch from applied stack           qpop
refresh contents of top applied patch     qrefresh
'''
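
# An illustrative session using the commands listed above (the patch name
# "fix-locking.patch" is hypothetical):
#
#   hg qinit                    # prepare the repository to work with patches
#   hg qnew fix-locking.patch   # create a new patch on top of the stack
#   hg qrefresh                 # fold current working-dir changes into it
#   hg qpop                     # unapply it; hg qpush applies it again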

from mercurial.demandload import *
from mercurial.i18n import gettext as _
from mercurial import commands
demandload(globals(), "os sys re struct traceback errno bz2")
demandload(globals(), "mercurial:cmdutil,hg,patch,revlog,ui,util")

commands.norepo += " qclone qversion"

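# Each line of the .hg/patches/status file has the form
# "<changeset hex>:<patch name>"; statusentry parses one such line (or an
# explicit rev/name pair) and __str__ regenerates the same form.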
class statusentry:
    def __init__(self, rev, name=None):
        if not name:
            fields = rev.split(':')
            if len(fields) == 2:
                self.rev, self.name = fields
            else:
                self.rev, self.name = None, None
        else:
            self.rev, self.name = rev, name

    def __str__(self):
        return self.rev + ':' + self.name

class queue:
    def __init__(self, ui, path, patchdir=None):
        self.basepath = path
        self.path = patchdir or os.path.join(path, "patches")
        self.opener = util.opener(self.path)
        self.ui = ui
        self.applied = []
        self.full_series = []
        self.applied_dirty = 0
        self.series_dirty = 0
        self.series_path = "series"
        self.status_path = "status"
        self.guards_path = "guards"
        self.active_guards = None
        self.guards_dirty = False
        self._diffopts = None

        if os.path.exists(self.join(self.series_path)):
            self.full_series = self.opener(self.series_path).read().splitlines()
            self.parse_series()

        if os.path.exists(self.join(self.status_path)):
            lines = self.opener(self.status_path).read().splitlines()
            self.applied = [statusentry(l) for l in lines]

    def diffopts(self):
        if self._diffopts is None:
            self._diffopts = patch.diffopts(self.ui)
        return self._diffopts

    def join(self, *p):
        return os.path.join(self.path, *p)

    def find_series(self, patch):
        pre = re.compile("(\s*)([^#]+)")
        index = 0
        for l in self.full_series:
            m = pre.match(l)
            if m:
                s = m.group(2)
                s = s.rstrip()
                if s == patch:
                    return index
            index += 1
        return None

    guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
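    # A hypothetical series line "foo.patch #+experimental #-stable" names the
    # patch "foo.patch" and attaches two guards: it is pushable while the
    # "experimental" guard is active and skipped while "stable" is active
    # (see pushable() below).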

    def parse_series(self):
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                self.series.append(patch)
                self.series_guards.append(self.guard_re.findall(comment))

    def check_guard(self, guard):
        bad_chars = '# \t\r\n\f'
        first = guard[0]
        for c in '-+':
            if first == c:
                return (_('guard %r starts with invalid character: %r') %
                        (guard, c))
        for c in bad_chars:
            if c in guard:
                return _('invalid character in guard %r: %r') % (guard, c)

    def set_active(self, guards):
        for guard in guards:
            bad = self.check_guard(guard)
            if bad:
                raise util.Abort(bad)
        guards = dict.fromkeys(guards).keys()
        guards.sort()
        self.ui.debug('active guards: %s\n' % ' '.join(guards))
        self.active_guards = guards
        self.guards_dirty = True

    def active(self):
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                if err.errno != errno.ENOENT: raise
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards

    def set_guards(self, idx, guards):
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True

    def pushable(self, idx):
        if isinstance(idx, str):
            idx = self.series.index(idx)
        patchguards = self.series_guards[idx]
        if not patchguards:
            return True, None
        default = False
        guards = self.active()
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
        if exactneg:
            return False, exactneg[0]
        pos = [g for g in patchguards if g[0] == '+']
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, exactpos[0]
            return False, pos
        return True, ''

    def explain_pushable(self, idx, all_patches=False):
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], ' '.join(why)))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])

    def save_dirty(self):
        def write_list(items, path):
            fp = self.opener(path, 'w')
            for i in items:
                print >> fp, i
            fp.close()
        if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
        if self.series_dirty: write_list(self.full_series, self.series_path)
        if self.guards_dirty: write_list(self.active_guards, self.guards_path)

    def readheaders(self, patch):
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        def eatempty(lines):
            while lines:
                l = lines[-1]
                if re.match('\s*$', l):
                    del lines[-1]
                else:
                    break

        pf = self.join(patch)
        message = []
        comments = []
        user = None
        date = None
        format = None
        subject = None
        diffstart = 0

        for line in file(pf):
            line = line.rstrip()
            if line.startswith('diff --git'):
                diffstart = 2
                break
            if diffstart:
                if line.startswith('+++ '):
                    diffstart = 2
                break
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif not line.startswith("# ") and line:
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)
        return (message, comments, user, date, diffstart > 1)

    def printdiff(self, repo, node1, node2=None, files=None,
                  fp=None, changes=None, opts={}):
        fns, matchfn, anypats = cmdutil.matchpats(repo, files, opts)

        patch.diff(repo, node1, node2, fns, match=matchfn,
                   fp=fp, changes=changes, opts=self.diffopts())

    def mergeone(self, repo, mergeq, head, patch, rev, wlock):
        # first try just applying the patch
        (err, n) = self.apply(repo, [ patch ], update_status=False,
                              strict=True, merge=rev, wlock=wlock)

        if err == 0:
            return (err, n)

        if n is None:
            raise util.Abort(_("apply failed for patch %s") % patch)

        self.ui.warn("patch didn't work out, merging %s\n" % patch)

        # apply failed, strip away that rev and merge.
        hg.clean(repo, head, wlock=wlock)
        self.strip(repo, n, update=False, backup='strip', wlock=wlock)

        c = repo.changelog.read(rev)
        ret = hg.merge(repo, rev, wlock=wlock)
        if ret:
            raise util.Abort(_("update returned %d") % ret)
        n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        try:
            message, comments, user, date, patchfound = mergeq.readheaders(patch)
        except:
            raise util.Abort(_("unable to read %s") % patch)

        patchf = self.opener(patch, "w")
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)
        self.printdiff(repo, head, n, fp=patchf)
        patchf.close()
        return (0, n)

    def qparents(self, repo, rev=None):
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == revlog.nullid:
                return p1
            if len(self.applied) == 0:
                return None
            return revlog.bin(self.applied[-1].rev)
        pp = repo.changelog.parents(rev)
        if pp[1] != revlog.nullid:
            arevs = [ x.rev for x in self.applied ]
            p0 = revlog.hex(pp[0])
            p1 = revlog.hex(pp[1])
            if p0 in arevs:
                return pp[0]
            if p1 in arevs:
                return pp[1]
        return pp[0]

    def mergepatch(self, repo, mergeq, series, wlock):
        if len(self.applied) == 0:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
                            wlock=wlock)
            self.applied.append(statusentry(revlog.hex(n), pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn("patch %s does not exist\n" % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn("patch %s is not applied\n" % patch)
                return (1, None)
            rev = revlog.bin(info[1])
            (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
            if head:
                self.applied.append(statusentry(revlog.hex(head), patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        return (0, head)

    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: file name of patch'''
        try:
            (files, fuzz) = patch.patch(patchfile, self.ui, strip=1,
                                        cwd=repo.root)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn("patch failed, unable to continue (try -v)\n")
            return (False, [], False)

        return (True, files, fuzz)

    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, wlock=None):
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        tr = repo.transaction()
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.warn("applying %s\n" % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                message, comments, user, date, patchfound = self.readheaders(patchname)
            except:
                self.ui.warn("Unable to read %s\n" % patchname)
                err = 1
                break

            if not message:
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            (patcherr, files, fuzz) = self.patch(repo, pf)
            patcherr = not patcherr

            if merge and files:
                # Mark as merged and update dirstate parent info
                repo.dirstate.update(repo.dirstate.filterfiles(files.keys()), 'm')
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)
            files = patch.updatedir(self.ui, repo, files, wlock=wlock)
            n = repo.commit(files, message, user, date, force=1, lock=lock,
                            wlock=wlock)

            if n == None:
                raise util.Abort(_("repo commit failed"))

            if update_status:
                self.applied.append(statusentry(revlog.hex(n), patchname))

            if patcherr:
                if not patchfound:
                    self.ui.warn("patch %s is empty\n" % patchname)
                    err = 0
                else:
                    self.ui.warn("patch failed, rejects left in working dir\n")
                    err = 1
                break

            if fuzz and strict:
                self.ui.warn("fuzz found when applying patch, stopping\n")
                err = 1
                break
        tr.close()
        return (err, n)

    def delete(self, repo, patches, keep=False):
        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            realpatches.append(patch)

        if not keep:
            r = self.qrepo()
            if r:
                r.remove(realpatches, True)
            else:
                os.unlink(self.join(patch))

        indices = [self.find_series(p) for p in realpatches]
        indices.sort()
        for i in indices[-1::-1]:
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1

    def check_toppatch(self, repo):
        if len(self.applied) > 0:
            top = revlog.bin(self.applied[-1].rev)
            pp = repo.dirstate.parents()
            if top not in pp:
                raise util.Abort(_("queue top not at same revision as working directory"))
            return top
        return None
    def check_localchanges(self, repo, force=False, refresh=True):
        m, a, r, d = repo.status()[:4]
        if m or a or r or d:
            if not force:
                if refresh:
                    raise util.Abort(_("local changes found, refresh first"))
                else:
                    raise util.Abort(_("local changes found"))
        return m, a, r, d
    def new(self, repo, patch, msg=None, force=None):
        if os.path.exists(self.join(patch)):
            raise util.Abort(_('patch "%s" already exists') % patch)
        m, a, r, d = self.check_localchanges(repo, force)
        commitfiles = m + a + r
        self.check_toppatch(repo)
        wlock = repo.wlock()
        insert = self.full_series_end()
        if msg:
            n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
                            wlock=wlock)
        else:
            n = repo.commit(commitfiles,
                            "New patch: %s" % patch, force=True, wlock=wlock)
        if n == None:
            raise util.Abort(_("repo commit failed"))
        self.full_series[insert:insert] = [patch]
        self.applied.append(statusentry(revlog.hex(n), patch))
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        p = self.opener(patch, "w")
        if msg:
            msg = msg + "\n"
            p.write(msg)
        p.close()
        wlock = None
        r = self.qrepo()
        if r: r.add([patch])
        if commitfiles:
            self.refresh(repo, short=True)

    def strip(self, repo, rev, update=True, backup="all", wlock=None):
        def limitheads(chlog, stop):
            """return the list of all nodes that have no children"""
            p = {}
            h = []
            stoprev = 0
            if stop in chlog.nodemap:
                stoprev = chlog.rev(stop)

            for r in range(chlog.count() - 1, -1, -1):
                n = chlog.node(r)
                if n not in p:
                    h.append(n)
                if n == stop:
                    break
                if r < stoprev:
                    break
                for pn in chlog.parents(n):
                    p[pn] = 1
            return h

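        # bundle() below writes a legacy "HG10" bundle: the 4-byte magic
        # followed by the bz2-compressed changegroup stream, saved under
        # .hg/strip-backup so stripped changesets can be restored later.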
        def bundle(cg):
            backupdir = repo.join("strip-backup")
            if not os.path.isdir(backupdir):
                os.mkdir(backupdir)
            name = os.path.join(backupdir, "%s" % revlog.short(rev))
            name = savename(name)
            self.ui.warn("saving bundle to %s\n" % name)
            # TODO, exclusive open
            f = open(name, "wb")
            try:
                f.write("HG10")
                z = bz2.BZ2Compressor(9)
                while 1:
                    chunk = cg.read(4096)
                    if not chunk:
                        break
                    f.write(z.compress(chunk))
                f.write(z.flush())
            except:
                os.unlink(name)
                raise
            f.close()
            return name

        def stripall(rev, revnum):
            cl = repo.changelog
            c = cl.read(rev)
            mm = repo.manifest.read(c[0])
            seen = {}

            for x in xrange(revnum, cl.count()):
                c = cl.read(cl.node(x))
                for f in c[3]:
                    if f in seen:
                        continue
                    seen[f] = 1
                    if f in mm:
                        filerev = mm[f]
                    else:
                        filerev = 0
                    seen[f] = filerev
            # we go in two steps here so the strip loop happens in a
            # sensible order. When stripping many files, this helps keep
            # our disk access patterns under control.
            seen_list = seen.keys()
            seen_list.sort()
            for f in seen_list:
                ff = repo.file(f)
                filerev = seen[f]
                if filerev != 0:
                    if filerev in ff.nodemap:
                        filerev = ff.rev(filerev)
                    else:
                        filerev = 0
                ff.strip(filerev, revnum)

        if not wlock:
            wlock = repo.wlock()
        lock = repo.lock()
        chlog = repo.changelog
        # TODO delete the undo files, and handle undo of merge sets
        pp = chlog.parents(rev)
        revnum = chlog.rev(rev)

        if update:
            self.check_localchanges(repo, refresh=False)
            urev = self.qparents(repo, rev)
            hg.clean(repo, urev, wlock=wlock)
            repo.dirstate.write()

        # save is a list of all the branches we are truncating away
        # that we actually want to keep. changegroup will be used
        # to preserve them and add them back after the truncate
        saveheads = []
        savebases = {}

        heads = limitheads(chlog, rev)
        seen = {}

        # search through all the heads, finding those where the revision
        # we want to strip away is an ancestor. Also look for merges
        # that might be turned into new heads by the strip.
        while heads:
            h = heads.pop()
            n = h
            while True:
                seen[n] = 1
                pp = chlog.parents(n)
                if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
                    if pp[1] not in seen:
                        heads.append(pp[1])
                if pp[0] == revlog.nullid:
                    break
                if chlog.rev(pp[0]) < revnum:
                    break
                n = pp[0]
                if n == rev:
                    break
            r = chlog.reachable(h, rev)
            if rev not in r:
                saveheads.append(h)
                for x in r:
                    if chlog.rev(x) > revnum:
                        savebases[x] = 1

        # create a changegroup for all the branches we need to keep
        if backup == "all":
            backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
            bundle(backupch)
        if saveheads:
            backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
            chgrpfile = bundle(backupch)

        stripall(rev, revnum)

        change = chlog.read(rev)
        repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
        chlog.strip(revnum, revnum)
        if saveheads:
            self.ui.status("adding branch\n")
            commands.unbundle(self.ui, repo, chgrpfile, update=False)
            if backup != "strip":
                os.unlink(chgrpfile)

    def isapplied(self, patch):
        """returns (index, rev, patch)"""
        for i in xrange(len(self.applied)):
            a = self.applied[i]
            if a.name == patch:
                return (i, a.rev, a.name)
        return None

    # if the exact patch name does not exist, we try a few
    # variations. If strict is passed, we try only #1
    #
    # 1) a number to indicate an offset in the series file
    # 2) a unique substring of the patch name was given
    # 3) patchname[-+]num to indicate an offset in the series file
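    #
    # For example, given a hypothetical series ['a.patch', 'b.patch',
    # 'c.patch']: lookup('1') -> 'b.patch' (offset into the series),
    # lookup('c') -> 'c.patch' (unique substring), and
    # lookup('c.patch-1') -> 'b.patch' (offset relative to a named patch).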
    def lookup(self, patch, strict=False):
        patch = patch and str(patch)

        def partial_name(s):
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if len(self.series) > 0 and len(self.applied) > 0:
                if s == 'qtip':
                    return self.series[self.series_end()-1]
                if s == 'qbase':
                    return self.series[0]
            return None
        if patch == None:
            return None

        # we don't want to return a partial match until we make
        # sure the file name passed in does not exist (checked below)
        res = partial_name(patch)
        if res and res == patch:
            return res

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except(ValueError, OverflowError):
                pass
            else:
                if sno < len(self.series):
                    return self.series[sno]
            if not strict:
                # return any partial match made above
                if res:
                    return res
                minus = patch.rsplit('-', 1)
                if len(minus) > 1:
                    res = partial_name(minus[0])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(minus[1] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                plus = patch.rsplit('+', 1)
                if len(plus) > 1:
                    res = partial_name(plus[0])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(plus[1] or 1)
                        except(ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)

    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None, wlock=None):
        if not wlock:
            wlock = repo.wlock()
        patch = self.lookup(patch)
        if patch and self.isapplied(patch):
            self.ui.warn(_("patch %s is already applied\n") % patch)
            sys.exit(1)
        if self.series_end() == len(self.series):
            self.ui.warn(_("patch series fully applied\n"))
            sys.exit(1)
        if not force:
            self.check_localchanges(repo)

        self.applied_dirty = 1;
        start = self.series_end()
        if start > 0:
            self.check_toppatch(repo)
        if not patch:
            patch = self.series[start]
            end = start + 1
        else:
            end = self.series.index(patch, start) + 1
        s = self.series[start:end]
        if mergeq:
            ret = self.mergepatch(repo, mergeq, s, wlock)
        else:
            ret = self.apply(repo, s, list, wlock=wlock)
        top = self.applied[-1].name
        if ret[0]:
            self.ui.write("Errors during apply, please fix and refresh %s\n" %
                          top)
        else:
            self.ui.write("Now at: %s\n" % top)
        return ret[0]

    def pop(self, repo, patch=None, force=False, update=True, all=False,
            wlock=None):
        def getfile(f, rev):
            t = repo.file(f).read(rev)
            try:
                repo.wfile(f, "w").write(t)
            except IOError:
                try:
                    os.makedirs(os.path.dirname(repo.wjoin(f)))
                except OSError, err:
                    if err.errno != errno.EEXIST: raise
                repo.wfile(f, "w").write(t)

        if not wlock:
            wlock = repo.wlock()
        if patch:
            # index, rev, patch
            info = self.isapplied(patch)
            if not info:
                patch = self.lookup(patch)
            info = self.isapplied(patch)
            if not info:
                raise util.Abort(_("patch %s is not applied") % patch)
        if len(self.applied) == 0:
            self.ui.warn(_("no patches applied\n"))
            sys.exit(1)

        if not update:
            parents = repo.dirstate.parents()
            rr = [ revlog.bin(x.rev) for x in self.applied ]
            for p in parents:
                if p in rr:
                    self.ui.warn("qpop: forcing dirstate update\n")
                    update = True

        if not force and update:
            self.check_localchanges(repo)

        self.applied_dirty = 1;
        end = len(self.applied)
        if not patch:
            if all:
                popi = 0
            else:
                popi = len(self.applied) - 1
        else:
            popi = info[0] + 1
            if popi >= end:
                self.ui.warn("qpop: %s is already at the top\n" % patch)
                return
        info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]

        start = info[0]
        rev = revlog.bin(info[1])

        # we know there are no local changes, so we can make a simplified
        # form of hg.update.
        if update:
            top = self.check_toppatch(repo)
            qp = self.qparents(repo, rev)
            changes = repo.changelog.read(qp)
            mmap = repo.manifest.read(changes[0])
            m, a, r, d, u = repo.status(qp, top)[:5]
            if d:
                raise util.Abort("deletions found between repo revs")
            for f in m:
                getfile(f, mmap[f])
            for f in r:
                getfile(f, mmap[f])
                util.set_exec(repo.wjoin(f), mmap.execf(f))
            repo.dirstate.update(m + r, 'n')
            for f in a:
                try: os.unlink(repo.wjoin(f))
                except: raise
                try: os.removedirs(os.path.dirname(repo.wjoin(f)))
                except: pass
            if a:
                repo.dirstate.forget(a)
            repo.dirstate.setparents(qp, revlog.nullid)
        self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
        del self.applied[start:end]
        if len(self.applied):
            self.ui.write("Now at: %s\n" % self.applied[-1].name)
        else:
            self.ui.write("Patch queue now empty\n")

    def diff(self, repo, pats, opts):
        top = self.check_toppatch(repo)
        if not top:
            self.ui.write("No patches applied\n")
            return
        qp = self.qparents(repo, top)
        self.printdiff(repo, qp, files=pats, opts=opts)

    def refresh(self, repo, pats=None, **opts):
        if len(self.applied) == 0:
            self.ui.write("No patches applied\n")
            return 1
        wlock = repo.wlock()
        self.check_toppatch(repo)
        (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
        top = revlog.bin(top)
        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        message, comments, user, date, patchfound = self.readheaders(patchfn)

        patchf = self.opener(patchfn, "w")
        msg = opts.get('msg', '').rstrip()
        if msg:
            if comments:
                # Remove existing message.
                ci = 0
                for mi in range(len(message)):
                    while message[mi] != comments[ci]:
                        ci += 1
                    del comments[ci]
            comments.append(msg)
        if comments:
            comments = "\n".join(comments) + '\n\n'
            patchf.write(comments)

        fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
        tip = repo.changelog.tip()
        if top == tip:
            # if the top of our patch queue is also the tip, there is an
            # optimization here. We update the dirstate in place and strip
            # off the tip commit. Then just commit the current directory
            # tree. We can also send repo.commit the list of files
            # changed to speed up the diff
            #
            # in short mode, we only diff the files included in the
            # patch already
            #
            # this should really read:
            # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
            # but we do it backwards to take advantage of manifest/chlog
            # caching against the next repo.status call
            #
            mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
            if opts.get('short'):
                filelist = mm + aa + dd
            else:
                filelist = None
            m, a, r, d, u = repo.status(files=filelist)[:5]

            # we might end up with files that were added between tip and
            # the dirstate parent, but then changed in the local dirstate.
            # in this case, we want them to only show up in the added section
            for x in m:
                if x not in aa:
                    mm.append(x)
            # we might end up with files added by the local dirstate that
            # were deleted by the patch. In this case, they should only
            # show up in the changed section.
            for x in a:
                if x in dd:
                    del dd[dd.index(x)]
                    mm.append(x)
                else:
                    aa.append(x)
            # make sure any files deleted in the local dirstate
            # are not in the add or change column of the patch
            forget = []
            for x in d + r:
                if x in aa:
                    del aa[aa.index(x)]
                    forget.append(x)
                    continue
                elif x in mm:
                    del mm[mm.index(x)]
                    dd.append(x)

            m = list(util.unique(mm))
            r = list(util.unique(dd))
            a = list(util.unique(aa))
            filelist = filter(matchfn, util.unique(m + r + a))
            patch.diff(repo, patchparent, files=filelist, match=matchfn,
                       fp=patchf, changes=(m, a, r, [], u),
                       opts=self.diffopts())
            patchf.close()

            changes = repo.changelog.read(tip)
            repo.dirstate.setparents(*cparents)
            copies = [(f, repo.dirstate.copied(f)) for f in a]
            repo.dirstate.update(a, 'a')
            for dst, src in copies:
                repo.dirstate.copy(src, dst)
            repo.dirstate.update(r, 'r')
            # if the patch excludes a modified file, mark that file with mtime=0
            # so status can see it.
            mm = []
            for i in range(len(m)-1, -1, -1):
                if not matchfn(m[i]):
                    mm.append(m[i])
                    del m[i]
            repo.dirstate.update(m, 'n')
            repo.dirstate.update(mm, 'n', st_mtime=0)
            repo.dirstate.forget(forget)

            if not msg:
                if not message:
                    message = "patch queue: %s\n" % patchfn
                else:
                    message = "\n".join(message)
            else:
                message = msg

            self.strip(repo, top, update=False, backup='strip', wlock=wlock)
            n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
            self.applied[-1] = statusentry(revlog.hex(n), patchfn)
            self.applied_dirty = 1
        else:
            self.printdiff(repo, patchparent, fp=patchf)
1035 patchf.close()
1035 patchf.close()
1036 self.pop(repo, force=True, wlock=wlock)
1036 self.pop(repo, force=True, wlock=wlock)
1037 self.push(repo, force=True, wlock=wlock)
1037 self.push(repo, force=True, wlock=wlock)
1038
1038
1039 def init(self, repo, create=False):
1039 def init(self, repo, create=False):
1040 if os.path.isdir(self.path):
1040 if os.path.isdir(self.path):
1041 raise util.Abort(_("patch queue directory already exists"))
1041 raise util.Abort(_("patch queue directory already exists"))
1042 os.mkdir(self.path)
1042 os.mkdir(self.path)
1043 if create:
1043 if create:
1044 return self.qrepo(create=True)
1044 return self.qrepo(create=True)
1045
1045
1046 def unapplied(self, repo, patch=None):
1046 def unapplied(self, repo, patch=None):
1047 if patch and patch not in self.series:
1047 if patch and patch not in self.series:
1048 raise util.Abort(_("patch %s is not in series file") % patch)
1048 raise util.Abort(_("patch %s is not in series file") % patch)
1049 if not patch:
1049 if not patch:
1050 start = self.series_end()
1050 start = self.series_end()
1051 else:
1051 else:
1052 start = self.series.index(patch) + 1
1052 start = self.series.index(patch) + 1
1053 unapplied = []
1053 unapplied = []
1054 for i in xrange(start, len(self.series)):
1054 for i in xrange(start, len(self.series)):
1055 pushable, reason = self.pushable(i)
1055 pushable, reason = self.pushable(i)
1056 if pushable:
1056 if pushable:
1057 unapplied.append((i, self.series[i]))
1057 unapplied.append((i, self.series[i]))
1058 self.explain_pushable(i)
1058 self.explain_pushable(i)
1059 return unapplied
1059 return unapplied
1060
1060
1061 def qseries(self, repo, missing=None, summary=False):
1061 def qseries(self, repo, missing=None, summary=False):
1062 start = self.series_end(all_patches=True)
1062 start = self.series_end(all_patches=True)
1063 if not missing:
1063 if not missing:
1064 for i in range(len(self.series)):
1064 for i in range(len(self.series)):
1065 patch = self.series[i]
1065 patch = self.series[i]
1066 if self.ui.verbose:
1066 if self.ui.verbose:
1067 if i < start:
1067 if i < start:
1068 status = 'A'
1068 status = 'A'
1069 elif self.pushable(i)[0]:
1069 elif self.pushable(i)[0]:
1070 status = 'U'
1070 status = 'U'
1071 else:
1071 else:
1072 status = 'G'
1072 status = 'G'
1073 self.ui.write('%d %s ' % (i, status))
1073 self.ui.write('%d %s ' % (i, status))
1074 if summary:
1074 if summary:
1075 msg = self.readheaders(patch)[0]
1075 msg = self.readheaders(patch)[0]
1076 msg = msg and ': ' + msg[0] or ': '
1076 msg = msg and ': ' + msg[0] or ': '
1077 else:
1077 else:
1078 msg = ''
1078 msg = ''
1079 self.ui.write('%s%s\n' % (patch, msg))
1079 self.ui.write('%s%s\n' % (patch, msg))
1080 else:
1080 else:
1081 msng_list = []
1081 msng_list = []
1082 for root, dirs, files in os.walk(self.path):
1082 for root, dirs, files in os.walk(self.path):
1083 d = root[len(self.path) + 1:]
1083 d = root[len(self.path) + 1:]
1084 for f in files:
1084 for f in files:
1085 fl = os.path.join(d, f)
1085 fl = os.path.join(d, f)
1086 if (fl not in self.series and
1086 if (fl not in self.series and
1087 fl not in (self.status_path, self.series_path)
1087 fl not in (self.status_path, self.series_path)
1088 and not fl.startswith('.')):
1088 and not fl.startswith('.')):
1089 msng_list.append(fl)
1089 msng_list.append(fl)
1090 msng_list.sort()
1090 msng_list.sort()
1091 for x in msng_list:
1091 for x in msng_list:
1092 if self.ui.verbose:
1092 if self.ui.verbose:
1093 self.ui.write("D ")
1093 self.ui.write("D ")
1094 self.ui.write("%s\n" % x)
1094 self.ui.write("%s\n" % x)
1095
1095
1096 def issaveline(self, l):
1096 def issaveline(self, l):
1097 if l.name == '.hg.patches.save.line':
1097 if l.name == '.hg.patches.save.line':
1098 return True
1098 return True
1099
1099
1100 def qrepo(self, create=False):
1100 def qrepo(self, create=False):
1101 if create or os.path.isdir(self.join(".hg")):
1101 if create or os.path.isdir(self.join(".hg")):
1102 return hg.repository(self.ui, path=self.path, create=create)
1102 return hg.repository(self.ui, path=self.path, create=create)
1103
1103
1104 def restore(self, repo, rev, delete=None, qupdate=None):
1104 def restore(self, repo, rev, delete=None, qupdate=None):
1105 c = repo.changelog.read(rev)
1105 c = repo.changelog.read(rev)
1106 desc = c[4].strip()
1106 desc = c[4].strip()
1107 lines = desc.splitlines()
1107 lines = desc.splitlines()
1108 i = 0
1108 i = 0
1109 datastart = None
1109 datastart = None
1110 series = []
1110 series = []
1111 applied = []
1111 applied = []
1112 qpp = None
1112 qpp = None
1113 for i in xrange(0, len(lines)):
1113 for i in xrange(0, len(lines)):
1114 if lines[i] == 'Patch Data:':
1114 if lines[i] == 'Patch Data:':
1115 datastart = i + 1
1115 datastart = i + 1
1116 elif lines[i].startswith('Dirstate:'):
1116 elif lines[i].startswith('Dirstate:'):
1117 l = lines[i].rstrip()
1117 l = lines[i].rstrip()
1118 l = l[10:].split(' ')
1118 l = l[10:].split(' ')
1119 qpp = [ hg.bin(x) for x in l ]
1119 qpp = [ hg.bin(x) for x in l ]
1120 elif datastart != None:
1120 elif datastart != None:
1121 l = lines[i].rstrip()
1121 l = lines[i].rstrip()
1122 se = statusentry(l)
1122 se = statusentry(l)
1123 file_ = se.name
1123 file_ = se.name
1124 if se.rev:
1124 if se.rev:
1125 applied.append(se)
1125 applied.append(se)
1126 series.append(file_)
1126 series.append(file_)
1127 if datastart == None:
1127 if datastart == None:
1128 self.ui.warn("No saved patch data found\n")
1128 self.ui.warn("No saved patch data found\n")
1129 return 1
1129 return 1
1130 self.ui.warn("restoring status: %s\n" % lines[0])
1130 self.ui.warn("restoring status: %s\n" % lines[0])
1131 self.full_series = series
1131 self.full_series = series
1132 self.applied = applied
1132 self.applied = applied
1133 self.parse_series()
1133 self.parse_series()
1134 self.series_dirty = 1
1134 self.series_dirty = 1
1135 self.applied_dirty = 1
1135 self.applied_dirty = 1
1136 heads = repo.changelog.heads()
1136 heads = repo.changelog.heads()
1137 if delete:
1137 if delete:
1138 if rev not in heads:
1138 if rev not in heads:
1139 self.ui.warn("save entry has children, leaving it alone\n")
1139 self.ui.warn("save entry has children, leaving it alone\n")
1140 else:
1140 else:
1141 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1141 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1142 pp = repo.dirstate.parents()
1142 pp = repo.dirstate.parents()
1143 if rev in pp:
1143 if rev in pp:
1144 update = True
1144 update = True
1145 else:
1145 else:
1146 update = False
1146 update = False
1147 self.strip(repo, rev, update=update, backup='strip')
1147 self.strip(repo, rev, update=update, backup='strip')
1148 if qpp:
1148 if qpp:
1149 self.ui.warn("saved queue repository parents: %s %s\n" %
1149 self.ui.warn("saved queue repository parents: %s %s\n" %
1150 (hg.short(qpp[0]), hg.short(qpp[1])))
1150 (hg.short(qpp[0]), hg.short(qpp[1])))
1151 if qupdate:
1151 if qupdate:
1152 print "queue directory updating"
1152 print "queue directory updating"
1153 r = self.qrepo()
1153 r = self.qrepo()
1154 if not r:
1154 if not r:
1155 self.ui.warn("Unable to load queue repository\n")
1155 self.ui.warn("Unable to load queue repository\n")
1156 return 1
1156 return 1
1157 hg.clean(r, qpp[0])
1157 hg.clean(r, qpp[0])
1158
1158
1159 def save(self, repo, msg=None):
1159 def save(self, repo, msg=None):
1160 if len(self.applied) == 0:
1160 if len(self.applied) == 0:
1161 self.ui.warn("save: no patches applied, exiting\n")
1161 self.ui.warn("save: no patches applied, exiting\n")
1162 return 1
1162 return 1
1163 if self.issaveline(self.applied[-1]):
1163 if self.issaveline(self.applied[-1]):
1164 self.ui.warn("status is already saved\n")
1164 self.ui.warn("status is already saved\n")
1165 return 1
1165 return 1
1166
1166
1167 ar = [ ':' + x for x in self.full_series ]
1167 ar = [ ':' + x for x in self.full_series ]
1168 if not msg:
1168 if not msg:
1169 msg = "hg patches saved state"
1169 msg = "hg patches saved state"
1170 else:
1170 else:
1171 msg = "hg patches: " + msg.rstrip('\r\n')
1171 msg = "hg patches: " + msg.rstrip('\r\n')
1172 r = self.qrepo()
1172 r = self.qrepo()
1173 if r:
1173 if r:
1174 pp = r.dirstate.parents()
1174 pp = r.dirstate.parents()
1175 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1175 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1176 msg += "\n\nPatch Data:\n"
1176 msg += "\n\nPatch Data:\n"
1177 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1177 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1178 "\n".join(ar) + '\n' or "")
1178 "\n".join(ar) + '\n' or "")
1179 n = repo.commit(None, text, user=None, force=1)
1179 n = repo.commit(None, text, user=None, force=1)
1180 if not n:
1180 if not n:
1181 self.ui.warn("repo commit failed\n")
1181 self.ui.warn("repo commit failed\n")
1182 return 1
1182 return 1
1183 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1183 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1184 self.applied_dirty = 1
1184 self.applied_dirty = 1
1185
1185
1186 def full_series_end(self):
1186 def full_series_end(self):
1187 if len(self.applied) > 0:
1187 if len(self.applied) > 0:
1188 p = self.applied[-1].name
1188 p = self.applied[-1].name
1189 end = self.find_series(p)
1189 end = self.find_series(p)
1190 if end == None:
1190 if end == None:
1191 return len(self.full_series)
1191 return len(self.full_series)
1192 return end + 1
1192 return end + 1
1193 return 0
1193 return 0
1194
1194
1195 def series_end(self, all_patches=False):
1195 def series_end(self, all_patches=False):
1196 end = 0
1196 end = 0
1197 def next(start):
1197 def next(start):
1198 if all_patches:
1198 if all_patches:
1199 return start
1199 return start
1200 i = start
1200 i = start
1201 while i < len(self.series):
1201 while i < len(self.series):
1202 p, reason = self.pushable(i)
1202 p, reason = self.pushable(i)
1203 if p:
1203 if p:
1204 break
1204 break
1205 self.explain_pushable(i)
1205 self.explain_pushable(i)
1206 i += 1
1206 i += 1
1207 return i
1207 return i
1208 if len(self.applied) > 0:
1208 if len(self.applied) > 0:
1209 p = self.applied[-1].name
1209 p = self.applied[-1].name
1210 try:
1210 try:
1211 end = self.series.index(p)
1211 end = self.series.index(p)
1212 except ValueError:
1212 except ValueError:
1213 return 0
1213 return 0
1214 return next(end + 1)
1214 return next(end + 1)
1215 return next(end)
1215 return next(end)
1216
1216
1217 def qapplied(self, repo, patch=None):
1217 def qapplied(self, repo, patch=None):
1218 if patch and patch not in self.series:
1218 if patch and patch not in self.series:
1219 raise util.Abort(_("patch %s is not in series file") % patch)
1219 raise util.Abort(_("patch %s is not in series file") % patch)
1220 if not patch:
1220 if not patch:
1221 end = len(self.applied)
1221 end = len(self.applied)
1222 else:
1222 else:
1223 end = self.series.index(patch) + 1
1223 end = self.series.index(patch) + 1
1224 for x in xrange(end):
1224 for x in xrange(end):
1225 p = self.appliedname(x)
1225 p = self.appliedname(x)
1226 self.ui.write("%s\n" % p)
1226 self.ui.write("%s\n" % p)
1227
1227
1228 def appliedname(self, index):
1228 def appliedname(self, index):
1229 pname = self.applied[index].name
1229 pname = self.applied[index].name
1230 if not self.ui.verbose:
1230 if not self.ui.verbose:
1231 p = pname
1231 p = pname
1232 else:
1232 else:
1233 p = str(self.series.index(pname)) + " " + pname
1233 p = str(self.series.index(pname)) + " " + pname
1234 return p
1234 return p
1235
1235
1236 def top(self, repo):
1236 def top(self, repo):
1237 if len(self.applied):
1237 if len(self.applied):
1238 p = self.appliedname(-1)
1238 p = self.appliedname(-1)
1239 self.ui.write(p + '\n')
1239 self.ui.write(p + '\n')
1240 else:
1240 else:
1241 self.ui.write("No patches applied\n")
1241 self.ui.write("No patches applied\n")
1242 return 1
1242 return 1
1243
1243
1244 def next(self, repo):
1244 def next(self, repo):
1245 end = self.series_end()
1245 end = self.series_end()
1246 if end == len(self.series):
1246 if end == len(self.series):
1247 self.ui.write("All patches applied\n")
1247 self.ui.write("All patches applied\n")
1248 return 1
1248 return 1
1249 else:
1249 else:
1250 p = self.series[end]
1250 p = self.series[end]
1251 if self.ui.verbose:
1251 if self.ui.verbose:
1252 self.ui.write("%d " % self.series.index(p))
1252 self.ui.write("%d " % self.series.index(p))
1253 self.ui.write(p + '\n')
1253 self.ui.write(p + '\n')
1254
1254
1255 def prev(self, repo):
1255 def prev(self, repo):
1256 if len(self.applied) > 1:
1256 if len(self.applied) > 1:
1257 p = self.appliedname(-2)
1257 p = self.appliedname(-2)
1258 self.ui.write(p + '\n')
1258 self.ui.write(p + '\n')
1259 elif len(self.applied) == 1:
1259 elif len(self.applied) == 1:
1260 self.ui.write("Only one patch applied\n")
1260 self.ui.write("Only one patch applied\n")
1261 return 1
1261 return 1
1262 else:
1262 else:
1263 self.ui.write("No patches applied\n")
1263 self.ui.write("No patches applied\n")
1264 return 1
1264 return 1
1265
1265
1266 def qimport(self, repo, files, patch=None, existing=None, force=None):
1266 def qimport(self, repo, files, patch=None, existing=None, force=None):
1267 if len(files) > 1 and patch:
1267 if len(files) > 1 and patch:
1268 raise util.Abort(_('option "-n" not valid when importing multiple '
1268 raise util.Abort(_('option "-n" not valid when importing multiple '
1269 'files'))
1269 'files'))
1270 i = 0
1270 i = 0
1271 added = []
1271 added = []
1272 for filename in files:
1272 for filename in files:
1273 if existing:
1273 if existing:
1274 if not patch:
1274 if not patch:
1275 patch = filename
1275 patch = filename
1276 if not os.path.isfile(self.join(patch)):
1276 if not os.path.isfile(self.join(patch)):
1277 raise util.Abort(_("patch %s does not exist") % patch)
1277 raise util.Abort(_("patch %s does not exist") % patch)
1278 else:
1278 else:
1279 try:
1279 try:
1280 text = file(filename).read()
1280 text = file(filename).read()
1281 except IOError:
1281 except IOError:
1282 raise util.Abort(_("unable to read %s") % patch)
1282 raise util.Abort(_("unable to read %s") % patch)
1283 if not patch:
1283 if not patch:
1284 patch = os.path.split(filename)[1]
1284 patch = os.path.split(filename)[1]
1285 if not force and os.path.exists(self.join(patch)):
1285 if not force and os.path.exists(self.join(patch)):
1286 raise util.Abort(_('patch "%s" already exists') % patch)
1286 raise util.Abort(_('patch "%s" already exists') % patch)
1287 patchf = self.opener(patch, "w")
1287 patchf = self.opener(patch, "w")
1288 patchf.write(text)
1288 patchf.write(text)
1289 if patch in self.series:
1289 if patch in self.series:
1290 raise util.Abort(_('patch %s is already in the series file')
1290 raise util.Abort(_('patch %s is already in the series file')
1291 % patch)
1291 % patch)
1292 index = self.full_series_end() + i
1292 index = self.full_series_end() + i
1293 self.full_series[index:index] = [patch]
1293 self.full_series[index:index] = [patch]
1294 self.parse_series()
1294 self.parse_series()
1295 self.ui.warn("adding %s to series file\n" % patch)
1295 self.ui.warn("adding %s to series file\n" % patch)
1296 i += 1
1296 i += 1
1297 added.append(patch)
1297 added.append(patch)
1298 patch = None
1298 patch = None
1299 self.series_dirty = 1
1299 self.series_dirty = 1
1300 qrepo = self.qrepo()
1300 qrepo = self.qrepo()
1301 if qrepo:
1301 if qrepo:
1302 qrepo.add(added)
1302 qrepo.add(added)
1303
1303
1304 def delete(ui, repo, patch, *patches, **opts):
1304 def delete(ui, repo, patch, *patches, **opts):
1305 """remove patches from queue
1305 """remove patches from queue
1306
1306
1307 The patches must not be applied.
1307 The patches must not be applied.
1308 With -k, the patch files are preserved in the patch directory."""
1308 With -k, the patch files are preserved in the patch directory."""
1309 q = repo.mq
1309 q = repo.mq
1310 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1310 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1311 q.save_dirty()
1311 q.save_dirty()
1312 return 0
1312 return 0
1313
1313
1314 def applied(ui, repo, patch=None, **opts):
1314 def applied(ui, repo, patch=None, **opts):
1315 """print the patches already applied"""
1315 """print the patches already applied"""
1316 repo.mq.qapplied(repo, patch)
1316 repo.mq.qapplied(repo, patch)
1317 return 0
1317 return 0
1318
1318
1319 def unapplied(ui, repo, patch=None, **opts):
1319 def unapplied(ui, repo, patch=None, **opts):
1320 """print the patches not yet applied"""
1320 """print the patches not yet applied"""
1321 for i, p in repo.mq.unapplied(repo, patch):
1321 for i, p in repo.mq.unapplied(repo, patch):
1322 if ui.verbose:
1322 if ui.verbose:
1323 ui.write("%d " % i)
1323 ui.write("%d " % i)
1324 ui.write("%s\n" % p)
1324 ui.write("%s\n" % p)
1325
1325
1326 def qimport(ui, repo, *filename, **opts):
1326 def qimport(ui, repo, *filename, **opts):
1327 """import a patch"""
1327 """import a patch"""
1328 q = repo.mq
1328 q = repo.mq
1329 q.qimport(repo, filename, patch=opts['name'],
1329 q.qimport(repo, filename, patch=opts['name'],
1330 existing=opts['existing'], force=opts['force'])
1330 existing=opts['existing'], force=opts['force'])
1331 q.save_dirty()
1331 q.save_dirty()
1332 return 0
1332 return 0
1333
1333
1334 def init(ui, repo, **opts):
1334 def init(ui, repo, **opts):
1335 """init a new queue repository
1335 """init a new queue repository
1336
1336
1337 The queue repository is unversioned by default. If -c is
1337 The queue repository is unversioned by default. If -c is
1338 specified, qinit will create a separate nested repository
1338 specified, qinit will create a separate nested repository
1339 for patches. Use qcommit to commit changes to this queue
1339 for patches. Use qcommit to commit changes to this queue
1340 repository."""
1340 repository."""
1341 q = repo.mq
1341 q = repo.mq
1342 r = q.init(repo, create=opts['create_repo'])
1342 r = q.init(repo, create=opts['create_repo'])
1343 q.save_dirty()
1343 q.save_dirty()
1344 if r:
1344 if r:
1345 fp = r.wopener('.hgignore', 'w')
1345 fp = r.wopener('.hgignore', 'w')
1346 print >> fp, 'syntax: glob'
1346 print >> fp, 'syntax: glob'
1347 print >> fp, 'status'
1347 print >> fp, 'status'
1348 fp.close()
1348 fp.close()
1349 r.wopener('series', 'w').close()
1349 r.wopener('series', 'w').close()
1350 r.add(['.hgignore', 'series'])
1350 r.add(['.hgignore', 'series'])
1351 return 0
1351 return 0
1352
1352
1353 def clone(ui, source, dest=None, **opts):
1353 def clone(ui, source, dest=None, **opts):
1354 '''clone main and patch repository at same time
1354 '''clone main and patch repository at same time
1355
1355
1356 If source is local, destination will have no patches applied. If
1356 If source is local, destination will have no patches applied. If
1357 source is remote, this command can not check if patches are
1357 source is remote, this command can not check if patches are
1358 applied in source, so cannot guarantee that patches are not
1358 applied in source, so cannot guarantee that patches are not
1359 applied in destination. If you clone remote repository, be sure
1359 applied in destination. If you clone remote repository, be sure
1360 before that it has no patches applied.
1360 before that it has no patches applied.
1361
1361
1362 Source patch repository is looked for in <src>/.hg/patches by
1362 Source patch repository is looked for in <src>/.hg/patches by
1363 default. Use -p <url> to change.
1363 default. Use -p <url> to change.
1364 '''
1364 '''
1365 commands.setremoteconfig(ui, opts)
1365 commands.setremoteconfig(ui, opts)
1366 if dest is None:
1366 if dest is None:
1367 dest = hg.defaultdest(source)
1367 dest = hg.defaultdest(source)
1368 sr = hg.repository(ui, ui.expandpath(source))
1368 sr = hg.repository(ui, ui.expandpath(source))
1369 qbase, destrev = None, None
1369 qbase, destrev = None, None
1370 if sr.local():
1370 if sr.local():
1371 reposetup(ui, sr)
1371 reposetup(ui, sr)
1372 if sr.mq.applied:
1372 if sr.mq.applied:
1373 qbase = revlog.bin(sr.mq.applied[0].rev)
1373 qbase = revlog.bin(sr.mq.applied[0].rev)
1374 if not hg.islocal(dest):
1374 if not hg.islocal(dest):
1375 destrev = sr.parents(qbase)[0]
1375 destrev = sr.parents(qbase)[0]
1376 ui.note(_('cloning main repo\n'))
1376 ui.note(_('cloning main repo\n'))
1377 sr, dr = hg.clone(ui, sr, dest,
1377 sr, dr = hg.clone(ui, sr, dest,
1378 pull=opts['pull'],
1378 pull=opts['pull'],
1379 rev=destrev,
1379 rev=destrev,
1380 update=False,
1380 update=False,
1381 stream=opts['uncompressed'])
1381 stream=opts['uncompressed'])
1382 ui.note(_('cloning patch repo\n'))
1382 ui.note(_('cloning patch repo\n'))
1383 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1383 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1384 dr.url() + '/.hg/patches',
1384 dr.url() + '/.hg/patches',
1385 pull=opts['pull'],
1385 pull=opts['pull'],
1386 update=not opts['noupdate'],
1386 update=not opts['noupdate'],
1387 stream=opts['uncompressed'])
1387 stream=opts['uncompressed'])
1388 if dr.local():
1388 if dr.local():
1389 if qbase:
1389 if qbase:
1390 ui.note(_('stripping applied patches from destination repo\n'))
1390 ui.note(_('stripping applied patches from destination repo\n'))
1391 reposetup(ui, dr)
1391 reposetup(ui, dr)
1392 dr.mq.strip(dr, qbase, update=False, backup=None)
1392 dr.mq.strip(dr, qbase, update=False, backup=None)
1393 if not opts['noupdate']:
1393 if not opts['noupdate']:
1394 ui.note(_('updating destination repo\n'))
1394 ui.note(_('updating destination repo\n'))
1395 hg.update(dr, dr.changelog.tip())
1395 hg.update(dr, dr.changelog.tip())
1396
1396
1397 def commit(ui, repo, *pats, **opts):
1397 def commit(ui, repo, *pats, **opts):
1398 """commit changes in the queue repository"""
1398 """commit changes in the queue repository"""
1399 q = repo.mq
1399 q = repo.mq
1400 r = q.qrepo()
1400 r = q.qrepo()
1401 if not r: raise util.Abort('no queue repository')
1401 if not r: raise util.Abort('no queue repository')
1402 commands.commit(r.ui, r, *pats, **opts)
1402 commands.commit(r.ui, r, *pats, **opts)
1403
1403
1404 def series(ui, repo, **opts):
1404 def series(ui, repo, **opts):
1405 """print the entire series file"""
1405 """print the entire series file"""
1406 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1406 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1407 return 0
1407 return 0
1408
1408
1409 def top(ui, repo, **opts):
1409 def top(ui, repo, **opts):
1410 """print the name of the current patch"""
1410 """print the name of the current patch"""
1411 return repo.mq.top(repo)
1411 return repo.mq.top(repo)
1412
1412
1413 def next(ui, repo, **opts):
1413 def next(ui, repo, **opts):
1414 """print the name of the next patch"""
1414 """print the name of the next patch"""
1415 return repo.mq.next(repo)
1415 return repo.mq.next(repo)
1416
1416
1417 def prev(ui, repo, **opts):
1417 def prev(ui, repo, **opts):
1418 """print the name of the previous patch"""
1418 """print the name of the previous patch"""
1419 return repo.mq.prev(repo)
1419 return repo.mq.prev(repo)
1420
1420
1421 def new(ui, repo, patch, **opts):
1421 def new(ui, repo, patch, **opts):
1422 """create a new patch
1422 """create a new patch
1423
1423
1424 qnew creates a new patch on top of the currently-applied patch
1424 qnew creates a new patch on top of the currently-applied patch
1425 (if any). It will refuse to run if there are any outstanding
1425 (if any). It will refuse to run if there are any outstanding
1426 changes unless -f is specified, in which case the patch will
1426 changes unless -f is specified, in which case the patch will
1427 be initialised with them.
1427 be initialised with them.
1428
1428
1429 -e, -m or -l set the patch header as well as the commit message.
1429 -e, -m or -l set the patch header as well as the commit message.
1430 If none is specified, the patch header is empty and the
1430 If none is specified, the patch header is empty and the
1431 commit message is 'New patch: PATCH'"""
1431 commit message is 'New patch: PATCH'"""
1432 q = repo.mq
1432 q = repo.mq
1433 message = commands.logmessage(opts)
1433 message = commands.logmessage(opts)
1434 if opts['edit']:
1434 if opts['edit']:
1435 message = ui.edit(message, ui.username())
1435 message = ui.edit(message, ui.username())
1436 q.new(repo, patch, msg=message, force=opts['force'])
1436 q.new(repo, patch, msg=message, force=opts['force'])
1437 q.save_dirty()
1437 q.save_dirty()
1438 return 0
1438 return 0
1439
1439
1440 def refresh(ui, repo, *pats, **opts):
1440 def refresh(ui, repo, *pats, **opts):
1441 """update the current patch
1441 """update the current patch
1442
1442
1443 If any file patterns are provided, the refreshed patch will contain only
1443 If any file patterns are provided, the refreshed patch will contain only
1444 the modifications that match those patterns; the remaining modifications
1444 the modifications that match those patterns; the remaining modifications
1445 will remain in the working directory.
1445 will remain in the working directory.
1446 """
1446 """
1447 q = repo.mq
1447 q = repo.mq
1448 message = commands.logmessage(opts)
1448 message = commands.logmessage(opts)
1449 if opts['edit']:
1449 if opts['edit']:
1450 if message:
1450 if message:
1451 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1451 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1452 patch = q.applied[-1].name
1452 patch = q.applied[-1].name
1453 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1453 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1454 message = ui.edit('\n'.join(message), user or ui.username())
1454 message = ui.edit('\n'.join(message), user or ui.username())
1455 ret = q.refresh(repo, pats, msg=message, **opts)
1455 ret = q.refresh(repo, pats, msg=message, **opts)
1456 q.save_dirty()
1456 q.save_dirty()
1457 return ret
1457 return ret
1458
1458
1459 def diff(ui, repo, *pats, **opts):
1459 def diff(ui, repo, *pats, **opts):
1460 """diff of the current patch"""
1460 """diff of the current patch"""
1461 repo.mq.diff(repo, pats, opts)
1461 repo.mq.diff(repo, pats, opts)
1462 return 0
1462 return 0
1463
1463
1464 def fold(ui, repo, *files, **opts):
1464 def fold(ui, repo, *files, **opts):
1465 """fold the named patches into the current patch
1465 """fold the named patches into the current patch
1466
1466
1467 Patches must not yet be applied. Each patch will be successively
1467 Patches must not yet be applied. Each patch will be successively
1468 applied to the current patch in the order given. If all the
1468 applied to the current patch in the order given. If all the
1469 patches apply successfully, the current patch will be refreshed
1469 patches apply successfully, the current patch will be refreshed
1470 with the new cumulative patch, and the folded patches will
1470 with the new cumulative patch, and the folded patches will
1471 be deleted. With -k/--keep, the folded patch files will not
1471 be deleted. With -k/--keep, the folded patch files will not
1472 be removed afterwards.
1472 be removed afterwards.
1473
1473
1474 The header for each folded patch will be concatenated with
1474 The header for each folded patch will be concatenated with
1475 the current patch header, separated by a line of '* * *'."""
1475 the current patch header, separated by a line of '* * *'."""
1476
1476
1477 q = repo.mq
1477 q = repo.mq
1478
1478
1479 if not files:
1479 if not files:
1480 raise util.Abort(_('qfold requires at least one patch name'))
1480 raise util.Abort(_('qfold requires at least one patch name'))
1481 if not q.check_toppatch(repo):
1481 if not q.check_toppatch(repo):
1482 raise util.Abort(_('No patches applied\n'))
1482 raise util.Abort(_('No patches applied'))
1483
1483
1484 message = commands.logmessage(opts)
1484 message = commands.logmessage(opts)
1485 if opts['edit']:
1485 if opts['edit']:
1486 if message:
1486 if message:
1487 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1487 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1488
1488
1489 parent = q.lookup('qtip')
1489 parent = q.lookup('qtip')
1490 patches = []
1490 patches = []
1491 messages = []
1491 messages = []
1492 for f in files:
1492 for f in files:
1493 p = q.lookup(f)
1493 p = q.lookup(f)
1494 if p in patches or p == parent:
1494 if p in patches or p == parent:
1495 ui.warn(_('Skipping already folded patch %s') % p)
1495 ui.warn(_('Skipping already folded patch %s') % p)
1496 if q.isapplied(p):
1496 if q.isapplied(p):
1497 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1497 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1498 patches.append(p)
1498 patches.append(p)
1499
1499
1500 for p in patches:
1500 for p in patches:
1501 if not message:
1501 if not message:
1502 messages.append(q.readheaders(p)[0])
1502 messages.append(q.readheaders(p)[0])
1503 pf = q.join(p)
1503 pf = q.join(p)
1504 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1504 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1505 if not patchsuccess:
1505 if not patchsuccess:
1506 raise util.Abort(_('Error folding patch %s') % p)
1506 raise util.Abort(_('Error folding patch %s') % p)
1507 patch.updatedir(ui, repo, files)
1507 patch.updatedir(ui, repo, files)
1508
1508
1509 if not message:
1509 if not message:
1510 message, comments, user = q.readheaders(parent)[0:3]
1510 message, comments, user = q.readheaders(parent)[0:3]
1511 for msg in messages:
1511 for msg in messages:
1512 message.append('* * *')
1512 message.append('* * *')
1513 message.extend(msg)
1513 message.extend(msg)
1514 message = '\n'.join(message)
1514 message = '\n'.join(message)
1515
1515
1516 if opts['edit']:
1516 if opts['edit']:
1517 message = ui.edit(message, user or ui.username())
1517 message = ui.edit(message, user or ui.username())
1518
1518
1519 q.refresh(repo, msg=message)
1519 q.refresh(repo, msg=message)
1520 q.delete(repo, patches, keep=opts['keep'])
1520 q.delete(repo, patches, keep=opts['keep'])
1521 q.save_dirty()
1521 q.save_dirty()
1522
1522
1523 def guard(ui, repo, *args, **opts):
1523 def guard(ui, repo, *args, **opts):
1524 '''set or print guards for a patch
1524 '''set or print guards for a patch
1525
1525
1526 Guards control whether a patch can be pushed. A patch with no
1526 Guards control whether a patch can be pushed. A patch with no
1527 guards is always pushed. A patch with a positive guard ("+foo") is
1527 guards is always pushed. A patch with a positive guard ("+foo") is
1528 pushed only if the qselect command has activated it. A patch with
1528 pushed only if the qselect command has activated it. A patch with
1529 a negative guard ("-foo") is never pushed if the qselect command
1529 a negative guard ("-foo") is never pushed if the qselect command
1530 has activated it.
1530 has activated it.
1531
1531
1532 With no arguments, print the currently active guards.
1532 With no arguments, print the currently active guards.
1533 With arguments, set guards for the named patch.
1533 With arguments, set guards for the named patch.
1534
1534
1535 To set a negative guard "-foo" on topmost patch ("--" is needed so
1535 To set a negative guard "-foo" on topmost patch ("--" is needed so
1536 hg will not interpret "-foo" as an option):
1536 hg will not interpret "-foo" as an option):
1537 hg qguard -- -foo
1537 hg qguard -- -foo
1538
1538
1539 To set guards on another patch:
1539 To set guards on another patch:
1540 hg qguard other.patch +2.6.17 -stable
1540 hg qguard other.patch +2.6.17 -stable
1541 '''
1541 '''
1542 def status(idx):
1542 def status(idx):
1543 guards = q.series_guards[idx] or ['unguarded']
1543 guards = q.series_guards[idx] or ['unguarded']
1544 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1544 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1545 q = repo.mq
1545 q = repo.mq
1546 patch = None
1546 patch = None
1547 args = list(args)
1547 args = list(args)
1548 if opts['list']:
1548 if opts['list']:
1549 if args or opts['none']:
1549 if args or opts['none']:
1550 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1550 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1551 for i in xrange(len(q.series)):
1551 for i in xrange(len(q.series)):
1552 status(i)
1552 status(i)
1553 return
1553 return
1554 if not args or args[0][0:1] in '-+':
1554 if not args or args[0][0:1] in '-+':
1555 if not q.applied:
1555 if not q.applied:
1556 raise util.Abort(_('no patches applied'))
1556 raise util.Abort(_('no patches applied'))
1557 patch = q.applied[-1].name
1557 patch = q.applied[-1].name
1558 if patch is None and args[0][0:1] not in '-+':
1558 if patch is None and args[0][0:1] not in '-+':
1559 patch = args.pop(0)
1559 patch = args.pop(0)
1560 if patch is None:
1560 if patch is None:
1561 raise util.Abort(_('no patch to work with'))
1561 raise util.Abort(_('no patch to work with'))
1562 if args or opts['none']:
1562 if args or opts['none']:
1563 q.set_guards(q.find_series(patch), args)
1563 q.set_guards(q.find_series(patch), args)
1564 q.save_dirty()
1564 q.save_dirty()
1565 else:
1565 else:
1566 status(q.series.index(q.lookup(patch)))
1566 status(q.series.index(q.lookup(patch)))
1567
1567
1568 def header(ui, repo, patch=None):
1568 def header(ui, repo, patch=None):
1569 """Print the header of the topmost or specified patch"""
1569 """Print the header of the topmost or specified patch"""
1570 q = repo.mq
1570 q = repo.mq
1571
1571
1572 if patch:
1572 if patch:
1573 patch = q.lookup(patch)
1573 patch = q.lookup(patch)
1574 else:
1574 else:
1575 if not q.applied:
1575 if not q.applied:
1576 ui.write('No patches applied\n')
1576 ui.write('No patches applied\n')
1577 return 1
1577 return 1
1578 patch = q.lookup('qtip')
1578 patch = q.lookup('qtip')
1579 message = repo.mq.readheaders(patch)[0]
1579 message = repo.mq.readheaders(patch)[0]
1580
1580
1581 ui.write('\n'.join(message) + '\n')
1581 ui.write('\n'.join(message) + '\n')
1582
1582
1583 def lastsavename(path):
1583 def lastsavename(path):
1584 (directory, base) = os.path.split(path)
1584 (directory, base) = os.path.split(path)
1585 names = os.listdir(directory)
1585 names = os.listdir(directory)
1586 namere = re.compile("%s.([0-9]+)" % base)
1586 namere = re.compile("%s.([0-9]+)" % base)
1587 maxindex = None
1587 maxindex = None
1588 maxname = None
1588 maxname = None
1589 for f in names:
1589 for f in names:
1590 m = namere.match(f)
1590 m = namere.match(f)
1591 if m:
1591 if m:
1592 index = int(m.group(1))
1592 index = int(m.group(1))
1593 if maxindex == None or index > maxindex:
1593 if maxindex == None or index > maxindex:
1594 maxindex = index
1594 maxindex = index
1595 maxname = f
1595 maxname = f
1596 if maxname:
1596 if maxname:
1597 return (os.path.join(directory, maxname), maxindex)
1597 return (os.path.join(directory, maxname), maxindex)
1598 return (None, None)
1598 return (None, None)
1599
1599
1600 def savename(path):
1600 def savename(path):
1601 (last, index) = lastsavename(path)
1601 (last, index) = lastsavename(path)
1602 if last is None:
1602 if last is None:
1603 index = 0
1603 index = 0
1604 newpath = path + ".%d" % (index + 1)
1604 newpath = path + ".%d" % (index + 1)
1605 return newpath
1605 return newpath
1606
1606
1607 def push(ui, repo, patch=None, **opts):
1607 def push(ui, repo, patch=None, **opts):
1608 """push the next patch onto the stack"""
1608 """push the next patch onto the stack"""
1609 q = repo.mq
1609 q = repo.mq
1610 mergeq = None
1610 mergeq = None
1611
1611
1612 if opts['all']:
1612 if opts['all']:
1613 patch = q.series[-1]
1613 patch = q.series[-1]
1614 if opts['merge']:
1614 if opts['merge']:
1615 if opts['name']:
1615 if opts['name']:
1616 newpath = opts['name']
1616 newpath = opts['name']
1617 else:
1617 else:
1618 newpath, i = lastsavename(q.path)
1618 newpath, i = lastsavename(q.path)
1619 if not newpath:
1619 if not newpath:
1620 ui.warn("no saved queues found, please use -n\n")
1620 ui.warn("no saved queues found, please use -n\n")
1621 return 1
1621 return 1
1622 mergeq = queue(ui, repo.join(""), newpath)
1622 mergeq = queue(ui, repo.join(""), newpath)
1623 ui.warn("merging with queue at: %s\n" % mergeq.path)
1623 ui.warn("merging with queue at: %s\n" % mergeq.path)
1624 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1624 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1625 mergeq=mergeq)
1625 mergeq=mergeq)
1626 q.save_dirty()
1626 q.save_dirty()
1627 return ret
1627 return ret
1628
1628
1629 def pop(ui, repo, patch=None, **opts):
1629 def pop(ui, repo, patch=None, **opts):
1630 """pop the current patch off the stack"""
1630 """pop the current patch off the stack"""
1631 localupdate = True
1631 localupdate = True
1632 if opts['name']:
1632 if opts['name']:
1633 q = queue(ui, repo.join(""), repo.join(opts['name']))
1633 q = queue(ui, repo.join(""), repo.join(opts['name']))
1634 ui.warn('using patch queue: %s\n' % q.path)
1634 ui.warn('using patch queue: %s\n' % q.path)
1635 localupdate = False
1635 localupdate = False
1636 else:
1636 else:
1637 q = repo.mq
1637 q = repo.mq
1638 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1638 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1639 q.save_dirty()
1639 q.save_dirty()
1640 return 0
1640 return 0
1641
1641
1642 def rename(ui, repo, patch, name=None, **opts):
1642 def rename(ui, repo, patch, name=None, **opts):
1643 """rename a patch
1643 """rename a patch
1644
1644
1645 With one argument, renames the current patch to PATCH1.
1645 With one argument, renames the current patch to PATCH1.
1646 With two arguments, renames PATCH1 to PATCH2."""
1646 With two arguments, renames PATCH1 to PATCH2."""
1647
1647
1648 q = repo.mq
1648 q = repo.mq
1649
1649
1650 if not name:
1650 if not name:
1651 name = patch
1651 name = patch
1652 patch = None
1652 patch = None
1653
1653
1654 if name in q.series:
1654 if name in q.series:
1655 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1655 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1656
1656
1657 absdest = q.join(name)
1657 absdest = q.join(name)
1658 if os.path.exists(absdest):
1658 if os.path.exists(absdest):
1659 raise util.Abort(_('%s already exists') % absdest)
1659 raise util.Abort(_('%s already exists') % absdest)
1660
1660
1661 if patch:
1661 if patch:
1662 patch = q.lookup(patch)
1662 patch = q.lookup(patch)
1663 else:
1663 else:
1664 if not q.applied:
1664 if not q.applied:
1665 ui.write(_('No patches applied\n'))
1665 ui.write(_('No patches applied\n'))
1666 return
1666 return
1667 patch = q.lookup('qtip')
1667 patch = q.lookup('qtip')
1668
1668
1669 if ui.verbose:
1669 if ui.verbose:
1670 ui.write('Renaming %s to %s\n' % (patch, name))
1670 ui.write('Renaming %s to %s\n' % (patch, name))
1671 i = q.find_series(patch)
1671 i = q.find_series(patch)
1672 q.full_series[i] = name
1672 q.full_series[i] = name
1673 q.parse_series()
1673 q.parse_series()
1674 q.series_dirty = 1
1674 q.series_dirty = 1
1675
1675
1676 info = q.isapplied(patch)
1676 info = q.isapplied(patch)
1677 if info:
1677 if info:
1678 q.applied[info[0]] = statusentry(info[1], name)
1678 q.applied[info[0]] = statusentry(info[1], name)
1679 q.applied_dirty = 1
1679 q.applied_dirty = 1
1680
1680
1681 util.rename(q.join(patch), absdest)
1681 util.rename(q.join(patch), absdest)
1682 r = q.qrepo()
1682 r = q.qrepo()
1683 if r:
1683 if r:
1684 wlock = r.wlock()
1684 wlock = r.wlock()
1685 if r.dirstate.state(name) == 'r':
1685 if r.dirstate.state(name) == 'r':
1686 r.undelete([name], wlock)
1686 r.undelete([name], wlock)
1687 r.copy(patch, name, wlock)
1687 r.copy(patch, name, wlock)
1688 r.remove([patch], False, wlock)
1688 r.remove([patch], False, wlock)
1689
1689
1690 q.save_dirty()
1690 q.save_dirty()
1691
1691
1692 def restore(ui, repo, rev, **opts):
1692 def restore(ui, repo, rev, **opts):
1693 """restore the queue state saved by a rev"""
1693 """restore the queue state saved by a rev"""
1694 rev = repo.lookup(rev)
1694 rev = repo.lookup(rev)
1695 q = repo.mq
1695 q = repo.mq
1696 q.restore(repo, rev, delete=opts['delete'],
1696 q.restore(repo, rev, delete=opts['delete'],
1697 qupdate=opts['update'])
1697 qupdate=opts['update'])
1698 q.save_dirty()
1698 q.save_dirty()
1699 return 0
1699 return 0
1700
1700
1701 def save(ui, repo, **opts):
1701 def save(ui, repo, **opts):
1702 """save current queue state"""
1702 """save current queue state"""
1703 q = repo.mq
1703 q = repo.mq
1704 message = commands.logmessage(opts)
1704 message = commands.logmessage(opts)
1705 ret = q.save(repo, msg=message)
1705 ret = q.save(repo, msg=message)
1706 if ret:
1706 if ret:
1707 return ret
1707 return ret
1708 q.save_dirty()
1708 q.save_dirty()
1709 if opts['copy']:
1709 if opts['copy']:
1710 path = q.path
1710 path = q.path
1711 if opts['name']:
1711 if opts['name']:
1712 newpath = os.path.join(q.basepath, opts['name'])
1712 newpath = os.path.join(q.basepath, opts['name'])
1713 if os.path.exists(newpath):
1713 if os.path.exists(newpath):
1714 if not os.path.isdir(newpath):
1714 if not os.path.isdir(newpath):
1715 raise util.Abort(_('destination %s exists and is not '
1715 raise util.Abort(_('destination %s exists and is not '
1716 'a directory') % newpath)
1716 'a directory') % newpath)
1717 if not opts['force']:
1717 if not opts['force']:
1718 raise util.Abort(_('destination %s exists, '
1718 raise util.Abort(_('destination %s exists, '
1719 'use -f to force') % newpath)
1719 'use -f to force') % newpath)
1720 else:
1720 else:
1721 newpath = savename(path)
1721 newpath = savename(path)
1722 ui.warn("copy %s to %s\n" % (path, newpath))
1722 ui.warn("copy %s to %s\n" % (path, newpath))
1723 util.copyfiles(path, newpath)
1723 util.copyfiles(path, newpath)
1724 if opts['empty']:
1724 if opts['empty']:
1725 try:
1725 try:
1726 os.unlink(q.join(q.status_path))
1726 os.unlink(q.join(q.status_path))
1727 except:
1727 except:
1728 pass
1728 pass
1729 return 0
1729 return 0
1730
1730
1731 def strip(ui, repo, rev, **opts):
1731 def strip(ui, repo, rev, **opts):
1732 """strip a revision and all later revs on the same branch"""
1732 """strip a revision and all later revs on the same branch"""
1733 rev = repo.lookup(rev)
1733 rev = repo.lookup(rev)
1734 backup = 'all'
1734 backup = 'all'
1735 if opts['backup']:
1735 if opts['backup']:
1736 backup = 'strip'
1736 backup = 'strip'
1737 elif opts['nobackup']:
1737 elif opts['nobackup']:
1738 backup = 'none'
1738 backup = 'none'
1739 repo.mq.strip(repo, rev, backup=backup)
1739 repo.mq.strip(repo, rev, backup=backup)
1740 return 0
1740 return 0
1741
1741
1742 def select(ui, repo, *args, **opts):
1742 def select(ui, repo, *args, **opts):
1743 '''set or print guarded patches to push
1743 '''set or print guarded patches to push
1744
1744
1745 Use the qguard command to set or print guards on patch, then use
1745 Use the qguard command to set or print guards on patch, then use
1746 qselect to tell mq which guards to use. A patch will be pushed if it
1746 qselect to tell mq which guards to use. A patch will be pushed if it
1747 has no guards or any positive guards match the currently selected guard,
1747 has no guards or any positive guards match the currently selected guard,
1748 but will not be pushed if any negative guards match the current guard.
1748 but will not be pushed if any negative guards match the current guard.
1749 For example:
1749 For example:
1750
1750
1751 qguard foo.patch -stable (negative guard)
1751 qguard foo.patch -stable (negative guard)
1752 qguard bar.patch +stable (positive guard)
1752 qguard bar.patch +stable (positive guard)
1753 qselect stable
1753 qselect stable
1754
1754
1755 This activates the "stable" guard. mq will skip foo.patch (because
1755 This activates the "stable" guard. mq will skip foo.patch (because
1756 it has a negative match) but push bar.patch (because it
1756 it has a negative match) but push bar.patch (because it
1757 has a positive match).
1757 has a positive match).
1758
1758
1759 With no arguments, prints the currently active guards.
1759 With no arguments, prints the currently active guards.
1760 With one argument, sets the active guard.
1760 With one argument, sets the active guard.
1761
1761
1762 Use -n/--none to deactivate guards (no other arguments needed).
1762 Use -n/--none to deactivate guards (no other arguments needed).
1763 When no guards are active, patches with positive guards are skipped
1763 When no guards are active, patches with positive guards are skipped
1764 and patches with negative guards are pushed.
1764 and patches with negative guards are pushed.
1765
1765
1766 qselect can change the guards on applied patches. It does not pop
1766 qselect can change the guards on applied patches. It does not pop
1767 guarded patches by default. Use --pop to pop back to the last applied
1767 guarded patches by default. Use --pop to pop back to the last applied
1768 patch that is not guarded. Use --reapply (which implies --pop) to push
1768 patch that is not guarded. Use --reapply (which implies --pop) to push
1769 back to the current patch afterwards, but skip guarded patches.
1769 back to the current patch afterwards, but skip guarded patches.
1770
1770
1771 Use -s/--series to print a list of all guards in the series file (no
1771 Use -s/--series to print a list of all guards in the series file (no
1772 other arguments needed). Use -v for more information.'''
1772 other arguments needed). Use -v for more information.'''
1773
1773
1774 q = repo.mq
1774 q = repo.mq
1775 guards = q.active()
1775 guards = q.active()
1776 if args or opts['none']:
1776 if args or opts['none']:
1777 old_unapplied = q.unapplied(repo)
1777 old_unapplied = q.unapplied(repo)
1778 old_guarded = [i for i in xrange(len(q.applied)) if
1778 old_guarded = [i for i in xrange(len(q.applied)) if
1779 not q.pushable(i)[0]]
1779 not q.pushable(i)[0]]
1780 q.set_active(args)
1780 q.set_active(args)
1781 q.save_dirty()
1781 q.save_dirty()
1782 if not args:
1782 if not args:
1783 ui.status(_('guards deactivated\n'))
1783 ui.status(_('guards deactivated\n'))
1784 if not opts['pop'] and not opts['reapply']:
1784 if not opts['pop'] and not opts['reapply']:
1785 unapplied = q.unapplied(repo)
1785 unapplied = q.unapplied(repo)
1786 guarded = [i for i in xrange(len(q.applied))
1786 guarded = [i for i in xrange(len(q.applied))
1787 if not q.pushable(i)[0]]
1787 if not q.pushable(i)[0]]
1788 if len(unapplied) != len(old_unapplied):
1788 if len(unapplied) != len(old_unapplied):
1789 ui.status(_('number of unguarded, unapplied patches has '
1789 ui.status(_('number of unguarded, unapplied patches has '
1790 'changed from %d to %d\n') %
1790 'changed from %d to %d\n') %
1791 (len(old_unapplied), len(unapplied)))
1791 (len(old_unapplied), len(unapplied)))
1792 if len(guarded) != len(old_guarded):
1792 if len(guarded) != len(old_guarded):
1793 ui.status(_('number of guarded, applied patches has changed '
1793 ui.status(_('number of guarded, applied patches has changed '
1794 'from %d to %d\n') %
1794 'from %d to %d\n') %
1795 (len(old_guarded), len(guarded)))
1795 (len(old_guarded), len(guarded)))
1796 elif opts['series']:
1796 elif opts['series']:
1797 guards = {}
1797 guards = {}
1798 noguards = 0
1798 noguards = 0
1799 for gs in q.series_guards:
1799 for gs in q.series_guards:
1800 if not gs:
1800 if not gs:
1801 noguards += 1
1801 noguards += 1
1802 for g in gs:
1802 for g in gs:
1803 guards.setdefault(g, 0)
1803 guards.setdefault(g, 0)
1804 guards[g] += 1
1804 guards[g] += 1
1805 if ui.verbose:
1805 if ui.verbose:
1806 guards['NONE'] = noguards
1806 guards['NONE'] = noguards
1807 guards = guards.items()
1807 guards = guards.items()
1808 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1808 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1809 if guards:
1809 if guards:
1810 ui.note(_('guards in series file:\n'))
1810 ui.note(_('guards in series file:\n'))
1811 for guard, count in guards:
1811 for guard, count in guards:
1812 ui.note('%2d ' % count)
1812 ui.note('%2d ' % count)
1813 ui.write(guard, '\n')
1813 ui.write(guard, '\n')
1814 else:
1814 else:
1815 ui.note(_('no guards in series file\n'))
1815 ui.note(_('no guards in series file\n'))
1816 else:
1816 else:
1817 if guards:
1817 if guards:
1818 ui.note(_('active guards:\n'))
1818 ui.note(_('active guards:\n'))
1819 for g in guards:
1819 for g in guards:
1820 ui.write(g, '\n')
1820 ui.write(g, '\n')
1821 else:
1821 else:
1822 ui.write(_('no active guards\n'))
1822 ui.write(_('no active guards\n'))
1823 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1823 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1824 popped = False
1824 popped = False
1825 if opts['pop'] or opts['reapply']:
1825 if opts['pop'] or opts['reapply']:
1826 for i in xrange(len(q.applied)):
1826 for i in xrange(len(q.applied)):
1827 pushable, reason = q.pushable(i)
1827 pushable, reason = q.pushable(i)
1828 if not pushable:
1828 if not pushable:
1829 ui.status(_('popping guarded patches\n'))
1829 ui.status(_('popping guarded patches\n'))
1830 popped = True
1830 popped = True
1831 if i == 0:
1831 if i == 0:
1832 q.pop(repo, all=True)
1832 q.pop(repo, all=True)
1833 else:
1833 else:
1834 q.pop(repo, i-1)
1834 q.pop(repo, i-1)
1835 break
1835 break
1836 if popped:
1836 if popped:
1837 try:
1837 try:
1838 if reapply:
1838 if reapply:
1839 ui.status(_('reapplying unguarded patches\n'))
1839 ui.status(_('reapplying unguarded patches\n'))
1840 q.push(repo, reapply)
1840 q.push(repo, reapply)
1841 finally:
1841 finally:
1842 q.save_dirty()
1842 q.save_dirty()
1843
1843
1844 def reposetup(ui, repo):
1844 def reposetup(ui, repo):
1845 class mqrepo(repo.__class__):
1845 class mqrepo(repo.__class__):
1846 def abort_if_wdir_patched(self, errmsg, force=False):
1846 def abort_if_wdir_patched(self, errmsg, force=False):
1847 if self.mq.applied and not force:
1847 if self.mq.applied and not force:
1848 parent = revlog.hex(self.dirstate.parents()[0])
1848 parent = revlog.hex(self.dirstate.parents()[0])
1849 if parent in [s.rev for s in self.mq.applied]:
1849 if parent in [s.rev for s in self.mq.applied]:
1850 raise util.Abort(errmsg)
1850 raise util.Abort(errmsg)
1851
1851
1852 def commit(self, *args, **opts):
1852 def commit(self, *args, **opts):
1853 if len(args) >= 6:
1853 if len(args) >= 6:
1854 force = args[5]
1854 force = args[5]
1855 else:
1855 else:
1856 force = opts.get('force')
1856 force = opts.get('force')
1857 self.abort_if_wdir_patched(
1857 self.abort_if_wdir_patched(
1858 _('cannot commit over an applied mq patch'),
1858 _('cannot commit over an applied mq patch'),
1859 force)
1859 force)
1860
1860
1861 return super(mqrepo, self).commit(*args, **opts)
1861 return super(mqrepo, self).commit(*args, **opts)
1862
1862
1863 def push(self, remote, force=False, revs=None):
1863 def push(self, remote, force=False, revs=None):
1864 if self.mq.applied and not force:
1864 if self.mq.applied and not force:
1865 raise util.Abort(_('source has mq patches applied'))
1865 raise util.Abort(_('source has mq patches applied'))
1866 return super(mqrepo, self).push(remote, force, revs)
1866 return super(mqrepo, self).push(remote, force, revs)
1867
1867
1868 def tags(self):
1868 def tags(self):
1869 if self.tagscache:
1869 if self.tagscache:
1870 return self.tagscache
1870 return self.tagscache
1871
1871
1872 tagscache = super(mqrepo, self).tags()
1872 tagscache = super(mqrepo, self).tags()
1873
1873
1874 q = self.mq
1874 q = self.mq
1875 if not q.applied:
1875 if not q.applied:
1876 return tagscache
1876 return tagscache
1877
1877
1878 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1878 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1879 mqtags.append((mqtags[-1][0], 'qtip'))
1879 mqtags.append((mqtags[-1][0], 'qtip'))
1880 mqtags.append((mqtags[0][0], 'qbase'))
1880 mqtags.append((mqtags[0][0], 'qbase'))
1881 for patch in mqtags:
1881 for patch in mqtags:
1882 if patch[1] in tagscache:
1882 if patch[1] in tagscache:
1883 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1883 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1884 else:
1884 else:
1885 tagscache[patch[1]] = revlog.bin(patch[0])
1885 tagscache[patch[1]] = revlog.bin(patch[0])
1886
1886
1887 return tagscache
1887 return tagscache
1888
1888
1889 if repo.local():
1889 if repo.local():
1890 repo.__class__ = mqrepo
1890 repo.__class__ = mqrepo
1891 repo.mq = queue(ui, repo.join(""))
1891 repo.mq = queue(ui, repo.join(""))
1892
1892
1893 cmdtable = {
1893 cmdtable = {
1894 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1894 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1895 "qclone": (clone,
1895 "qclone": (clone,
1896 [('', 'pull', None, _('use pull protocol to copy metadata')),
1896 [('', 'pull', None, _('use pull protocol to copy metadata')),
1897 ('U', 'noupdate', None, _('do not update the new working directories')),
1897 ('U', 'noupdate', None, _('do not update the new working directories')),
1898 ('', 'uncompressed', None,
1898 ('', 'uncompressed', None,
1899 _('use uncompressed transfer (fast over LAN)')),
1899 _('use uncompressed transfer (fast over LAN)')),
1900 ('e', 'ssh', '', _('specify ssh command to use')),
1900 ('e', 'ssh', '', _('specify ssh command to use')),
1901 ('p', 'patches', '', _('location of source patch repo')),
1901 ('p', 'patches', '', _('location of source patch repo')),
1902 ('', 'remotecmd', '',
1902 ('', 'remotecmd', '',
1903 _('specify hg command to run on the remote side'))],
1903 _('specify hg command to run on the remote side'))],
1904 'hg qclone [OPTION]... SOURCE [DEST]'),
1904 'hg qclone [OPTION]... SOURCE [DEST]'),
1905 "qcommit|qci":
1905 "qcommit|qci":
1906 (commit,
1906 (commit,
1907 commands.table["^commit|ci"][1],
1907 commands.table["^commit|ci"][1],
1908 'hg qcommit [OPTION]... [FILE]...'),
1908 'hg qcommit [OPTION]... [FILE]...'),
1909 "^qdiff": (diff,
1909 "^qdiff": (diff,
1910 [('I', 'include', [], _('include names matching the given patterns')),
1910 [('I', 'include', [], _('include names matching the given patterns')),
1911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
1911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
1912 'hg qdiff [-I] [-X] [FILE]...'),
1912 'hg qdiff [-I] [-X] [FILE]...'),
1913 "qdelete|qremove|qrm":
1913 "qdelete|qremove|qrm":
1914 (delete,
1914 (delete,
1915 [('k', 'keep', None, _('keep patch file'))],
1915 [('k', 'keep', None, _('keep patch file'))],
1916 'hg qdelete [-k] PATCH'),
1916 'hg qdelete [-k] PATCH'),
1917 'qfold':
1917 'qfold':
1918 (fold,
1918 (fold,
1919 [('e', 'edit', None, _('edit patch header')),
1919 [('e', 'edit', None, _('edit patch header')),
1920 ('k', 'keep', None, _('keep folded patch files')),
1920 ('k', 'keep', None, _('keep folded patch files')),
1921 ('m', 'message', '', _('set patch header to <text>')),
1921 ('m', 'message', '', _('set patch header to <text>')),
1922 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1922 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1923 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1923 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1924 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1924 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1925 ('n', 'none', None, _('drop all guards'))],
1925 ('n', 'none', None, _('drop all guards'))],
1926 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1926 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1927 'qheader': (header, [],
1927 'qheader': (header, [],
1928 _('hg qheader [PATCH]')),
1928 _('hg qheader [PATCH]')),
1929 "^qimport":
1929 "^qimport":
1930 (qimport,
1930 (qimport,
1931 [('e', 'existing', None, 'import file in patch dir'),
1931 [('e', 'existing', None, 'import file in patch dir'),
1932 ('n', 'name', '', 'patch file name'),
1932 ('n', 'name', '', 'patch file name'),
1933 ('f', 'force', None, 'overwrite existing files')],
1933 ('f', 'force', None, 'overwrite existing files')],
1934 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1934 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1935 "^qinit":
1935 "^qinit":
1936 (init,
1936 (init,
1937 [('c', 'create-repo', None, 'create queue repository')],
1937 [('c', 'create-repo', None, 'create queue repository')],
1938 'hg qinit [-c]'),
1938 'hg qinit [-c]'),
1939 "qnew":
1939 "qnew":
1940 (new,
1940 (new,
1941 [('e', 'edit', None, _('edit commit message')),
1941 [('e', 'edit', None, _('edit commit message')),
1942 ('m', 'message', '', _('use <text> as commit message')),
1942 ('m', 'message', '', _('use <text> as commit message')),
1943 ('l', 'logfile', '', _('read the commit message from <file>')),
1943 ('l', 'logfile', '', _('read the commit message from <file>')),
1944 ('f', 'force', None, _('import uncommitted changes into patch'))],
1944 ('f', 'force', None, _('import uncommitted changes into patch'))],
1945 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
1945 'hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH'),
1946 "qnext": (next, [], 'hg qnext'),
1946 "qnext": (next, [], 'hg qnext'),
1947 "qprev": (prev, [], 'hg qprev'),
1947 "qprev": (prev, [], 'hg qprev'),
1948 "^qpop":
1948 "^qpop":
1949 (pop,
1949 (pop,
1950 [('a', 'all', None, 'pop all patches'),
1950 [('a', 'all', None, 'pop all patches'),
1951 ('n', 'name', '', 'queue name to pop'),
1951 ('n', 'name', '', 'queue name to pop'),
1952 ('f', 'force', None, 'forget any local changes')],
1952 ('f', 'force', None, 'forget any local changes')],
1953 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1953 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1954 "^qpush":
1954 "^qpush":
1955 (push,
1955 (push,
1956 [('f', 'force', None, 'apply if the patch has rejects'),
1956 [('f', 'force', None, 'apply if the patch has rejects'),
1957 ('l', 'list', None, 'list patch name in commit text'),
1957 ('l', 'list', None, 'list patch name in commit text'),
1958 ('a', 'all', None, 'apply all patches'),
1958 ('a', 'all', None, 'apply all patches'),
1959 ('m', 'merge', None, 'merge from another queue'),
1959 ('m', 'merge', None, 'merge from another queue'),
1960 ('n', 'name', '', 'merge queue name')],
1960 ('n', 'name', '', 'merge queue name')],
1961 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1961 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1962 "^qrefresh":
1962 "^qrefresh":
1963 (refresh,
1963 (refresh,
1964 [('e', 'edit', None, _('edit commit message')),
1964 [('e', 'edit', None, _('edit commit message')),
1965 ('m', 'message', '', _('change commit message with <text>')),
1965 ('m', 'message', '', _('change commit message with <text>')),
1966 ('l', 'logfile', '', _('change commit message with <file> content')),
1966 ('l', 'logfile', '', _('change commit message with <file> content')),
1967 ('s', 'short', None, 'short refresh'),
1967 ('s', 'short', None, 'short refresh'),
1968 ('I', 'include', [], _('include names matching the given patterns')),
1968 ('I', 'include', [], _('include names matching the given patterns')),
1969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
1969 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
1970 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
1970 'hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] FILES...'),
1971 'qrename|qmv':
1971 'qrename|qmv':
1972 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1972 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1973 "qrestore":
1973 "qrestore":
1974 (restore,
1974 (restore,
1975 [('d', 'delete', None, 'delete save entry'),
1975 [('d', 'delete', None, 'delete save entry'),
1976 ('u', 'update', None, 'update queue working dir')],
1976 ('u', 'update', None, 'update queue working dir')],
1977 'hg qrestore [-d] [-u] REV'),
1977 'hg qrestore [-d] [-u] REV'),
1978 "qsave":
1978 "qsave":
1979 (save,
1979 (save,
1980 [('m', 'message', '', _('use <text> as commit message')),
1980 [('m', 'message', '', _('use <text> as commit message')),
1981 ('l', 'logfile', '', _('read the commit message from <file>')),
1981 ('l', 'logfile', '', _('read the commit message from <file>')),
1982 ('c', 'copy', None, 'copy patch directory'),
1982 ('c', 'copy', None, 'copy patch directory'),
1983 ('n', 'name', '', 'copy directory name'),
1983 ('n', 'name', '', 'copy directory name'),
1984 ('e', 'empty', None, 'clear queue status file'),
1984 ('e', 'empty', None, 'clear queue status file'),
1985 ('f', 'force', None, 'force copy')],
1985 ('f', 'force', None, 'force copy')],
1986 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1986 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1987 "qselect": (select,
1987 "qselect": (select,
1988 [('n', 'none', None, _('disable all guards')),
1988 [('n', 'none', None, _('disable all guards')),
1989 ('s', 'series', None, _('list all guards in series file')),
1989 ('s', 'series', None, _('list all guards in series file')),
1990 ('', 'pop', None,
1990 ('', 'pop', None,
1991 _('pop to before first guarded applied patch')),
1991 _('pop to before first guarded applied patch')),
1992 ('', 'reapply', None, _('pop, then reapply patches'))],
1992 ('', 'reapply', None, _('pop, then reapply patches'))],
1993 'hg qselect [OPTION...] [GUARD...]'),
1993 'hg qselect [OPTION...] [GUARD...]'),
1994 "qseries":
1994 "qseries":
1995 (series,
1995 (series,
1996 [('m', 'missing', None, 'print patches not in series'),
1996 [('m', 'missing', None, 'print patches not in series'),
1997 ('s', 'summary', None, _('print first line of patch header'))],
1997 ('s', 'summary', None, _('print first line of patch header'))],
1998 'hg qseries [-m]'),
1998 'hg qseries [-m]'),
1999 "^strip":
1999 "^strip":
2000 (strip,
2000 (strip,
2001 [('f', 'force', None, 'force multi-head removal'),
2001 [('f', 'force', None, 'force multi-head removal'),
2002 ('b', 'backup', None, 'bundle unrelated changesets'),
2002 ('b', 'backup', None, 'bundle unrelated changesets'),
2003 ('n', 'nobackup', None, 'no backups')],
2003 ('n', 'nobackup', None, 'no backups')],
2004 'hg strip [-f] [-b] [-n] REV'),
2004 'hg strip [-f] [-b] [-n] REV'),
2005 "qtop": (top, [], 'hg qtop'),
2005 "qtop": (top, [], 'hg qtop'),
2006 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
2006 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
2007 }
2007 }
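Read purely as data, each entry in the table above maps a command name (optionally followed by '|'-separated aliases, with a leading '^' marking commands shown in the short help list) to a (function, options, synopsis) tuple, and each option is a (short flag, long flag, default, help text) 4-tuple. A hedged sketch of walking a table with that shape; describe() is a made-up helper, not part of Mercurial:

    def describe(table):
        lines = []
        for key, (func, options, synopsis) in table.items():
            name = key.lstrip('^').split('|', 1)[0]
            flags = ', '.join('--' + lng for short, lng, default, text in options)
            lines.append('%s: %s [%s]' % (name, synopsis, flags or 'no options'))
        return sorted(lines)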
@@ -1,145 +1,145 @@
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), 'mdiff util')
11 demandload(globals(), 'mdiff util')
12 demandload(globals(), 'os sys')
12 demandload(globals(), 'os sys')
13
13
14 def make_filename(repo, pat, node,
14 def make_filename(repo, pat, node,
15 total=None, seqno=None, revwidth=None, pathname=None):
15 total=None, seqno=None, revwidth=None, pathname=None):
16 node_expander = {
16 node_expander = {
17 'H': lambda: hex(node),
17 'H': lambda: hex(node),
18 'R': lambda: str(repo.changelog.rev(node)),
18 'R': lambda: str(repo.changelog.rev(node)),
19 'h': lambda: short(node),
19 'h': lambda: short(node),
20 }
20 }
21 expander = {
21 expander = {
22 '%': lambda: '%',
22 '%': lambda: '%',
23 'b': lambda: os.path.basename(repo.root),
23 'b': lambda: os.path.basename(repo.root),
24 }
24 }
25
25
26 try:
26 try:
27 if node:
27 if node:
28 expander.update(node_expander)
28 expander.update(node_expander)
29 if node and revwidth is not None:
29 if node and revwidth is not None:
30 expander['r'] = (lambda:
30 expander['r'] = (lambda:
31 str(repo.changelog.rev(node)).zfill(revwidth))
31 str(repo.changelog.rev(node)).zfill(revwidth))
32 if total is not None:
32 if total is not None:
33 expander['N'] = lambda: str(total)
33 expander['N'] = lambda: str(total)
34 if seqno is not None:
34 if seqno is not None:
35 expander['n'] = lambda: str(seqno)
35 expander['n'] = lambda: str(seqno)
36 if total is not None and seqno is not None:
36 if total is not None and seqno is not None:
37 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
37 expander['n'] = lambda:str(seqno).zfill(len(str(total)))
38 if pathname is not None:
38 if pathname is not None:
39 expander['s'] = lambda: os.path.basename(pathname)
39 expander['s'] = lambda: os.path.basename(pathname)
40 expander['d'] = lambda: os.path.dirname(pathname) or '.'
40 expander['d'] = lambda: os.path.dirname(pathname) or '.'
41 expander['p'] = lambda: pathname
41 expander['p'] = lambda: pathname
42
42
43 newname = []
43 newname = []
44 patlen = len(pat)
44 patlen = len(pat)
45 i = 0
45 i = 0
46 while i < patlen:
46 while i < patlen:
47 c = pat[i]
47 c = pat[i]
48 if c == '%':
48 if c == '%':
49 i += 1
49 i += 1
50 c = pat[i]
50 c = pat[i]
51 c = expander[c]()
51 c = expander[c]()
52 newname.append(c)
52 newname.append(c)
53 i += 1
53 i += 1
54 return ''.join(newname)
54 return ''.join(newname)
55 except KeyError, inst:
55 except KeyError, inst:
56 raise util.Abort(_("invalid format spec '%%%s' in output file name"),
56 raise util.Abort(_("invalid format spec '%%%s' in output file name") %
57 inst.args[0])
57 inst.args[0])
58
58
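The hunk just above shows the pattern this changeset applies at each call site: the message is rendered with the % operator before util.Abort is raised, instead of handing the format string and its argument to Abort as two separate exception arguments. A small self-contained comparison of what ends up in the exception's args (Abort below is a stand-in class, not mercurial.util.Abort, and the real message above also escapes a literal per cent sign, which is omitted here for brevity):

    class Abort(Exception):
        pass

    spec = 'q'
    # separate-argument style: .args still carries an unformatted template
    before = Abort("invalid format spec '%s' in output file name", spec)
    assert before.args == ("invalid format spec '%s' in output file name", 'q')
    # formatted-at-the-raise-site style: .args carries the finished message
    after = Abort("invalid format spec '%s' in output file name" % spec)
    assert after.args == ("invalid format spec 'q' in output file name",)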
59 def make_file(repo, pat, node=None,
59 def make_file(repo, pat, node=None,
60 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
60 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
61 if not pat or pat == '-':
61 if not pat or pat == '-':
62 return 'w' in mode and sys.stdout or sys.stdin
62 return 'w' in mode and sys.stdout or sys.stdin
63 if hasattr(pat, 'write') and 'w' in mode:
63 if hasattr(pat, 'write') and 'w' in mode:
64 return pat
64 return pat
65 if hasattr(pat, 'read') and 'r' in mode:
65 if hasattr(pat, 'read') and 'r' in mode:
66 return pat
66 return pat
67 return open(make_filename(repo, pat, node, total, seqno, revwidth,
67 return open(make_filename(repo, pat, node, total, seqno, revwidth,
68 pathname),
68 pathname),
69 mode)
69 mode)
70
70
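make_file() above resolves its pattern in three steps: '-' or an empty pattern means stdout (or stdin when reading), an object that already exposes the right read or write method is returned unchanged, and anything else goes through make_filename() and open(). A rough standalone sketch of that dispatch, with an injectable opener so the example never touches the filesystem (pick_output is a made-up name):

    import sys

    def pick_output(pat, mode='wb', opener=open):
        if not pat or pat == '-':
            return 'w' in mode and sys.stdout or sys.stdin
        if hasattr(pat, 'write') and 'w' in mode:
            return pat
        if hasattr(pat, 'read') and 'r' in mode:
            return pat
        return opener(pat, mode)

    assert pick_output('-') is sys.stdout
    assert pick_output('', mode='rb') is sys.stdin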
71 def matchpats(repo, pats=[], opts={}, head=''):
71 def matchpats(repo, pats=[], opts={}, head=''):
72 cwd = repo.getcwd()
72 cwd = repo.getcwd()
73 if not pats and cwd:
73 if not pats and cwd:
74 opts['include'] = [os.path.join(cwd, i)
74 opts['include'] = [os.path.join(cwd, i)
75 for i in opts.get('include', [])]
75 for i in opts.get('include', [])]
76 opts['exclude'] = [os.path.join(cwd, x)
76 opts['exclude'] = [os.path.join(cwd, x)
77 for x in opts.get('exclude', [])]
77 for x in opts.get('exclude', [])]
78 cwd = ''
78 cwd = ''
79 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
79 return util.cmdmatcher(repo.root, cwd, pats or ['.'], opts.get('include'),
80 opts.get('exclude'), head)
80 opts.get('exclude'), head)
81
81
82 def makewalk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
82 def makewalk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
83 files, matchfn, anypats = matchpats(repo, pats, opts, head)
83 files, matchfn, anypats = matchpats(repo, pats, opts, head)
84 exact = dict(zip(files, files))
84 exact = dict(zip(files, files))
85 def walk():
85 def walk():
86 for src, fn in repo.walk(node=node, files=files, match=matchfn,
86 for src, fn in repo.walk(node=node, files=files, match=matchfn,
87 badmatch=badmatch):
87 badmatch=badmatch):
88 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
88 yield src, fn, util.pathto(repo.getcwd(), fn), fn in exact
89 return files, matchfn, walk()
89 return files, matchfn, walk()
90
90
91 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
91 def walk(repo, pats=[], opts={}, node=None, head='', badmatch=None):
92 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
92 files, matchfn, results = makewalk(repo, pats, opts, node, head, badmatch)
93 for r in results:
93 for r in results:
94 yield r
94 yield r
95
95
96 def findrenames(repo, added=None, removed=None, threshold=0.5):
96 def findrenames(repo, added=None, removed=None, threshold=0.5):
97 if added is None or removed is None:
97 if added is None or removed is None:
98 added, removed = repo.status()[1:3]
98 added, removed = repo.status()[1:3]
99 changes = repo.changelog.read(repo.dirstate.parents()[0])
99 changes = repo.changelog.read(repo.dirstate.parents()[0])
100 mf = repo.manifest.read(changes[0])
100 mf = repo.manifest.read(changes[0])
101 for a in added:
101 for a in added:
102 aa = repo.wread(a)
102 aa = repo.wread(a)
103 bestscore, bestname = None, None
103 bestscore, bestname = None, None
104 for r in removed:
104 for r in removed:
105 rr = repo.file(r).read(mf[r])
105 rr = repo.file(r).read(mf[r])
106 delta = mdiff.textdiff(aa, rr)
106 delta = mdiff.textdiff(aa, rr)
107 if len(delta) < len(aa):
107 if len(delta) < len(aa):
108 myscore = 1.0 - (float(len(delta)) / len(aa))
108 myscore = 1.0 - (float(len(delta)) / len(aa))
109 if bestscore is None or myscore > bestscore:
109 if bestscore is None or myscore > bestscore:
110 bestscore, bestname = myscore, r
110 bestscore, bestname = myscore, r
111 if bestname and bestscore >= threshold:
111 if bestname and bestscore >= threshold:
112 yield bestname, a, bestscore
112 yield bestname, a, bestscore
113
113
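The score computed above is one minus the size of the binary delta relative to the size of the added file, so identical contents score 1.0, unrelated contents score close to 0.0, and pairs whose delta is no smaller than the file are skipped. A toy recreation of just the scoring rule, with the delta length supplied directly instead of calling mdiff.textdiff (similarity is a made-up name):

    def similarity(added_data, delta_len):
        # 1.0 means an empty delta (identical contents); values near 0.0 mean
        # the delta is almost as large as the added file itself
        if not added_data or delta_len >= len(added_data):
            return 0.0
        return 1.0 - float(delta_len) / len(added_data)

    assert similarity('a' * 100, 0) == 1.0
    assert similarity('a' * 100, 50) == 0.5

The threshold findrenames() compares these scores against comes in on the same 0.0 to 1.0 scale.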
114 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
114 def addremove(repo, pats=[], opts={}, wlock=None, dry_run=None,
115 similarity=None):
115 similarity=None):
116 if dry_run is None:
116 if dry_run is None:
117 dry_run = opts.get('dry_run')
117 dry_run = opts.get('dry_run')
118 if similarity is None:
118 if similarity is None:
119 similarity = float(opts.get('similarity') or 0)
119 similarity = float(opts.get('similarity') or 0)
120 add, remove = [], []
120 add, remove = [], []
121 mapping = {}
121 mapping = {}
122 for src, abs, rel, exact in walk(repo, pats, opts):
122 for src, abs, rel, exact in walk(repo, pats, opts):
123 if src == 'f' and repo.dirstate.state(abs) == '?':
123 if src == 'f' and repo.dirstate.state(abs) == '?':
124 add.append(abs)
124 add.append(abs)
125 mapping[abs] = rel, exact
125 mapping[abs] = rel, exact
126 if repo.ui.verbose or not exact:
126 if repo.ui.verbose or not exact:
127 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
127 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
128 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
128 if repo.dirstate.state(abs) != 'r' and not os.path.exists(rel):
129 remove.append(abs)
129 remove.append(abs)
130 mapping[abs] = rel, exact
130 mapping[abs] = rel, exact
131 if repo.ui.verbose or not exact:
131 if repo.ui.verbose or not exact:
132 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
132 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
133 if not dry_run:
133 if not dry_run:
134 repo.add(add, wlock=wlock)
134 repo.add(add, wlock=wlock)
135 repo.remove(remove, wlock=wlock)
135 repo.remove(remove, wlock=wlock)
136 if similarity > 0:
136 if similarity > 0:
137 for old, new, score in findrenames(repo, add, remove, similarity):
137 for old, new, score in findrenames(repo, add, remove, similarity):
138 oldrel, oldexact = mapping[old]
138 oldrel, oldexact = mapping[old]
139 newrel, newexact = mapping[new]
139 newrel, newexact = mapping[new]
140 if repo.ui.verbose or not oldexact or not newexact:
140 if repo.ui.verbose or not oldexact or not newexact:
141 repo.ui.status(_('recording removal of %s as rename to %s '
141 repo.ui.status(_('recording removal of %s as rename to %s '
142 '(%d%% similar)\n') %
142 '(%d%% similar)\n') %
143 (oldrel, newrel, score * 100))
143 (oldrel, newrel, score * 100))
144 if not dry_run:
144 if not dry_run:
145 repo.copy(old, new, wlock=wlock)
145 repo.copy(old, new, wlock=wlock)
@@ -1,3507 +1,3507 @@
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb shlex")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted = repo.status()[:4]
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def walkchangerevs(ui, repo, pats, opts):
53 def walkchangerevs(ui, repo, pats, opts):
54 '''Iterate over files and the revs they changed in.
54 '''Iterate over files and the revs they changed in.
55
55
56 Callers most commonly need to iterate backwards over the history
56 Callers most commonly need to iterate backwards over the history
57 they are interested in. Doing so has awful (quadratic-looking)
57 they are interested in. Doing so has awful (quadratic-looking)
58 performance, so we use iterators in a "windowed" way.
58 performance, so we use iterators in a "windowed" way.
59
59
60 We walk a window of revisions in the desired order. Within the
60 We walk a window of revisions in the desired order. Within the
61 window, we first walk forwards to gather data, then in the desired
61 window, we first walk forwards to gather data, then in the desired
62 order (usually backwards) to display it.
62 order (usually backwards) to display it.
63
63
64 This function returns an (iterator, getchange, matchfn) tuple. The
64 This function returns an (iterator, getchange, matchfn) tuple. The
65 getchange function returns the changelog entry for a numeric
65 getchange function returns the changelog entry for a numeric
66 revision. The iterator yields 3-tuples. They will be of one of
66 revision. The iterator yields 3-tuples. They will be of one of
67 the following forms:
67 the following forms:
68
68
69 "window", incrementing, lastrev: stepping through a window,
69 "window", incrementing, lastrev: stepping through a window,
70 positive if walking forwards through revs, last rev in the
70 positive if walking forwards through revs, last rev in the
71 sequence iterated over - use to reset state for the current window
71 sequence iterated over - use to reset state for the current window
72
72
73 "add", rev, fns: out-of-order traversal of the given file names
73 "add", rev, fns: out-of-order traversal of the given file names
74 fns, which changed during revision rev - use to gather data for
74 fns, which changed during revision rev - use to gather data for
75 possible display
75 possible display
76
76
77 "iter", rev, None: in-order traversal of the revs earlier iterated
77 "iter", rev, None: in-order traversal of the revs earlier iterated
78 over with "add" - use to display data'''
78 over with "add" - use to display data'''
79
79
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 if start < end:
81 if start < end:
82 while start < end:
82 while start < end:
83 yield start, min(windowsize, end-start)
83 yield start, min(windowsize, end-start)
84 start += windowsize
84 start += windowsize
85 if windowsize < sizelimit:
85 if windowsize < sizelimit:
86 windowsize *= 2
86 windowsize *= 2
87 else:
87 else:
88 while start > end:
88 while start > end:
89 yield start, min(windowsize, start-end-1)
89 yield start, min(windowsize, start-end-1)
90 start -= windowsize
90 start -= windowsize
91 if windowsize < sizelimit:
91 if windowsize < sizelimit:
92 windowsize *= 2
92 windowsize *= 2
93
93
94
94
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 follow = opts.get('follow') or opts.get('follow_first')
96 follow = opts.get('follow') or opts.get('follow_first')
97
97
98 if repo.changelog.count() == 0:
98 if repo.changelog.count() == 0:
99 return [], False, matchfn
99 return [], False, matchfn
100
100
101 if follow:
101 if follow:
102 p = repo.dirstate.parents()[0]
102 p = repo.dirstate.parents()[0]
103 if p == nullid:
103 if p == nullid:
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
105 start = 'tip'
105 start = 'tip'
106 else:
106 else:
107 start = repo.changelog.rev(p)
107 start = repo.changelog.rev(p)
108 defrange = '%s:0' % start
108 defrange = '%s:0' % start
109 else:
109 else:
110 defrange = 'tip:0'
110 defrange = 'tip:0'
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
112 wanted = {}
112 wanted = {}
113 slowpath = anypats
113 slowpath = anypats
114 fncache = {}
114 fncache = {}
115
115
116 chcache = {}
116 chcache = {}
117 def getchange(rev):
117 def getchange(rev):
118 ch = chcache.get(rev)
118 ch = chcache.get(rev)
119 if ch is None:
119 if ch is None:
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
121 return ch
121 return ch
122
122
123 if not slowpath and not files:
123 if not slowpath and not files:
124 # No files, no patterns. Display all revs.
124 # No files, no patterns. Display all revs.
125 wanted = dict(zip(revs, revs))
125 wanted = dict(zip(revs, revs))
126 copies = []
126 copies = []
127 if not slowpath:
127 if not slowpath:
128 # Only files, no patterns. Check the history of each file.
128 # Only files, no patterns. Check the history of each file.
129 def filerevgen(filelog, node):
129 def filerevgen(filelog, node):
130 cl_count = repo.changelog.count()
130 cl_count = repo.changelog.count()
131 if node is None:
131 if node is None:
132 last = filelog.count() - 1
132 last = filelog.count() - 1
133 else:
133 else:
134 last = filelog.rev(node)
134 last = filelog.rev(node)
135 for i, window in increasing_windows(last, -1):
135 for i, window in increasing_windows(last, -1):
136 revs = []
136 revs = []
137 for j in xrange(i - window, i + 1):
137 for j in xrange(i - window, i + 1):
138 n = filelog.node(j)
138 n = filelog.node(j)
139 revs.append((filelog.linkrev(n),
139 revs.append((filelog.linkrev(n),
140 follow and filelog.renamed(n)))
140 follow and filelog.renamed(n)))
141 revs.reverse()
141 revs.reverse()
142 for rev in revs:
142 for rev in revs:
143 # only yield rev for which we have the changelog, it can
143 # only yield rev for which we have the changelog, it can
144 # happen while doing "hg log" during a pull or commit
144 # happen while doing "hg log" during a pull or commit
145 if rev[0] < cl_count:
145 if rev[0] < cl_count:
146 yield rev
146 yield rev
147 def iterfiles():
147 def iterfiles():
148 for filename in files:
148 for filename in files:
149 yield filename, None
149 yield filename, None
150 for filename_node in copies:
150 for filename_node in copies:
151 yield filename_node
151 yield filename_node
152 minrev, maxrev = min(revs), max(revs)
152 minrev, maxrev = min(revs), max(revs)
153 for file_, node in iterfiles():
153 for file_, node in iterfiles():
154 filelog = repo.file(file_)
154 filelog = repo.file(file_)
155 # A zero count may be a directory or deleted file, so
155 # A zero count may be a directory or deleted file, so
156 # try to find matching entries on the slow path.
156 # try to find matching entries on the slow path.
157 if filelog.count() == 0:
157 if filelog.count() == 0:
158 slowpath = True
158 slowpath = True
159 break
159 break
160 for rev, copied in filerevgen(filelog, node):
160 for rev, copied in filerevgen(filelog, node):
161 if rev <= maxrev:
161 if rev <= maxrev:
162 if rev < minrev:
162 if rev < minrev:
163 break
163 break
164 fncache.setdefault(rev, [])
164 fncache.setdefault(rev, [])
165 fncache[rev].append(file_)
165 fncache[rev].append(file_)
166 wanted[rev] = 1
166 wanted[rev] = 1
167 if follow and copied:
167 if follow and copied:
168 copies.append(copied)
168 copies.append(copied)
169 if slowpath:
169 if slowpath:
170 if follow:
170 if follow:
171 raise util.Abort(_('can only follow copies/renames for explicit '
171 raise util.Abort(_('can only follow copies/renames for explicit '
172 'file names'))
172 'file names'))
173
173
174 # The slow path checks files modified in every changeset.
174 # The slow path checks files modified in every changeset.
175 def changerevgen():
175 def changerevgen():
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
177 for j in xrange(i - window, i + 1):
177 for j in xrange(i - window, i + 1):
178 yield j, getchange(j)[3]
178 yield j, getchange(j)[3]
179
179
180 for rev, changefiles in changerevgen():
180 for rev, changefiles in changerevgen():
181 matches = filter(matchfn, changefiles)
181 matches = filter(matchfn, changefiles)
182 if matches:
182 if matches:
183 fncache[rev] = matches
183 fncache[rev] = matches
184 wanted[rev] = 1
184 wanted[rev] = 1
185
185
186 class followfilter:
186 class followfilter:
187 def __init__(self, onlyfirst=False):
187 def __init__(self, onlyfirst=False):
188 self.startrev = -1
188 self.startrev = -1
189 self.roots = []
189 self.roots = []
190 self.onlyfirst = onlyfirst
190 self.onlyfirst = onlyfirst
191
191
192 def match(self, rev):
192 def match(self, rev):
193 def realparents(rev):
193 def realparents(rev):
194 if self.onlyfirst:
194 if self.onlyfirst:
195 return repo.changelog.parentrevs(rev)[0:1]
195 return repo.changelog.parentrevs(rev)[0:1]
196 else:
196 else:
197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
198
198
199 if self.startrev == -1:
199 if self.startrev == -1:
200 self.startrev = rev
200 self.startrev = rev
201 return True
201 return True
202
202
203 if rev > self.startrev:
203 if rev > self.startrev:
204 # forward: all descendants
204 # forward: all descendants
205 if not self.roots:
205 if not self.roots:
206 self.roots.append(self.startrev)
206 self.roots.append(self.startrev)
207 for parent in realparents(rev):
207 for parent in realparents(rev):
208 if parent in self.roots:
208 if parent in self.roots:
209 self.roots.append(rev)
209 self.roots.append(rev)
210 return True
210 return True
211 else:
211 else:
212 # backwards: all parents
212 # backwards: all parents
213 if not self.roots:
213 if not self.roots:
214 self.roots.extend(realparents(self.startrev))
214 self.roots.extend(realparents(self.startrev))
215 if rev in self.roots:
215 if rev in self.roots:
216 self.roots.remove(rev)
216 self.roots.remove(rev)
217 self.roots.extend(realparents(rev))
217 self.roots.extend(realparents(rev))
218 return True
218 return True
219
219
220 return False
220 return False
221
221
222 # it might be worthwhile to do this in the iterator if the rev range
222 # it might be worthwhile to do this in the iterator if the rev range
223 # is descending and the prune args are all within that range
223 # is descending and the prune args are all within that range
224 for rev in opts.get('prune', ()):
224 for rev in opts.get('prune', ()):
225 rev = repo.changelog.rev(repo.lookup(rev))
225 rev = repo.changelog.rev(repo.lookup(rev))
226 ff = followfilter()
226 ff = followfilter()
227 stop = min(revs[0], revs[-1])
227 stop = min(revs[0], revs[-1])
228 for x in range(rev, stop-1, -1):
228 for x in range(rev, stop-1, -1):
229 if ff.match(x) and wanted.has_key(x):
229 if ff.match(x) and wanted.has_key(x):
230 del wanted[x]
230 del wanted[x]
231
231
232 def iterate():
232 def iterate():
233 if follow and not files:
233 if follow and not files:
234 ff = followfilter(onlyfirst=opts.get('follow_first'))
234 ff = followfilter(onlyfirst=opts.get('follow_first'))
235 def want(rev):
235 def want(rev):
236 if ff.match(rev) and rev in wanted:
236 if ff.match(rev) and rev in wanted:
237 return True
237 return True
238 return False
238 return False
239 else:
239 else:
240 def want(rev):
240 def want(rev):
241 return rev in wanted
241 return rev in wanted
242
242
243 for i, window in increasing_windows(0, len(revs)):
243 for i, window in increasing_windows(0, len(revs)):
244 yield 'window', revs[0] < revs[-1], revs[-1]
244 yield 'window', revs[0] < revs[-1], revs[-1]
245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
246 srevs = list(nrevs)
246 srevs = list(nrevs)
247 srevs.sort()
247 srevs.sort()
248 for rev in srevs:
248 for rev in srevs:
249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
250 yield 'add', rev, fns
250 yield 'add', rev, fns
251 for rev in nrevs:
251 for rev in nrevs:
252 yield 'iter', rev, None
252 yield 'iter', rev, None
253 return iterate(), getchange, matchfn
253 return iterate(), getchange, matchfn
254
254
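A caller of walkchangerevs() is expected to react to the three tuple kinds described in its docstring: 'window' to reset per-window state, 'add' to gather data for a revision encountered out of order, and 'iter' to emit revisions in the requested order. A hedged sketch of such a consumer loop (consume and display are placeholder names, not Mercurial code):

    def consume(changeiter, getchange, display):
        # changeiter yields ('window', forward, lastrev), ('add', rev, fns)
        # and ('iter', rev, None); 'add' always precedes the matching 'iter'
        gathered = {}
        for kind, a, b in changeiter:
            if kind == 'window':
                gathered.clear()           # new window, drop per-window state
            elif kind == 'add':
                gathered[a] = b            # files touched by revision a
            elif kind == 'iter':
                display(a, getchange(a), gathered.get(a, []))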
255 revrangesep = ':'
255 revrangesep = ':'
256
256
257 def revfix(repo, val, defval):
257 def revfix(repo, val, defval):
258 '''turn user-level id of changeset into rev number.
258 '''turn user-level id of changeset into rev number.
259 user-level id can be tag, changeset, rev number, or negative rev
259 user-level id can be tag, changeset, rev number, or negative rev
260 number relative to number of revs (-1 is tip, etc).'''
260 number relative to number of revs (-1 is tip, etc).'''
261 if not val:
261 if not val:
262 return defval
262 return defval
263 try:
263 try:
264 num = int(val)
264 num = int(val)
265 if str(num) != val:
265 if str(num) != val:
266 raise ValueError
266 raise ValueError
267 if num < 0:
267 if num < 0:
268 num += repo.changelog.count()
268 num += repo.changelog.count()
269 if num < 0:
269 if num < 0:
270 num = 0
270 num = 0
271 elif num >= repo.changelog.count():
271 elif num >= repo.changelog.count():
272 raise ValueError
272 raise ValueError
273 except ValueError:
273 except ValueError:
274 try:
274 try:
275 num = repo.changelog.rev(repo.lookup(val))
275 num = repo.changelog.rev(repo.lookup(val))
276 except KeyError:
276 except KeyError:
277 raise util.Abort(_('invalid revision identifier %s'), val)
277 raise util.Abort(_('invalid revision identifier %s') % val)
278 return num
278 return num
279
279
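revfix() above normalises a user-supplied revision: a plain number is used as is, a negative number counts back from the length of the changelog (clamped at zero), and anything non-numeric falls back to a changelog lookup. A standalone sketch of only the numeric part, with the changelog replaced by a fixed revision count (numericfix is a made-up name):

    def numericfix(val, count, defval=None):
        if not val:
            return defval
        num = int(val)            # non-numeric input is out of scope here
        if num < 0:
            num += count          # -1 is the tip, -2 the one before, ...
        if num < 0:
            num = 0               # clamp instead of wrapping around again
        elif num >= count:
            raise ValueError('revision out of range: %s' % val)
        return num

    assert numericfix('-1', 10) == 9
    assert numericfix('-20', 10) == 0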
280 def revpair(ui, repo, revs):
280 def revpair(ui, repo, revs):
281 '''return pair of nodes, given list of revisions. second item can
281 '''return pair of nodes, given list of revisions. second item can
282 be None, meaning use working dir.'''
282 be None, meaning use working dir.'''
283 if not revs:
283 if not revs:
284 return repo.dirstate.parents()[0], None
284 return repo.dirstate.parents()[0], None
285 end = None
285 end = None
286 if len(revs) == 1:
286 if len(revs) == 1:
287 start = revs[0]
287 start = revs[0]
288 if revrangesep in start:
288 if revrangesep in start:
289 start, end = start.split(revrangesep, 1)
289 start, end = start.split(revrangesep, 1)
290 start = revfix(repo, start, 0)
290 start = revfix(repo, start, 0)
291 end = revfix(repo, end, repo.changelog.count() - 1)
291 end = revfix(repo, end, repo.changelog.count() - 1)
292 else:
292 else:
293 start = revfix(repo, start, None)
293 start = revfix(repo, start, None)
294 elif len(revs) == 2:
294 elif len(revs) == 2:
295 if revrangesep in revs[0] or revrangesep in revs[1]:
295 if revrangesep in revs[0] or revrangesep in revs[1]:
296 raise util.Abort(_('too many revisions specified'))
296 raise util.Abort(_('too many revisions specified'))
297 start = revfix(repo, revs[0], None)
297 start = revfix(repo, revs[0], None)
298 end = revfix(repo, revs[1], None)
298 end = revfix(repo, revs[1], None)
299 else:
299 else:
300 raise util.Abort(_('too many revisions specified'))
300 raise util.Abort(_('too many revisions specified'))
301 if end is not None: end = repo.lookup(str(end))
301 if end is not None: end = repo.lookup(str(end))
302 return repo.lookup(str(start)), end
302 return repo.lookup(str(start)), end
303
303
304 def revrange(ui, repo, revs):
304 def revrange(ui, repo, revs):
305 """Yield revision as strings from a list of revision specifications."""
305 """Yield revision as strings from a list of revision specifications."""
306 seen = {}
306 seen = {}
307 for spec in revs:
307 for spec in revs:
308 if revrangesep in spec:
308 if revrangesep in spec:
309 start, end = spec.split(revrangesep, 1)
309 start, end = spec.split(revrangesep, 1)
310 start = revfix(repo, start, 0)
310 start = revfix(repo, start, 0)
311 end = revfix(repo, end, repo.changelog.count() - 1)
311 end = revfix(repo, end, repo.changelog.count() - 1)
312 step = start > end and -1 or 1
312 step = start > end and -1 or 1
313 for rev in xrange(start, end+step, step):
313 for rev in xrange(start, end+step, step):
314 if rev in seen:
314 if rev in seen:
315 continue
315 continue
316 seen[rev] = 1
316 seen[rev] = 1
317 yield str(rev)
317 yield str(rev)
318 else:
318 else:
319 rev = revfix(repo, spec, None)
319 rev = revfix(repo, spec, None)
320 if rev in seen:
320 if rev in seen:
321 continue
321 continue
322 seen[rev] = 1
322 seen[rev] = 1
323 yield str(rev)
323 yield str(rev)
324
324
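revrange() above expands each specification either to a single revision or, when it contains the ':' separator, to the whole span walked in the direction implied by its endpoints, while skipping revisions already produced by an earlier specification. A rough standalone equivalent over plain integers, without the repository lookups (intrange is a made-up name):

    def intrange(specs):
        seen = {}
        for spec in specs:
            if ':' in spec:
                start, end = [int(x) for x in spec.split(':', 1)]
                step = start > end and -1 or 1
                revs = range(start, end + step, step)
            else:
                revs = [int(spec)]
            for rev in revs:
                if rev not in seen:
                    seen[rev] = 1
                    yield rev

    assert list(intrange(['2:4', '3'])) == [2, 3, 4]
    assert list(intrange(['4:2'])) == [4, 3, 2]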
325 def write_bundle(cg, filename=None, compress=True):
325 def write_bundle(cg, filename=None, compress=True):
326 """Write a bundle file and return its filename.
326 """Write a bundle file and return its filename.
327
327
328 Existing files will not be overwritten.
328 Existing files will not be overwritten.
329 If no filename is specified, a temporary file is created.
329 If no filename is specified, a temporary file is created.
330 bz2 compression can be turned off.
330 bz2 compression can be turned off.
331 The bundle file will be deleted in case of errors.
331 The bundle file will be deleted in case of errors.
332 """
332 """
333 class nocompress(object):
333 class nocompress(object):
334 def compress(self, x):
334 def compress(self, x):
335 return x
335 return x
336 def flush(self):
336 def flush(self):
337 return ""
337 return ""
338
338
339 fh = None
339 fh = None
340 cleanup = None
340 cleanup = None
341 try:
341 try:
342 if filename:
342 if filename:
343 if os.path.exists(filename):
343 if os.path.exists(filename):
344 raise util.Abort(_("file '%s' already exists"), filename)
344 raise util.Abort(_("file '%s' already exists") % filename)
345 fh = open(filename, "wb")
345 fh = open(filename, "wb")
346 else:
346 else:
347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
348 fh = os.fdopen(fd, "wb")
348 fh = os.fdopen(fd, "wb")
349 cleanup = filename
349 cleanup = filename
350
350
351 if compress:
351 if compress:
352 fh.write("HG10")
352 fh.write("HG10")
353 z = bz2.BZ2Compressor(9)
353 z = bz2.BZ2Compressor(9)
354 else:
354 else:
355 fh.write("HG10UN")
355 fh.write("HG10UN")
356 z = nocompress()
356 z = nocompress()
357 # parse the changegroup data, otherwise we will block
357 # parse the changegroup data, otherwise we will block
358 # in case of sshrepo because we don't know the end of the stream
358 # in case of sshrepo because we don't know the end of the stream
359
359
360 # an empty chunkiter is the end of the changegroup
360 # an empty chunkiter is the end of the changegroup
361 empty = False
361 empty = False
362 while not empty:
362 while not empty:
363 empty = True
363 empty = True
364 for chunk in changegroup.chunkiter(cg):
364 for chunk in changegroup.chunkiter(cg):
365 empty = False
365 empty = False
366 fh.write(z.compress(changegroup.genchunk(chunk)))
366 fh.write(z.compress(changegroup.genchunk(chunk)))
367 fh.write(z.compress(changegroup.closechunk()))
367 fh.write(z.compress(changegroup.closechunk()))
368 fh.write(z.flush())
368 fh.write(z.flush())
369 cleanup = None
369 cleanup = None
370 return filename
370 return filename
371 finally:
371 finally:
372 if fh is not None:
372 if fh is not None:
373 fh.close()
373 fh.close()
374 if cleanup is not None:
374 if cleanup is not None:
375 os.unlink(cleanup)
375 os.unlink(cleanup)
376
376
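The only difference between the two bundle flavours produced above is the magic header and the compressor behind it: 'HG10' followed by bz2-compressed chunks, or 'HG10UN' with the chunks passed through a do-nothing compressor. A minimal sketch of that selection, writing into an in-memory buffer rather than a file (start_bundle is a made-up name; the two header strings come from the code above):

    import bz2
    from io import BytesIO

    class nocompress(object):
        def compress(self, data):
            return data
        def flush(self):
            return b""

    def start_bundle(fh, compress=True):
        if compress:
            fh.write(b"HG10")
            return bz2.BZ2Compressor(9)
        fh.write(b"HG10UN")
        return nocompress()

    buf = BytesIO()
    z = start_bundle(buf, compress=False)
    buf.write(z.compress(b"chunk") + z.flush())
    assert buf.getvalue() == b"HG10UNchunk"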
377 def trimuser(ui, name, rev, revcache):
377 def trimuser(ui, name, rev, revcache):
378 """trim the name of the user who committed a change"""
378 """trim the name of the user who committed a change"""
379 user = revcache.get(rev)
379 user = revcache.get(rev)
380 if user is None:
380 if user is None:
381 user = revcache[rev] = ui.shortuser(name)
381 user = revcache[rev] = ui.shortuser(name)
382 return user
382 return user
383
383
384 class changeset_printer(object):
384 class changeset_printer(object):
385 '''show changeset information when templating not requested.'''
385 '''show changeset information when templating not requested.'''
386
386
387 def __init__(self, ui, repo):
387 def __init__(self, ui, repo):
388 self.ui = ui
388 self.ui = ui
389 self.repo = repo
389 self.repo = repo
390
390
391 def show(self, rev=0, changenode=None, brinfo=None):
391 def show(self, rev=0, changenode=None, brinfo=None):
392 '''show a single changeset or file revision'''
392 '''show a single changeset or file revision'''
393 log = self.repo.changelog
393 log = self.repo.changelog
394 if changenode is None:
394 if changenode is None:
395 changenode = log.node(rev)
395 changenode = log.node(rev)
396 elif not rev:
396 elif not rev:
397 rev = log.rev(changenode)
397 rev = log.rev(changenode)
398
398
399 if self.ui.quiet:
399 if self.ui.quiet:
400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
401 return
401 return
402
402
403 changes = log.read(changenode)
403 changes = log.read(changenode)
404 date = util.datestr(changes[2])
404 date = util.datestr(changes[2])
405
405
406 hexfunc = self.ui.debugflag and hex or short
406 hexfunc = self.ui.debugflag and hex or short
407
407
408 parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
408 parents = [(log.rev(p), hexfunc(p)) for p in log.parents(changenode)
409 if self.ui.debugflag or p != nullid]
409 if self.ui.debugflag or p != nullid]
410 if (not self.ui.debugflag and len(parents) == 1 and
410 if (not self.ui.debugflag and len(parents) == 1 and
411 parents[0][0] == rev-1):
411 parents[0][0] == rev-1):
412 parents = []
412 parents = []
413
413
414 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
414 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
415
415
416 for tag in self.repo.nodetags(changenode):
416 for tag in self.repo.nodetags(changenode):
417 self.ui.status(_("tag: %s\n") % tag)
417 self.ui.status(_("tag: %s\n") % tag)
418 for parent in parents:
418 for parent in parents:
419 self.ui.write(_("parent: %d:%s\n") % parent)
419 self.ui.write(_("parent: %d:%s\n") % parent)
420
420
421 if brinfo and changenode in brinfo:
421 if brinfo and changenode in brinfo:
422 br = brinfo[changenode]
422 br = brinfo[changenode]
423 self.ui.write(_("branch: %s\n") % " ".join(br))
423 self.ui.write(_("branch: %s\n") % " ".join(br))
424
424
425 self.ui.debug(_("manifest: %d:%s\n") %
425 self.ui.debug(_("manifest: %d:%s\n") %
426 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
426 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
427 self.ui.status(_("user: %s\n") % changes[1])
427 self.ui.status(_("user: %s\n") % changes[1])
428 self.ui.status(_("date: %s\n") % date)
428 self.ui.status(_("date: %s\n") % date)
429
429
430 if self.ui.debugflag:
430 if self.ui.debugflag:
431 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
431 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
432 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
432 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
433 files):
433 files):
434 if value:
434 if value:
435 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
435 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
436 else:
436 else:
437 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
437 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
438
438
439 description = changes[4].strip()
439 description = changes[4].strip()
440 if description:
440 if description:
441 if self.ui.verbose:
441 if self.ui.verbose:
442 self.ui.status(_("description:\n"))
442 self.ui.status(_("description:\n"))
443 self.ui.status(description)
443 self.ui.status(description)
444 self.ui.status("\n\n")
444 self.ui.status("\n\n")
445 else:
445 else:
446 self.ui.status(_("summary: %s\n") %
446 self.ui.status(_("summary: %s\n") %
447 description.splitlines()[0])
447 description.splitlines()[0])
448 self.ui.status("\n")
448 self.ui.status("\n")
449
449
450 def show_changeset(ui, repo, opts):
450 def show_changeset(ui, repo, opts):
451 '''show one changeset. uses template or regular display. caller
451 '''show one changeset. uses template or regular display. caller
452 can pass in 'style' and 'template' options in opts.'''
452 can pass in 'style' and 'template' options in opts.'''
453
453
454 tmpl = opts.get('template')
454 tmpl = opts.get('template')
455 if tmpl:
455 if tmpl:
456 tmpl = templater.parsestring(tmpl, quoted=False)
456 tmpl = templater.parsestring(tmpl, quoted=False)
457 else:
457 else:
458 tmpl = ui.config('ui', 'logtemplate')
458 tmpl = ui.config('ui', 'logtemplate')
459 if tmpl: tmpl = templater.parsestring(tmpl)
459 if tmpl: tmpl = templater.parsestring(tmpl)
460 mapfile = opts.get('style') or ui.config('ui', 'style')
460 mapfile = opts.get('style') or ui.config('ui', 'style')
461 if tmpl or mapfile:
461 if tmpl or mapfile:
462 if mapfile:
462 if mapfile:
463 if not os.path.isfile(mapfile):
463 if not os.path.isfile(mapfile):
464 mapname = templater.templatepath('map-cmdline.' + mapfile)
464 mapname = templater.templatepath('map-cmdline.' + mapfile)
465 if not mapname: mapname = templater.templatepath(mapfile)
465 if not mapname: mapname = templater.templatepath(mapfile)
466 if mapname: mapfile = mapname
466 if mapname: mapfile = mapname
467 try:
467 try:
468 t = templater.changeset_templater(ui, repo, mapfile)
468 t = templater.changeset_templater(ui, repo, mapfile)
469 except SyntaxError, inst:
469 except SyntaxError, inst:
470 raise util.Abort(inst.args[0])
470 raise util.Abort(inst.args[0])
471 if tmpl: t.use_template(tmpl)
471 if tmpl: t.use_template(tmpl)
472 return t
472 return t
473 return changeset_printer(ui, repo)
473 return changeset_printer(ui, repo)
474
474
475 def setremoteconfig(ui, opts):
475 def setremoteconfig(ui, opts):
476 "copy remote options to ui tree"
476 "copy remote options to ui tree"
477 if opts.get('ssh'):
477 if opts.get('ssh'):
478 ui.setconfig("ui", "ssh", opts['ssh'])
478 ui.setconfig("ui", "ssh", opts['ssh'])
479 if opts.get('remotecmd'):
479 if opts.get('remotecmd'):
480 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
480 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
481
481
482 def show_version(ui):
482 def show_version(ui):
483 """output version and copyright information"""
483 """output version and copyright information"""
484 ui.write(_("Mercurial Distributed SCM (version %s)\n")
484 ui.write(_("Mercurial Distributed SCM (version %s)\n")
485 % version.get_version())
485 % version.get_version())
486 ui.status(_(
486 ui.status(_(
487 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
487 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
488 "This is free software; see the source for copying conditions. "
488 "This is free software; see the source for copying conditions. "
489 "There is NO\nwarranty; "
489 "There is NO\nwarranty; "
490 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
490 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
491 ))
491 ))
492
492
493 def help_(ui, name=None, with_version=False):
493 def help_(ui, name=None, with_version=False):
494 """show help for a command, extension, or list of commands
494 """show help for a command, extension, or list of commands
495
495
496 With no arguments, print a list of commands and short help.
496 With no arguments, print a list of commands and short help.
497
497
498 Given a command name, print help for that command.
498 Given a command name, print help for that command.
499
499
500 Given an extension name, print help for that extension, and the
500 Given an extension name, print help for that extension, and the
501 commands it provides."""
501 commands it provides."""
502 option_lists = []
502 option_lists = []
503
503
504 def helpcmd(name):
504 def helpcmd(name):
505 if with_version:
505 if with_version:
506 show_version(ui)
506 show_version(ui)
507 ui.write('\n')
507 ui.write('\n')
508 aliases, i = findcmd(ui, name)
508 aliases, i = findcmd(ui, name)
509 # synopsis
509 # synopsis
510 ui.write("%s\n\n" % i[2])
510 ui.write("%s\n\n" % i[2])
511
511
512 # description
512 # description
513 doc = i[0].__doc__
513 doc = i[0].__doc__
514 if not doc:
514 if not doc:
515 doc = _("(No help text available)")
515 doc = _("(No help text available)")
516 if ui.quiet:
516 if ui.quiet:
517 doc = doc.splitlines(0)[0]
517 doc = doc.splitlines(0)[0]
518 ui.write("%s\n" % doc.rstrip())
518 ui.write("%s\n" % doc.rstrip())
519
519
520 if not ui.quiet:
520 if not ui.quiet:
521 # aliases
521 # aliases
522 if len(aliases) > 1:
522 if len(aliases) > 1:
523 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
523 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
524
524
525 # options
525 # options
526 if i[1]:
526 if i[1]:
527 option_lists.append(("options", i[1]))
527 option_lists.append(("options", i[1]))
528
528
529 def helplist(select=None):
529 def helplist(select=None):
530 h = {}
530 h = {}
531 cmds = {}
531 cmds = {}
532 for c, e in table.items():
532 for c, e in table.items():
533 f = c.split("|", 1)[0]
533 f = c.split("|", 1)[0]
534 if select and not select(f):
534 if select and not select(f):
535 continue
535 continue
536 if name == "shortlist" and not f.startswith("^"):
536 if name == "shortlist" and not f.startswith("^"):
537 continue
537 continue
538 f = f.lstrip("^")
538 f = f.lstrip("^")
539 if not ui.debugflag and f.startswith("debug"):
539 if not ui.debugflag and f.startswith("debug"):
540 continue
540 continue
541 doc = e[0].__doc__
541 doc = e[0].__doc__
542 if not doc:
542 if not doc:
543 doc = _("(No help text available)")
543 doc = _("(No help text available)")
544 h[f] = doc.splitlines(0)[0].rstrip()
544 h[f] = doc.splitlines(0)[0].rstrip()
545 cmds[f] = c.lstrip("^")
545 cmds[f] = c.lstrip("^")
546
546
547 fns = h.keys()
547 fns = h.keys()
548 fns.sort()
548 fns.sort()
549 m = max(map(len, fns))
549 m = max(map(len, fns))
550 for f in fns:
550 for f in fns:
551 if ui.verbose:
551 if ui.verbose:
552 commands = cmds[f].replace("|",", ")
552 commands = cmds[f].replace("|",", ")
553 ui.write(" %s:\n %s\n"%(commands, h[f]))
553 ui.write(" %s:\n %s\n"%(commands, h[f]))
554 else:
554 else:
555 ui.write(' %-*s %s\n' % (m, f, h[f]))
555 ui.write(' %-*s %s\n' % (m, f, h[f]))
556
556
557 def helpext(name):
557 def helpext(name):
558 try:
558 try:
559 mod = findext(name)
559 mod = findext(name)
560 except KeyError:
560 except KeyError:
561 raise UnknownCommand(name)
561 raise UnknownCommand(name)
562
562
563 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
563 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
564 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
564 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
565 for d in doc[1:]:
565 for d in doc[1:]:
566 ui.write(d, '\n')
566 ui.write(d, '\n')
567
567
568 ui.status('\n')
568 ui.status('\n')
569 if ui.verbose:
569 if ui.verbose:
570 ui.status(_('list of commands:\n\n'))
570 ui.status(_('list of commands:\n\n'))
571 else:
571 else:
572 ui.status(_('list of commands (use "hg help -v %s" '
572 ui.status(_('list of commands (use "hg help -v %s" '
573 'to show aliases and global options):\n\n') % name)
573 'to show aliases and global options):\n\n') % name)
574
574
575 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
575 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
576 helplist(modcmds.has_key)
576 helplist(modcmds.has_key)
577
577
578 if name and name != 'shortlist':
578 if name and name != 'shortlist':
579 try:
579 try:
580 helpcmd(name)
580 helpcmd(name)
581 except UnknownCommand:
581 except UnknownCommand:
582 helpext(name)
582 helpext(name)
583
583
584 else:
584 else:
585 # program name
585 # program name
586 if ui.verbose or with_version:
586 if ui.verbose or with_version:
587 show_version(ui)
587 show_version(ui)
588 else:
588 else:
589 ui.status(_("Mercurial Distributed SCM\n"))
589 ui.status(_("Mercurial Distributed SCM\n"))
590 ui.status('\n')
590 ui.status('\n')
591
591
592 # list of commands
592 # list of commands
593 if name == "shortlist":
593 if name == "shortlist":
594 ui.status(_('basic commands (use "hg help" '
594 ui.status(_('basic commands (use "hg help" '
595 'for the full list or option "-v" for details):\n\n'))
595 'for the full list or option "-v" for details):\n\n'))
596 elif ui.verbose:
596 elif ui.verbose:
597 ui.status(_('list of commands:\n\n'))
597 ui.status(_('list of commands:\n\n'))
598 else:
598 else:
599 ui.status(_('list of commands (use "hg help -v" '
599 ui.status(_('list of commands (use "hg help -v" '
600 'to show aliases and global options):\n\n'))
600 'to show aliases and global options):\n\n'))
601
601
602 helplist()
602 helplist()
603
603
604 # global options
604 # global options
605 if ui.verbose:
605 if ui.verbose:
606 option_lists.append(("global options", globalopts))
606 option_lists.append(("global options", globalopts))
607
607
608 # list all option lists
608 # list all option lists
609 opt_output = []
609 opt_output = []
610 for title, options in option_lists:
610 for title, options in option_lists:
611 opt_output.append(("\n%s:\n" % title, None))
611 opt_output.append(("\n%s:\n" % title, None))
612 for shortopt, longopt, default, desc in options:
612 for shortopt, longopt, default, desc in options:
613 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
613 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
614 longopt and " --%s" % longopt),
614 longopt and " --%s" % longopt),
615 "%s%s" % (desc,
615 "%s%s" % (desc,
616 default
616 default
617 and _(" (default: %s)") % default
617 and _(" (default: %s)") % default
618 or "")))
618 or "")))
619
619
620 if opt_output:
620 if opt_output:
621 opts_len = max([len(line[0]) for line in opt_output if line[1]])
621 opts_len = max([len(line[0]) for line in opt_output if line[1]])
622 for first, second in opt_output:
622 for first, second in opt_output:
623 if second:
623 if second:
624 ui.write(" %-*s %s\n" % (opts_len, first, second))
624 ui.write(" %-*s %s\n" % (opts_len, first, second))
625 else:
625 else:
626 ui.write("%s\n" % first)
626 ui.write("%s\n" % first)
627
627
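# --- Illustrative sketch (not from the original commands.py): the option
# --- listing above first builds "-x --longopt" cells, measures the widest one,
# --- and then pads with "%-*s" so the descriptions line up. A stand-alone
# --- version of that alignment step:
def format_options(options):
    # options: list of (shortopt, longopt, default, desc) tuples, as above
    rows = []
    for shortopt, longopt, default, desc in options:
        flags = "%2s%s" % (shortopt and "-%s" % shortopt,
                           longopt and " --%s" % longopt)
        rows.append((flags, desc + (default and " (default: %s)" % default or "")))
    width = max(len(flags) for flags, desc in rows)
    return "\n".join(" %-*s  %s" % (width, flags, desc) for flags, desc in rows)

# format_options([('v', 'verbose', None, 'enable additional output'),
#                 ('', 'config', None, 'set/override config option')])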
628 # Commands start here, listed alphabetically
628 # Commands start here, listed alphabetically
629
629
630 def add(ui, repo, *pats, **opts):
630 def add(ui, repo, *pats, **opts):
631 """add the specified files on the next commit
631 """add the specified files on the next commit
632
632
633 Schedule files to be version controlled and added to the repository.
633 Schedule files to be version controlled and added to the repository.
634
634
635 The files will be added to the repository at the next commit.
635 The files will be added to the repository at the next commit.
636
636
637 If no names are given, add all files in the repository.
637 If no names are given, add all files in the repository.
638 """
638 """
639
639
640 names = []
640 names = []
641 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
641 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
642 if exact:
642 if exact:
643 if ui.verbose:
643 if ui.verbose:
644 ui.status(_('adding %s\n') % rel)
644 ui.status(_('adding %s\n') % rel)
645 names.append(abs)
645 names.append(abs)
646 elif repo.dirstate.state(abs) == '?':
646 elif repo.dirstate.state(abs) == '?':
647 ui.status(_('adding %s\n') % rel)
647 ui.status(_('adding %s\n') % rel)
648 names.append(abs)
648 names.append(abs)
649 if not opts.get('dry_run'):
649 if not opts.get('dry_run'):
650 repo.add(names)
650 repo.add(names)
651
651
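# --- Illustrative sketch (not from the original commands.py): the core of
# --- "add" is a walk that keeps explicitly named files plus unknown ('?')
# --- files and only mutates state when --dry-run is off. A filesystem-only
# --- analogue, where the 'tracked' set stands in for the dirstate:
import os

def files_to_add(root, tracked, dry_run=False):
    names = []
    for dirpath, dirs, files in os.walk(root):
        for f in files:
            rel = os.path.relpath(os.path.join(dirpath, f), root)
            if rel not in tracked:       # analogue of dirstate state '?'
                names.append(rel)
    if not dry_run:
        tracked.update(names)            # stand-in for repo.add(names)
    return names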
652 def addremove(ui, repo, *pats, **opts):
652 def addremove(ui, repo, *pats, **opts):
653 """add all new files, delete all missing files (DEPRECATED)
653 """add all new files, delete all missing files (DEPRECATED)
654
654
655 Add all new files and remove all missing files from the repository.
655 Add all new files and remove all missing files from the repository.
656
656
657 New files are ignored if they match any of the patterns in .hgignore. As
657 New files are ignored if they match any of the patterns in .hgignore. As
658 with add, these changes take effect at the next commit.
658 with add, these changes take effect at the next commit.
659
659
660 Use the -s option to detect renamed files. With a parameter > 0,
660 Use the -s option to detect renamed files. With a parameter > 0,
661 this compares every removed file with every added file and records
661 this compares every removed file with every added file and records
662 those similar enough as renames. This option takes a percentage
662 those similar enough as renames. This option takes a percentage
663 between 0 (disabled) and 100 (files must be identical) as its
663 between 0 (disabled) and 100 (files must be identical) as its
664 parameter. Detecting renamed files this way can be expensive.
664 parameter. Detecting renamed files this way can be expensive.
665 """
665 """
666 sim = float(opts.get('similarity') or 0)
666 sim = float(opts.get('similarity') or 0)
667 if sim < 0 or sim > 100:
667 if sim < 0 or sim > 100:
668 raise util.Abort(_('similarity must be between 0 and 100'))
668 raise util.Abort(_('similarity must be between 0 and 100'))
669 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
669 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
670
670
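# --- Illustrative sketch (not from the original commands.py): the
# --- --similarity matching described above can be approximated by scoring
# --- every (removed, added) pair and keeping pairs at or above the threshold.
import difflib

def guess_renames(removed, added, similarity):
    # removed/added: dicts mapping file name -> file contents (strings)
    # similarity: 0..100, the same range the command accepts
    threshold = similarity / 100.0
    renames = []
    for old, oldtext in removed.items():
        best = None
        for new, newtext in added.items():
            score = difflib.SequenceMatcher(None, oldtext, newtext).ratio()
            if score >= threshold and (best is None or score > best[0]):
                best = (score, new)
        if best:
            renames.append((old, best[1], best[0]))
    return renames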
671 def annotate(ui, repo, *pats, **opts):
671 def annotate(ui, repo, *pats, **opts):
672 """show changeset information per file line
672 """show changeset information per file line
673
673
674 List changes in files, showing the revision id responsible for each line
674 List changes in files, showing the revision id responsible for each line
675
675
676 This command is useful to discover who did a change or when a change took
676 This command is useful to discover who did a change or when a change took
677 place.
677 place.
678
678
679 Without the -a option, annotate will avoid processing files it
679 Without the -a option, annotate will avoid processing files it
680 detects as binary. With -a, annotate will generate an annotation
680 detects as binary. With -a, annotate will generate an annotation
681 anyway, probably with undesirable results.
681 anyway, probably with undesirable results.
682 """
682 """
683 def getnode(rev):
683 def getnode(rev):
684 return short(repo.changelog.node(rev))
684 return short(repo.changelog.node(rev))
685
685
686 ucache = {}
686 ucache = {}
687 def getname(rev):
687 def getname(rev):
688 try:
688 try:
689 return ucache[rev]
689 return ucache[rev]
690 except:
690 except:
691 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
691 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
692 ucache[rev] = u
692 ucache[rev] = u
693 return u
693 return u
694
694
695 dcache = {}
695 dcache = {}
696 def getdate(rev):
696 def getdate(rev):
697 datestr = dcache.get(rev)
697 datestr = dcache.get(rev)
698 if datestr is None:
698 if datestr is None:
699 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
699 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
700 return datestr
700 return datestr
701
701
702 if not pats:
702 if not pats:
703 raise util.Abort(_('at least one file name or pattern required'))
703 raise util.Abort(_('at least one file name or pattern required'))
704
704
705 opmap = [['user', getname], ['number', str], ['changeset', getnode],
705 opmap = [['user', getname], ['number', str], ['changeset', getnode],
706 ['date', getdate]]
706 ['date', getdate]]
707 if not opts['user'] and not opts['changeset'] and not opts['date']:
707 if not opts['user'] and not opts['changeset'] and not opts['date']:
708 opts['number'] = 1
708 opts['number'] = 1
709
709
710 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
710 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
711
711
712 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
712 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
713 node=ctx.node()):
713 node=ctx.node()):
714 fctx = ctx.filectx(abs)
714 fctx = ctx.filectx(abs)
715 if not opts['text'] and util.binary(fctx.data()):
715 if not opts['text'] and util.binary(fctx.data()):
716 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
716 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
717 continue
717 continue
718
718
719 lines = fctx.annotate()
719 lines = fctx.annotate()
720 pieces = []
720 pieces = []
721
721
722 for o, f in opmap:
722 for o, f in opmap:
723 if opts[o]:
723 if opts[o]:
724 l = [f(n) for n, dummy in lines]
724 l = [f(n) for n, dummy in lines]
725 if l:
725 if l:
726 m = max(map(len, l))
726 m = max(map(len, l))
727 pieces.append(["%*s" % (m, x) for x in l])
727 pieces.append(["%*s" % (m, x) for x in l])
728
728
729 if pieces:
729 if pieces:
730 for p, l in zip(zip(*pieces), lines):
730 for p, l in zip(zip(*pieces), lines):
731 ui.write("%s: %s" % (" ".join(p), l[1]))
731 ui.write("%s: %s" % (" ".join(p), l[1]))
732
732
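# --- Illustrative sketch (not from the original commands.py): annotate pads
# --- every requested column (user, number, changeset, date) to the width of
# --- its widest value with "%*s" before joining it to the line text.
def align_columns(columns, lines):
    # columns: list of lists, one value per line; lines: the annotated text
    pieces = []
    for col in columns:
        width = max(len(str(x)) for x in col)
        pieces.append(["%*s" % (width, x) for x in col])
    return ["%s: %s" % (" ".join(p), text) for p, text in zip(zip(*pieces), lines)]

# align_columns([[9, 10, 103]], ["def f():\n", "    pass\n", "\n"])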
733 def archive(ui, repo, dest, **opts):
733 def archive(ui, repo, dest, **opts):
734 '''create unversioned archive of a repository revision
734 '''create unversioned archive of a repository revision
735
735
736 By default, the revision used is the parent of the working
736 By default, the revision used is the parent of the working
737 directory; use "-r" to specify a different revision.
737 directory; use "-r" to specify a different revision.
738
738
739 To specify the type of archive to create, use "-t". Valid
739 To specify the type of archive to create, use "-t". Valid
740 types are:
740 types are:
741
741
742 "files" (default): a directory full of files
742 "files" (default): a directory full of files
743 "tar": tar archive, uncompressed
743 "tar": tar archive, uncompressed
744 "tbz2": tar archive, compressed using bzip2
744 "tbz2": tar archive, compressed using bzip2
745 "tgz": tar archive, compressed using gzip
745 "tgz": tar archive, compressed using gzip
746 "uzip": zip archive, uncompressed
746 "uzip": zip archive, uncompressed
747 "zip": zip archive, compressed using deflate
747 "zip": zip archive, compressed using deflate
748
748
749 The exact name of the destination archive or directory is given
749 The exact name of the destination archive or directory is given
750 using a format string; see "hg help export" for details.
750 using a format string; see "hg help export" for details.
751
751
752 Each member added to an archive file has a directory prefix
752 Each member added to an archive file has a directory prefix
753 prepended. Use "-p" to specify a format string for the prefix.
753 prepended. Use "-p" to specify a format string for the prefix.
754 The default is the basename of the archive, with suffixes removed.
754 The default is the basename of the archive, with suffixes removed.
755 '''
755 '''
756
756
757 if opts['rev']:
757 if opts['rev']:
758 node = repo.lookup(opts['rev'])
758 node = repo.lookup(opts['rev'])
759 else:
759 else:
760 node, p2 = repo.dirstate.parents()
760 node, p2 = repo.dirstate.parents()
761 if p2 != nullid:
761 if p2 != nullid:
762 raise util.Abort(_('uncommitted merge - please provide a '
762 raise util.Abort(_('uncommitted merge - please provide a '
763 'specific revision'))
763 'specific revision'))
764
764
765 dest = cmdutil.make_filename(repo, dest, node)
765 dest = cmdutil.make_filename(repo, dest, node)
766 if os.path.realpath(dest) == repo.root:
766 if os.path.realpath(dest) == repo.root:
767 raise util.Abort(_('repository root cannot be destination'))
767 raise util.Abort(_('repository root cannot be destination'))
768 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
768 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
769 kind = opts.get('type') or 'files'
769 kind = opts.get('type') or 'files'
770 prefix = opts['prefix']
770 prefix = opts['prefix']
771 if dest == '-':
771 if dest == '-':
772 if kind == 'files':
772 if kind == 'files':
773 raise util.Abort(_('cannot archive plain files to stdout'))
773 raise util.Abort(_('cannot archive plain files to stdout'))
774 dest = sys.stdout
774 dest = sys.stdout
775 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
775 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
776 prefix = cmdutil.make_filename(repo, prefix, node)
776 prefix = cmdutil.make_filename(repo, prefix, node)
777 archival.archive(repo, dest, node, kind, not opts['no_decode'],
777 archival.archive(repo, dest, node, kind, not opts['no_decode'],
778 matchfn, prefix)
778 matchfn, prefix)
779
779
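# --- Illustrative sketch (not from the original commands.py): the archive
# --- types listed above map naturally onto the standard tarfile/zipfile
# --- modules; this stand-alone version archives a plain directory rather
# --- than a repository revision.
import os, tarfile, zipfile

def write_archive(root, dest, kind='tgz'):
    if kind in ('tar', 'tgz', 'tbz2'):
        mode = {'tar': 'w', 'tgz': 'w:gz', 'tbz2': 'w:bz2'}[kind]
        tf = tarfile.open(dest, mode)
        tf.add(root, arcname=os.path.basename(root))
        tf.close()
    elif kind in ('zip', 'uzip'):
        comp = zipfile.ZIP_DEFLATED if kind == 'zip' else zipfile.ZIP_STORED
        zf = zipfile.ZipFile(dest, 'w', comp)
        for dirpath, dirs, files in os.walk(root):
            for name in files:
                full = os.path.join(dirpath, name)
                zf.write(full, os.path.relpath(full, os.path.dirname(root)))
        zf.close()
    else:
        raise ValueError('unsupported archive type: %s' % kind)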
780 def backout(ui, repo, rev, **opts):
780 def backout(ui, repo, rev, **opts):
781 '''reverse effect of earlier changeset
781 '''reverse effect of earlier changeset
782
782
783 Commit the backed out changes as a new changeset. The new
783 Commit the backed out changes as a new changeset. The new
784 changeset is a child of the backed out changeset.
784 changeset is a child of the backed out changeset.
785
785
786 If you back out a changeset other than the tip, a new head is
786 If you back out a changeset other than the tip, a new head is
787 created. This head is the parent of the working directory. If
787 created. This head is the parent of the working directory. If
788 you back out an old changeset, your working directory will appear
788 you back out an old changeset, your working directory will appear
789 old after the backout. You should merge the backout changeset
789 old after the backout. You should merge the backout changeset
790 with another head.
790 with another head.
791
791
792 The --merge option remembers the parent of the working directory
792 The --merge option remembers the parent of the working directory
793 before starting the backout, then merges the new head with that
793 before starting the backout, then merges the new head with that
794 changeset afterwards. This saves you from doing the merge by
794 changeset afterwards. This saves you from doing the merge by
795 hand. The result of this merge is not committed, as for a normal
795 hand. The result of this merge is not committed, as for a normal
796 merge.'''
796 merge.'''
797
797
798 bail_if_changed(repo)
798 bail_if_changed(repo)
799 op1, op2 = repo.dirstate.parents()
799 op1, op2 = repo.dirstate.parents()
800 if op2 != nullid:
800 if op2 != nullid:
801 raise util.Abort(_('outstanding uncommitted merge'))
801 raise util.Abort(_('outstanding uncommitted merge'))
802 node = repo.lookup(rev)
802 node = repo.lookup(rev)
803 p1, p2 = repo.changelog.parents(node)
803 p1, p2 = repo.changelog.parents(node)
804 if p1 == nullid:
804 if p1 == nullid:
805 raise util.Abort(_('cannot back out a change with no parents'))
805 raise util.Abort(_('cannot back out a change with no parents'))
806 if p2 != nullid:
806 if p2 != nullid:
807 if not opts['parent']:
807 if not opts['parent']:
808 raise util.Abort(_('cannot back out a merge changeset without '
808 raise util.Abort(_('cannot back out a merge changeset without '
809 '--parent'))
809 '--parent'))
810 p = repo.lookup(opts['parent'])
810 p = repo.lookup(opts['parent'])
811 if p not in (p1, p2):
811 if p not in (p1, p2):
812 raise util.Abort(_('%s is not a parent of %s' %
812 raise util.Abort(_('%s is not a parent of %s' %
813 (short(p), short(node))))
813 (short(p), short(node))))
814 parent = p
814 parent = p
815 else:
815 else:
816 if opts['parent']:
816 if opts['parent']:
817 raise util.Abort(_('cannot use --parent on non-merge changeset'))
817 raise util.Abort(_('cannot use --parent on non-merge changeset'))
818 parent = p1
818 parent = p1
819 hg.clean(repo, node, show_stats=False)
819 hg.clean(repo, node, show_stats=False)
820 revert_opts = opts.copy()
820 revert_opts = opts.copy()
821 revert_opts['all'] = True
821 revert_opts['all'] = True
822 revert_opts['rev'] = hex(parent)
822 revert_opts['rev'] = hex(parent)
823 revert(ui, repo, **revert_opts)
823 revert(ui, repo, **revert_opts)
824 commit_opts = opts.copy()
824 commit_opts = opts.copy()
825 commit_opts['addremove'] = False
825 commit_opts['addremove'] = False
826 if not commit_opts['message'] and not commit_opts['logfile']:
826 if not commit_opts['message'] and not commit_opts['logfile']:
827 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
827 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
828 commit_opts['force_editor'] = True
828 commit_opts['force_editor'] = True
829 commit(ui, repo, **commit_opts)
829 commit(ui, repo, **commit_opts)
830 def nice(node):
830 def nice(node):
831 return '%d:%s' % (repo.changelog.rev(node), short(node))
831 return '%d:%s' % (repo.changelog.rev(node), short(node))
832 ui.status(_('changeset %s backs out changeset %s\n') %
832 ui.status(_('changeset %s backs out changeset %s\n') %
833 (nice(repo.changelog.tip()), nice(node)))
833 (nice(repo.changelog.tip()), nice(node)))
834 if op1 != node:
834 if op1 != node:
835 if opts['merge']:
835 if opts['merge']:
836 ui.status(_('merging with changeset %s\n') % nice(op1))
836 ui.status(_('merging with changeset %s\n') % nice(op1))
837 n = _lookup(repo, hex(op1))
837 n = _lookup(repo, hex(op1))
838 hg.merge(repo, n)
838 hg.merge(repo, n)
839 else:
839 else:
840 ui.status(_('the backout changeset is a new head - '
840 ui.status(_('the backout changeset is a new head - '
841 'do not forget to merge\n'))
841 'do not forget to merge\n'))
842 ui.status(_('(use "backout --merge" '
842 ui.status(_('(use "backout --merge" '
843 'if you want to auto-merge)\n'))
843 'if you want to auto-merge)\n'))
844
844
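# --- Illustrative sketch (not from the original commands.py): a rough manual
# --- equivalent of "hg backout --merge REV" driven through subprocess,
# --- assuming the backout leaves exactly two heads and that merging with the
# --- old head is what is wanted.
import subprocess

def backout_and_merge(rev):
    subprocess.check_call(['hg', 'backout', '-m',
                           'Backed out changeset %s' % rev, rev])
    subprocess.check_call(['hg', 'merge'])   # merge the new head with the old one
    subprocess.check_call(['hg', 'commit', '-m', 'merge backout of %s' % rev])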
845 def bundle(ui, repo, fname, dest=None, **opts):
845 def bundle(ui, repo, fname, dest=None, **opts):
846 """create a changegroup file
846 """create a changegroup file
847
847
848 Generate a compressed changegroup file collecting all changesets
848 Generate a compressed changegroup file collecting all changesets
849 not found in the other repository.
849 not found in the other repository.
850
850
851 This file can then be transferred using conventional means and
851 This file can then be transferred using conventional means and
852 applied to another repository with the unbundle command. This is
852 applied to another repository with the unbundle command. This is
853 useful when native push and pull are not available or when
853 useful when native push and pull are not available or when
854 exporting an entire repository is undesirable. The standard file
854 exporting an entire repository is undesirable. The standard file
855 extension is ".hg".
855 extension is ".hg".
856
856
857 Unlike import/export, this exactly preserves all changeset
857 Unlike import/export, this exactly preserves all changeset
858 contents including permissions, rename data, and revision history.
858 contents including permissions, rename data, and revision history.
859 """
859 """
860 dest = ui.expandpath(dest or 'default-push', dest or 'default')
860 dest = ui.expandpath(dest or 'default-push', dest or 'default')
861 other = hg.repository(ui, dest)
861 other = hg.repository(ui, dest)
862 o = repo.findoutgoing(other, force=opts['force'])
862 o = repo.findoutgoing(other, force=opts['force'])
863 cg = repo.changegroup(o, 'bundle')
863 cg = repo.changegroup(o, 'bundle')
864 write_bundle(cg, fname)
864 write_bundle(cg, fname)
865
865
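# --- Illustrative sketch (not from the original commands.py): the
# --- bundle/unbundle round trip described above, assuming a related clone
# --- exists at ../other.
import os, subprocess

def bundle_round_trip(other='../other'):
    bundle = os.path.abspath('changes.hg')
    subprocess.check_call(['hg', 'bundle', bundle, other])        # collect outgoing changesets
    subprocess.check_call(['hg', 'unbundle', bundle], cwd=other)  # apply them on the other side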
866 def cat(ui, repo, file1, *pats, **opts):
866 def cat(ui, repo, file1, *pats, **opts):
867 """output the latest or given revisions of files
867 """output the latest or given revisions of files
868
868
869 Print the specified files as they were at the given revision.
869 Print the specified files as they were at the given revision.
870 If no revision is given then the tip is used.
870 If no revision is given then the tip is used.
871
871
872 Output may be to a file, in which case the name of the file is
872 Output may be to a file, in which case the name of the file is
873 given using a format string. The formatting rules are the same as
873 given using a format string. The formatting rules are the same as
874 for the export command, with the following additions:
874 for the export command, with the following additions:
875
875
876 %s basename of file being printed
876 %s basename of file being printed
877 %d dirname of file being printed, or '.' if in repo root
877 %d dirname of file being printed, or '.' if in repo root
878 %p root-relative path name of file being printed
878 %p root-relative path name of file being printed
879 """
879 """
880 ctx = repo.changectx(opts['rev'] or "-1")
880 ctx = repo.changectx(opts['rev'] or "-1")
881 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
881 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
882 ctx.node()):
882 ctx.node()):
883 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
883 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
884 fp.write(ctx.filectx(abs).data())
884 fp.write(ctx.filectx(abs).data())
885
885
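# --- Illustrative sketch (not from the original commands.py): the extra
# --- %s/%d/%p keys accepted by --output can be expanded with a simple
# --- substitution over the repo-relative path of the file being printed.
import os

def expand_output_name(fmt, relpath):
    mapping = {'s': os.path.basename(relpath),
               'd': os.path.dirname(relpath) or '.',
               'p': relpath,
               '%': '%'}
    out, i = [], 0
    while i < len(fmt):
        if fmt[i] == '%' and i + 1 < len(fmt):
            out.append(mapping.get(fmt[i + 1], '%' + fmt[i + 1]))
            i += 2
        else:
            out.append(fmt[i])
            i += 1
    return ''.join(out)

# expand_output_name('%d/%s.orig', 'src/util.py') -> 'src/util.py.orig'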
886 def clone(ui, source, dest=None, **opts):
886 def clone(ui, source, dest=None, **opts):
887 """make a copy of an existing repository
887 """make a copy of an existing repository
888
888
889 Create a copy of an existing repository in a new directory.
889 Create a copy of an existing repository in a new directory.
890
890
891 If no destination directory name is specified, it defaults to the
891 If no destination directory name is specified, it defaults to the
892 basename of the source.
892 basename of the source.
893
893
894 The location of the source is added to the new repository's
894 The location of the source is added to the new repository's
895 .hg/hgrc file, as the default to be used for future pulls.
895 .hg/hgrc file, as the default to be used for future pulls.
896
896
897 For efficiency, hardlinks are used for cloning whenever the source
897 For efficiency, hardlinks are used for cloning whenever the source
898 and destination are on the same filesystem (note this applies only
898 and destination are on the same filesystem (note this applies only
899 to the repository data, not to the checked out files). Some
899 to the repository data, not to the checked out files). Some
900 filesystems, such as AFS, implement hardlinking incorrectly, but
900 filesystems, such as AFS, implement hardlinking incorrectly, but
901 do not report errors. In these cases, use the --pull option to
901 do not report errors. In these cases, use the --pull option to
902 avoid hardlinking.
902 avoid hardlinking.
903
903
904 You can safely clone repositories and checked out files using full
904 You can safely clone repositories and checked out files using full
905 hardlinks with
905 hardlinks with
906
906
907 $ cp -al REPO REPOCLONE
907 $ cp -al REPO REPOCLONE
908
908
909 which is the fastest way to clone. However, the operation is not
909 which is the fastest way to clone. However, the operation is not
910 atomic (making sure REPO is not modified during the operation is
910 atomic (making sure REPO is not modified during the operation is
911 up to you) and you have to make sure your editor breaks hardlinks
911 up to you) and you have to make sure your editor breaks hardlinks
912 (Emacs and most Linux Kernel tools do so).
912 (Emacs and most Linux Kernel tools do so).
913
913
914 If you use the -r option to clone up to a specific revision, no
914 If you use the -r option to clone up to a specific revision, no
915 subsequent revisions will be present in the cloned repository.
915 subsequent revisions will be present in the cloned repository.
916 This option implies --pull, even on local repositories.
916 This option implies --pull, even on local repositories.
917
917
918 See pull for valid source format details.
918 See pull for valid source format details.
919
919
920 It is possible to specify an ssh:// URL as the destination, but no
920 It is possible to specify an ssh:// URL as the destination, but no
921 .hg/hgrc will be created on the remote side. Look at the help text
921 .hg/hgrc will be created on the remote side. Look at the help text
922 for the pull command for important details about ssh:// URLs.
922 for the pull command for important details about ssh:// URLs.
923 """
923 """
924 setremoteconfig(ui, opts)
924 setremoteconfig(ui, opts)
925 hg.clone(ui, ui.expandpath(source), dest,
925 hg.clone(ui, ui.expandpath(source), dest,
926 pull=opts['pull'],
926 pull=opts['pull'],
927 stream=opts['uncompressed'],
927 stream=opts['uncompressed'],
928 rev=opts['rev'],
928 rev=opts['rev'],
929 update=not opts['noupdate'])
929 update=not opts['noupdate'])
930
930
931 def commit(ui, repo, *pats, **opts):
931 def commit(ui, repo, *pats, **opts):
932 """commit the specified files or all outstanding changes
932 """commit the specified files or all outstanding changes
933
933
934 Commit changes to the given files into the repository.
934 Commit changes to the given files into the repository.
935
935
936 If a list of files is omitted, all changes reported by "hg status"
936 If a list of files is omitted, all changes reported by "hg status"
937 will be committed.
937 will be committed.
938
938
939 If no commit message is specified, the editor configured in your hgrc
939 If no commit message is specified, the editor configured in your hgrc
940 or in the EDITOR environment variable is started to enter a message.
940 or in the EDITOR environment variable is started to enter a message.
941 """
941 """
942 message = logmessage(opts)
942 message = logmessage(opts)
943
943
944 if opts['addremove']:
944 if opts['addremove']:
945 cmdutil.addremove(repo, pats, opts)
945 cmdutil.addremove(repo, pats, opts)
946 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
946 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
947 if pats:
947 if pats:
948 modified, added, removed = repo.status(files=fns, match=match)[:3]
948 modified, added, removed = repo.status(files=fns, match=match)[:3]
949 files = modified + added + removed
949 files = modified + added + removed
950 else:
950 else:
951 files = []
951 files = []
952 try:
952 try:
953 repo.commit(files, message, opts['user'], opts['date'], match,
953 repo.commit(files, message, opts['user'], opts['date'], match,
954 force_editor=opts.get('force_editor'))
954 force_editor=opts.get('force_editor'))
955 except ValueError, inst:
955 except ValueError, inst:
956 raise util.Abort(str(inst))
956 raise util.Abort(str(inst))
957
957
958 def docopy(ui, repo, pats, opts, wlock):
958 def docopy(ui, repo, pats, opts, wlock):
959 # called with the repo lock held
959 # called with the repo lock held
960 cwd = repo.getcwd()
960 cwd = repo.getcwd()
961 errors = 0
961 errors = 0
962 copied = []
962 copied = []
963 targets = {}
963 targets = {}
964
964
965 def okaytocopy(abs, rel, exact):
965 def okaytocopy(abs, rel, exact):
966 reasons = {'?': _('is not managed'),
966 reasons = {'?': _('is not managed'),
967 'a': _('has been marked for add'),
967 'a': _('has been marked for add'),
968 'r': _('has been marked for remove')}
968 'r': _('has been marked for remove')}
969 state = repo.dirstate.state(abs)
969 state = repo.dirstate.state(abs)
970 reason = reasons.get(state)
970 reason = reasons.get(state)
971 if reason:
971 if reason:
972 if state == 'a':
972 if state == 'a':
973 origsrc = repo.dirstate.copied(abs)
973 origsrc = repo.dirstate.copied(abs)
974 if origsrc is not None:
974 if origsrc is not None:
975 return origsrc
975 return origsrc
976 if exact:
976 if exact:
977 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
977 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
978 else:
978 else:
979 return abs
979 return abs
980
980
981 def copy(origsrc, abssrc, relsrc, target, exact):
981 def copy(origsrc, abssrc, relsrc, target, exact):
982 abstarget = util.canonpath(repo.root, cwd, target)
982 abstarget = util.canonpath(repo.root, cwd, target)
983 reltarget = util.pathto(cwd, abstarget)
983 reltarget = util.pathto(cwd, abstarget)
984 prevsrc = targets.get(abstarget)
984 prevsrc = targets.get(abstarget)
985 if prevsrc is not None:
985 if prevsrc is not None:
986 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
986 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
987 (reltarget, abssrc, prevsrc))
987 (reltarget, abssrc, prevsrc))
988 return
988 return
989 if (not opts['after'] and os.path.exists(reltarget) or
989 if (not opts['after'] and os.path.exists(reltarget) or
990 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
990 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
991 if not opts['force']:
991 if not opts['force']:
992 ui.warn(_('%s: not overwriting - file exists\n') %
992 ui.warn(_('%s: not overwriting - file exists\n') %
993 reltarget)
993 reltarget)
994 return
994 return
995 if not opts['after'] and not opts.get('dry_run'):
995 if not opts['after'] and not opts.get('dry_run'):
996 os.unlink(reltarget)
996 os.unlink(reltarget)
997 if opts['after']:
997 if opts['after']:
998 if not os.path.exists(reltarget):
998 if not os.path.exists(reltarget):
999 return
999 return
1000 else:
1000 else:
1001 targetdir = os.path.dirname(reltarget) or '.'
1001 targetdir = os.path.dirname(reltarget) or '.'
1002 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1002 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1003 os.makedirs(targetdir)
1003 os.makedirs(targetdir)
1004 try:
1004 try:
1005 restore = repo.dirstate.state(abstarget) == 'r'
1005 restore = repo.dirstate.state(abstarget) == 'r'
1006 if restore and not opts.get('dry_run'):
1006 if restore and not opts.get('dry_run'):
1007 repo.undelete([abstarget], wlock)
1007 repo.undelete([abstarget], wlock)
1008 try:
1008 try:
1009 if not opts.get('dry_run'):
1009 if not opts.get('dry_run'):
1010 shutil.copyfile(relsrc, reltarget)
1010 shutil.copyfile(relsrc, reltarget)
1011 shutil.copymode(relsrc, reltarget)
1011 shutil.copymode(relsrc, reltarget)
1012 restore = False
1012 restore = False
1013 finally:
1013 finally:
1014 if restore:
1014 if restore:
1015 repo.remove([abstarget], wlock)
1015 repo.remove([abstarget], wlock)
1016 except shutil.Error, inst:
1016 except shutil.Error, inst:
1017 raise util.Abort(str(inst))
1017 raise util.Abort(str(inst))
1018 except IOError, inst:
1018 except IOError, inst:
1019 if inst.errno == errno.ENOENT:
1019 if inst.errno == errno.ENOENT:
1020 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1020 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1021 else:
1021 else:
1022 ui.warn(_('%s: cannot copy - %s\n') %
1022 ui.warn(_('%s: cannot copy - %s\n') %
1023 (relsrc, inst.strerror))
1023 (relsrc, inst.strerror))
1024 errors += 1
1024 errors += 1
1025 return
1025 return
1026 if ui.verbose or not exact:
1026 if ui.verbose or not exact:
1027 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1027 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1028 targets[abstarget] = abssrc
1028 targets[abstarget] = abssrc
1029 if abstarget != origsrc and not opts.get('dry_run'):
1029 if abstarget != origsrc and not opts.get('dry_run'):
1030 repo.copy(origsrc, abstarget, wlock)
1030 repo.copy(origsrc, abstarget, wlock)
1031 copied.append((abssrc, relsrc, exact))
1031 copied.append((abssrc, relsrc, exact))
1032
1032
1033 def targetpathfn(pat, dest, srcs):
1033 def targetpathfn(pat, dest, srcs):
1034 if os.path.isdir(pat):
1034 if os.path.isdir(pat):
1035 abspfx = util.canonpath(repo.root, cwd, pat)
1035 abspfx = util.canonpath(repo.root, cwd, pat)
1036 if destdirexists:
1036 if destdirexists:
1037 striplen = len(os.path.split(abspfx)[0])
1037 striplen = len(os.path.split(abspfx)[0])
1038 else:
1038 else:
1039 striplen = len(abspfx)
1039 striplen = len(abspfx)
1040 if striplen:
1040 if striplen:
1041 striplen += len(os.sep)
1041 striplen += len(os.sep)
1042 res = lambda p: os.path.join(dest, p[striplen:])
1042 res = lambda p: os.path.join(dest, p[striplen:])
1043 elif destdirexists:
1043 elif destdirexists:
1044 res = lambda p: os.path.join(dest, os.path.basename(p))
1044 res = lambda p: os.path.join(dest, os.path.basename(p))
1045 else:
1045 else:
1046 res = lambda p: dest
1046 res = lambda p: dest
1047 return res
1047 return res
1048
1048
1049 def targetpathafterfn(pat, dest, srcs):
1049 def targetpathafterfn(pat, dest, srcs):
1050 if util.patkind(pat, None)[0]:
1050 if util.patkind(pat, None)[0]:
1051 # a mercurial pattern
1051 # a mercurial pattern
1052 res = lambda p: os.path.join(dest, os.path.basename(p))
1052 res = lambda p: os.path.join(dest, os.path.basename(p))
1053 else:
1053 else:
1054 abspfx = util.canonpath(repo.root, cwd, pat)
1054 abspfx = util.canonpath(repo.root, cwd, pat)
1055 if len(abspfx) < len(srcs[0][0]):
1055 if len(abspfx) < len(srcs[0][0]):
1056 # A directory. Either the target path contains the last
1056 # A directory. Either the target path contains the last
1057 # component of the source path or it does not.
1057 # component of the source path or it does not.
1058 def evalpath(striplen):
1058 def evalpath(striplen):
1059 score = 0
1059 score = 0
1060 for s in srcs:
1060 for s in srcs:
1061 t = os.path.join(dest, s[0][striplen:])
1061 t = os.path.join(dest, s[0][striplen:])
1062 if os.path.exists(t):
1062 if os.path.exists(t):
1063 score += 1
1063 score += 1
1064 return score
1064 return score
1065
1065
1066 striplen = len(abspfx)
1066 striplen = len(abspfx)
1067 if striplen:
1067 if striplen:
1068 striplen += len(os.sep)
1068 striplen += len(os.sep)
1069 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1069 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1070 score = evalpath(striplen)
1070 score = evalpath(striplen)
1071 striplen1 = len(os.path.split(abspfx)[0])
1071 striplen1 = len(os.path.split(abspfx)[0])
1072 if striplen1:
1072 if striplen1:
1073 striplen1 += len(os.sep)
1073 striplen1 += len(os.sep)
1074 if evalpath(striplen1) > score:
1074 if evalpath(striplen1) > score:
1075 striplen = striplen1
1075 striplen = striplen1
1076 res = lambda p: os.path.join(dest, p[striplen:])
1076 res = lambda p: os.path.join(dest, p[striplen:])
1077 else:
1077 else:
1078 # a file
1078 # a file
1079 if destdirexists:
1079 if destdirexists:
1080 res = lambda p: os.path.join(dest, os.path.basename(p))
1080 res = lambda p: os.path.join(dest, os.path.basename(p))
1081 else:
1081 else:
1082 res = lambda p: dest
1082 res = lambda p: dest
1083 return res
1083 return res
1084
1084
1085
1085
1086 pats = list(pats)
1086 pats = list(pats)
1087 if not pats:
1087 if not pats:
1088 raise util.Abort(_('no source or destination specified'))
1088 raise util.Abort(_('no source or destination specified'))
1089 if len(pats) == 1:
1089 if len(pats) == 1:
1090 raise util.Abort(_('no destination specified'))
1090 raise util.Abort(_('no destination specified'))
1091 dest = pats.pop()
1091 dest = pats.pop()
1092 destdirexists = os.path.isdir(dest)
1092 destdirexists = os.path.isdir(dest)
1093 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1093 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1094 raise util.Abort(_('with multiple sources, destination must be an '
1094 raise util.Abort(_('with multiple sources, destination must be an '
1095 'existing directory'))
1095 'existing directory'))
1096 if opts['after']:
1096 if opts['after']:
1097 tfn = targetpathafterfn
1097 tfn = targetpathafterfn
1098 else:
1098 else:
1099 tfn = targetpathfn
1099 tfn = targetpathfn
1100 copylist = []
1100 copylist = []
1101 for pat in pats:
1101 for pat in pats:
1102 srcs = []
1102 srcs = []
1103 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1103 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1104 origsrc = okaytocopy(abssrc, relsrc, exact)
1104 origsrc = okaytocopy(abssrc, relsrc, exact)
1105 if origsrc:
1105 if origsrc:
1106 srcs.append((origsrc, abssrc, relsrc, exact))
1106 srcs.append((origsrc, abssrc, relsrc, exact))
1107 if not srcs:
1107 if not srcs:
1108 continue
1108 continue
1109 copylist.append((tfn(pat, dest, srcs), srcs))
1109 copylist.append((tfn(pat, dest, srcs), srcs))
1110 if not copylist:
1110 if not copylist:
1111 raise util.Abort(_('no files to copy'))
1111 raise util.Abort(_('no files to copy'))
1112
1112
1113 for targetpath, srcs in copylist:
1113 for targetpath, srcs in copylist:
1114 for origsrc, abssrc, relsrc, exact in srcs:
1114 for origsrc, abssrc, relsrc, exact in srcs:
1115 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1115 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1116
1116
1117 if errors:
1117 if errors:
1118 ui.warn(_('(consider using --after)\n'))
1118 ui.warn(_('(consider using --after)\n'))
1119 return errors, copied
1119 return errors, copied
1120
1120
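# --- Illustrative sketch (not from the original commands.py): the essence of
# --- the targetpathfn logic above for the plain (non-pattern) case - copy
# --- into an existing directory keeping the basename, otherwise treat dest
# --- as the new name.
import os

def plain_target(src, dest):
    if os.path.isdir(dest):
        return os.path.join(dest, os.path.basename(src))
    return dest

# (POSIX paths) plain_target('a/b.txt', 'existing_dir') -> 'existing_dir/b.txt'
#               plain_target('a/b.txt', 'c.txt')        -> 'c.txt'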
1121 def copy(ui, repo, *pats, **opts):
1121 def copy(ui, repo, *pats, **opts):
1122 """mark files as copied for the next commit
1122 """mark files as copied for the next commit
1123
1123
1124 Mark dest as having copies of source files. If dest is a
1124 Mark dest as having copies of source files. If dest is a
1125 directory, copies are put in that directory. If dest is a file,
1125 directory, copies are put in that directory. If dest is a file,
1126 there can only be one source.
1126 there can only be one source.
1127
1127
1128 By default, this command copies the contents of files as they
1128 By default, this command copies the contents of files as they
1129 stand in the working directory. If invoked with --after, the
1129 stand in the working directory. If invoked with --after, the
1130 operation is recorded, but no copying is performed.
1130 operation is recorded, but no copying is performed.
1131
1131
1132 This command takes effect in the next commit.
1132 This command takes effect in the next commit.
1133
1133
1134 NOTE: This command should be treated as experimental. While it
1134 NOTE: This command should be treated as experimental. While it
1135 should properly record copied files, this information is not yet
1135 should properly record copied files, this information is not yet
1136 fully used by merge, nor fully reported by log.
1136 fully used by merge, nor fully reported by log.
1137 """
1137 """
1138 wlock = repo.wlock(0)
1138 wlock = repo.wlock(0)
1139 errs, copied = docopy(ui, repo, pats, opts, wlock)
1139 errs, copied = docopy(ui, repo, pats, opts, wlock)
1140 return errs
1140 return errs
1141
1141
1142 def debugancestor(ui, index, rev1, rev2):
1142 def debugancestor(ui, index, rev1, rev2):
1143 """find the ancestor revision of two revisions in a given index"""
1143 """find the ancestor revision of two revisions in a given index"""
1144 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1144 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1145 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1145 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1146 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1146 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1147
1147
1148 def debugcomplete(ui, cmd='', **opts):
1148 def debugcomplete(ui, cmd='', **opts):
1149 """returns the completion list associated with the given command"""
1149 """returns the completion list associated with the given command"""
1150
1150
1151 if opts['options']:
1151 if opts['options']:
1152 options = []
1152 options = []
1153 otables = [globalopts]
1153 otables = [globalopts]
1154 if cmd:
1154 if cmd:
1155 aliases, entry = findcmd(ui, cmd)
1155 aliases, entry = findcmd(ui, cmd)
1156 otables.append(entry[1])
1156 otables.append(entry[1])
1157 for t in otables:
1157 for t in otables:
1158 for o in t:
1158 for o in t:
1159 if o[0]:
1159 if o[0]:
1160 options.append('-%s' % o[0])
1160 options.append('-%s' % o[0])
1161 options.append('--%s' % o[1])
1161 options.append('--%s' % o[1])
1162 ui.write("%s\n" % "\n".join(options))
1162 ui.write("%s\n" % "\n".join(options))
1163 return
1163 return
1164
1164
1165 clist = findpossible(ui, cmd).keys()
1165 clist = findpossible(ui, cmd).keys()
1166 clist.sort()
1166 clist.sort()
1167 ui.write("%s\n" % "\n".join(clist))
1167 ui.write("%s\n" % "\n".join(clist))
1168
1168
1169 def debugrebuildstate(ui, repo, rev=None):
1169 def debugrebuildstate(ui, repo, rev=None):
1170 """rebuild the dirstate as it would look for the given revision"""
1170 """rebuild the dirstate as it would look for the given revision"""
1171 if not rev:
1171 if not rev:
1172 rev = repo.changelog.tip()
1172 rev = repo.changelog.tip()
1173 else:
1173 else:
1174 rev = repo.lookup(rev)
1174 rev = repo.lookup(rev)
1175 change = repo.changelog.read(rev)
1175 change = repo.changelog.read(rev)
1176 n = change[0]
1176 n = change[0]
1177 files = repo.manifest.read(n)
1177 files = repo.manifest.read(n)
1178 wlock = repo.wlock()
1178 wlock = repo.wlock()
1179 repo.dirstate.rebuild(rev, files)
1179 repo.dirstate.rebuild(rev, files)
1180
1180
1181 def debugcheckstate(ui, repo):
1181 def debugcheckstate(ui, repo):
1182 """validate the correctness of the current dirstate"""
1182 """validate the correctness of the current dirstate"""
1183 parent1, parent2 = repo.dirstate.parents()
1183 parent1, parent2 = repo.dirstate.parents()
1184 repo.dirstate.read()
1184 repo.dirstate.read()
1185 dc = repo.dirstate.map
1185 dc = repo.dirstate.map
1186 keys = dc.keys()
1186 keys = dc.keys()
1187 keys.sort()
1187 keys.sort()
1188 m1n = repo.changelog.read(parent1)[0]
1188 m1n = repo.changelog.read(parent1)[0]
1189 m2n = repo.changelog.read(parent2)[0]
1189 m2n = repo.changelog.read(parent2)[0]
1190 m1 = repo.manifest.read(m1n)
1190 m1 = repo.manifest.read(m1n)
1191 m2 = repo.manifest.read(m2n)
1191 m2 = repo.manifest.read(m2n)
1192 errors = 0
1192 errors = 0
1193 for f in dc:
1193 for f in dc:
1194 state = repo.dirstate.state(f)
1194 state = repo.dirstate.state(f)
1195 if state in "nr" and f not in m1:
1195 if state in "nr" and f not in m1:
1196 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1196 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1197 errors += 1
1197 errors += 1
1198 if state in "a" and f in m1:
1198 if state in "a" and f in m1:
1199 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1199 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1200 errors += 1
1200 errors += 1
1201 if state in "m" and f not in m1 and f not in m2:
1201 if state in "m" and f not in m1 and f not in m2:
1202 ui.warn(_("%s in state %s, but not in either manifest\n") %
1202 ui.warn(_("%s in state %s, but not in either manifest\n") %
1203 (f, state))
1203 (f, state))
1204 errors += 1
1204 errors += 1
1205 for f in m1:
1205 for f in m1:
1206 state = repo.dirstate.state(f)
1206 state = repo.dirstate.state(f)
1207 if state not in "nrm":
1207 if state not in "nrm":
1208 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1208 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1209 errors += 1
1209 errors += 1
1210 if errors:
1210 if errors:
1211 error = _(".hg/dirstate inconsistent with current parent's manifest")
1211 error = _(".hg/dirstate inconsistent with current parent's manifest")
1212 raise util.Abort(error)
1212 raise util.Abort(error)
1213
1213
1214 def debugconfig(ui, repo, *values):
1214 def debugconfig(ui, repo, *values):
1215 """show combined config settings from all hgrc files
1215 """show combined config settings from all hgrc files
1216
1216
1217 With no args, print names and values of all config items.
1217 With no args, print names and values of all config items.
1218
1218
1219 With one arg of the form section.name, print just the value of
1219 With one arg of the form section.name, print just the value of
1220 that config item.
1220 that config item.
1221
1221
1222 With multiple args, print names and values of all config items
1222 With multiple args, print names and values of all config items
1223 with matching section names."""
1223 with matching section names."""
1224
1224
1225 if values:
1225 if values:
1226 if len([v for v in values if '.' in v]) > 1:
1226 if len([v for v in values if '.' in v]) > 1:
1227 raise util.Abort(_('only one config item permitted'))
1227 raise util.Abort(_('only one config item permitted'))
1228 for section, name, value in ui.walkconfig():
1228 for section, name, value in ui.walkconfig():
1229 sectname = section + '.' + name
1229 sectname = section + '.' + name
1230 if values:
1230 if values:
1231 for v in values:
1231 for v in values:
1232 if v == section:
1232 if v == section:
1233 ui.write('%s=%s\n' % (sectname, value))
1233 ui.write('%s=%s\n' % (sectname, value))
1234 elif v == sectname:
1234 elif v == sectname:
1235 ui.write(value, '\n')
1235 ui.write(value, '\n')
1236 else:
1236 else:
1237 ui.write('%s=%s\n' % (sectname, value))
1237 ui.write('%s=%s\n' % (sectname, value))
1238
1238
1239 def debugsetparents(ui, repo, rev1, rev2=None):
1239 def debugsetparents(ui, repo, rev1, rev2=None):
1240 """manually set the parents of the current working directory
1240 """manually set the parents of the current working directory
1241
1241
1242 This is useful for writing repository conversion tools, but should
1242 This is useful for writing repository conversion tools, but should
1243 be used with care.
1243 be used with care.
1244 """
1244 """
1245
1245
1246 if not rev2:
1246 if not rev2:
1247 rev2 = hex(nullid)
1247 rev2 = hex(nullid)
1248
1248
1249 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1249 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1250
1250
1251 def debugstate(ui, repo):
1251 def debugstate(ui, repo):
1252 """show the contents of the current dirstate"""
1252 """show the contents of the current dirstate"""
1253 repo.dirstate.read()
1253 repo.dirstate.read()
1254 dc = repo.dirstate.map
1254 dc = repo.dirstate.map
1255 keys = dc.keys()
1255 keys = dc.keys()
1256 keys.sort()
1256 keys.sort()
1257 for file_ in keys:
1257 for file_ in keys:
1258 ui.write("%c %3o %10d %s %s\n"
1258 ui.write("%c %3o %10d %s %s\n"
1259 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1259 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1260 time.strftime("%x %X",
1260 time.strftime("%x %X",
1261 time.localtime(dc[file_][3])), file_))
1261 time.localtime(dc[file_][3])), file_))
1262 for f in repo.dirstate.copies:
1262 for f in repo.dirstate.copies:
1263 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1263 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1264
1264
1265 def debugdata(ui, file_, rev):
1265 def debugdata(ui, file_, rev):
1266 """dump the contents of a data file revision"""
1266 """dump the contents of a data file revision"""
1267 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1267 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1268 file_[:-2] + ".i", file_, 0)
1268 file_[:-2] + ".i", file_, 0)
1269 try:
1269 try:
1270 ui.write(r.revision(r.lookup(rev)))
1270 ui.write(r.revision(r.lookup(rev)))
1271 except KeyError:
1271 except KeyError:
1272 raise util.Abort(_('invalid revision identifier %s'), rev)
1272 raise util.Abort(_('invalid revision identifier %s') % rev)
1273
1273
1274 def debugindex(ui, file_):
1274 def debugindex(ui, file_):
1275 """dump the contents of an index file"""
1275 """dump the contents of an index file"""
1276 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1276 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1277 ui.write(" rev offset length base linkrev" +
1277 ui.write(" rev offset length base linkrev" +
1278 " nodeid p1 p2\n")
1278 " nodeid p1 p2\n")
1279 for i in range(r.count()):
1279 for i in range(r.count()):
1280 node = r.node(i)
1280 node = r.node(i)
1281 pp = r.parents(node)
1281 pp = r.parents(node)
1282 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1282 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1283 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1283 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1284 short(node), short(pp[0]), short(pp[1])))
1284 short(node), short(pp[0]), short(pp[1])))
1285
1285
1286 def debugindexdot(ui, file_):
1286 def debugindexdot(ui, file_):
1287 """dump an index DAG as a .dot file"""
1287 """dump an index DAG as a .dot file"""
1288 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1288 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1289 ui.write("digraph G {\n")
1289 ui.write("digraph G {\n")
1290 for i in range(r.count()):
1290 for i in range(r.count()):
1291 node = r.node(i)
1291 node = r.node(i)
1292 pp = r.parents(node)
1292 pp = r.parents(node)
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1294 if pp[1] != nullid:
1294 if pp[1] != nullid:
1295 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1295 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1296 ui.write("}\n")
1296 ui.write("}\n")
1297
1297
1298 def debugrename(ui, repo, file, rev=None):
1298 def debugrename(ui, repo, file, rev=None):
1299 """dump rename information"""
1299 """dump rename information"""
1300 r = repo.file(relpath(repo, [file])[0])
1300 r = repo.file(relpath(repo, [file])[0])
1301 if rev:
1301 if rev:
1302 try:
1302 try:
1303 # assume all revision numbers are for changesets
1303 # assume all revision numbers are for changesets
1304 n = repo.lookup(rev)
1304 n = repo.lookup(rev)
1305 change = repo.changelog.read(n)
1305 change = repo.changelog.read(n)
1306 m = repo.manifest.read(change[0])
1306 m = repo.manifest.read(change[0])
1307 n = m[relpath(repo, [file])[0]]
1307 n = m[relpath(repo, [file])[0]]
1308 except (hg.RepoError, KeyError):
1308 except (hg.RepoError, KeyError):
1309 n = r.lookup(rev)
1309 n = r.lookup(rev)
1310 else:
1310 else:
1311 n = r.tip()
1311 n = r.tip()
1312 m = r.renamed(n)
1312 m = r.renamed(n)
1313 if m:
1313 if m:
1314 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1314 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1315 else:
1315 else:
1316 ui.write(_("not renamed\n"))
1316 ui.write(_("not renamed\n"))
1317
1317
1318 def debugwalk(ui, repo, *pats, **opts):
1318 def debugwalk(ui, repo, *pats, **opts):
1319 """show how files match on given patterns"""
1319 """show how files match on given patterns"""
1320 items = list(cmdutil.walk(repo, pats, opts))
1320 items = list(cmdutil.walk(repo, pats, opts))
1321 if not items:
1321 if not items:
1322 return
1322 return
1323 fmt = '%%s %%-%ds %%-%ds %%s' % (
1323 fmt = '%%s %%-%ds %%-%ds %%s' % (
1324 max([len(abs) for (src, abs, rel, exact) in items]),
1324 max([len(abs) for (src, abs, rel, exact) in items]),
1325 max([len(rel) for (src, abs, rel, exact) in items]))
1325 max([len(rel) for (src, abs, rel, exact) in items]))
1326 for src, abs, rel, exact in items:
1326 for src, abs, rel, exact in items:
1327 line = fmt % (src, abs, rel, exact and 'exact' or '')
1327 line = fmt % (src, abs, rel, exact and 'exact' or '')
1328 ui.write("%s\n" % line.rstrip())
1328 ui.write("%s\n" % line.rstrip())
1329
1329
1330 def diff(ui, repo, *pats, **opts):
1330 def diff(ui, repo, *pats, **opts):
1331 """diff repository (or selected files)
1331 """diff repository (or selected files)
1332
1332
1333 Show differences between revisions for the specified files.
1333 Show differences between revisions for the specified files.
1334
1334
1335 Differences between files are shown using the unified diff format.
1335 Differences between files are shown using the unified diff format.
1336
1336
1337 When two revision arguments are given, then changes are shown
1337 When two revision arguments are given, then changes are shown
1338 between those revisions. If only one revision is specified then
1338 between those revisions. If only one revision is specified then
1339 that revision is compared to the working directory, and, when no
1339 that revision is compared to the working directory, and, when no
1340 revisions are specified, the working directory files are compared
1340 revisions are specified, the working directory files are compared
1341 to its parent.
1341 to its parent.
1342
1342
1343 Without the -a option, diff will avoid generating diffs of files
1343 Without the -a option, diff will avoid generating diffs of files
1344 it detects as binary. With -a, diff will generate a diff anyway,
1344 it detects as binary. With -a, diff will generate a diff anyway,
1345 probably with undesirable results.
1345 probably with undesirable results.
1346 """
1346 """
1347 node1, node2 = revpair(ui, repo, opts['rev'])
1347 node1, node2 = revpair(ui, repo, opts['rev'])
1348
1348
1349 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1349 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1350
1350
1351 patch.diff(repo, node1, node2, fns, match=matchfn,
1351 patch.diff(repo, node1, node2, fns, match=matchfn,
1352 opts=patch.diffopts(ui, opts))
1352 opts=patch.diffopts(ui, opts))
1353
1353
1354 def export(ui, repo, *changesets, **opts):
1354 def export(ui, repo, *changesets, **opts):
1355 """dump the header and diffs for one or more changesets
1355 """dump the header and diffs for one or more changesets
1356
1356
1357 Print the changeset header and diffs for one or more revisions.
1357 Print the changeset header and diffs for one or more revisions.
1358
1358
1359 The information shown in the changeset header is: author,
1359 The information shown in the changeset header is: author,
1360 changeset hash, parent and commit comment.
1360 changeset hash, parent and commit comment.
1361
1361
1362 Output may be to a file, in which case the name of the file is
1362 Output may be to a file, in which case the name of the file is
1363 given using a format string. The formatting rules are as follows:
1363 given using a format string. The formatting rules are as follows:
1364
1364
1365 %% literal "%" character
1365 %% literal "%" character
1366 %H changeset hash (40 bytes of hexadecimal)
1366 %H changeset hash (40 bytes of hexadecimal)
1367 %N number of patches being generated
1367 %N number of patches being generated
1368 %R changeset revision number
1368 %R changeset revision number
1369 %b basename of the exporting repository
1369 %b basename of the exporting repository
1370 %h short-form changeset hash (12 bytes of hexadecimal)
1370 %h short-form changeset hash (12 bytes of hexadecimal)
1371 %n zero-padded sequence number, starting at 1
1371 %n zero-padded sequence number, starting at 1
1372 %r zero-padded changeset revision number
1372 %r zero-padded changeset revision number
1373
1373
1374 Without the -a option, export will avoid generating diffs of files
1374 Without the -a option, export will avoid generating diffs of files
1375 it detects as binary. With -a, export will generate a diff anyway,
1375 it detects as binary. With -a, export will generate a diff anyway,
1376 probably with undesirable results.
1376 probably with undesirable results.
1377
1377
1378 With the --switch-parent option, the diff will be against the second
1378 With the --switch-parent option, the diff will be against the second
1379 parent. It can be useful to review a merge.
1379 parent. It can be useful to review a merge.
1380 """
1380 """
1381 if not changesets:
1381 if not changesets:
1382 raise util.Abort(_("export requires at least one changeset"))
1382 raise util.Abort(_("export requires at least one changeset"))
1383 revs = list(revrange(ui, repo, changesets))
1383 revs = list(revrange(ui, repo, changesets))
1384 if len(revs) > 1:
1384 if len(revs) > 1:
1385 ui.note(_('exporting patches:\n'))
1385 ui.note(_('exporting patches:\n'))
1386 else:
1386 else:
1387 ui.note(_('exporting patch:\n'))
1387 ui.note(_('exporting patch:\n'))
1388 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1388 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1389 switch_parent=opts['switch_parent'],
1389 switch_parent=opts['switch_parent'],
1390 opts=patch.diffopts(ui, opts))
1390 opts=patch.diffopts(ui, opts))
1391
1391
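# --- Illustrative sketch (not from the original commands.py): one plausible
# --- reading of the %n field described above - a sequence number zero-padded
# --- to the width of the total patch count (%N).
def seq_field(i, total):
    return '%0*d' % (len(str(total)), i)

# [seq_field(i, 12) for i in (1, 7, 12)] -> ['01', '07', '12']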
1392 def forget(ui, repo, *pats, **opts):
1392 def forget(ui, repo, *pats, **opts):
1393 """don't add the specified files on the next commit (DEPRECATED)
1393 """don't add the specified files on the next commit (DEPRECATED)
1394
1394
1395 (DEPRECATED)
1395 (DEPRECATED)
1396 Undo an 'hg add' scheduled for the next commit.
1396 Undo an 'hg add' scheduled for the next commit.
1397
1397
1398 This command is now deprecated and will be removed in a future
1398 This command is now deprecated and will be removed in a future
1399 release. Please use revert instead.
1399 release. Please use revert instead.
1400 """
1400 """
1401 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1401 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1402 forget = []
1402 forget = []
1403 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1403 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1404 if repo.dirstate.state(abs) == 'a':
1404 if repo.dirstate.state(abs) == 'a':
1405 forget.append(abs)
1405 forget.append(abs)
1406 if ui.verbose or not exact:
1406 if ui.verbose or not exact:
1407 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1407 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1408 repo.forget(forget)
1408 repo.forget(forget)
1409
1409
1410 def grep(ui, repo, pattern, *pats, **opts):
1410 def grep(ui, repo, pattern, *pats, **opts):
1411 """search for a pattern in specified files and revisions
1411 """search for a pattern in specified files and revisions
1412
1412
1413 Search revisions of files for a regular expression.
1413 Search revisions of files for a regular expression.
1414
1414
1415 This command behaves differently than Unix grep. It only accepts
1415 This command behaves differently than Unix grep. It only accepts
1416 Python/Perl regexps. It searches repository history, not the
1416 Python/Perl regexps. It searches repository history, not the
1417 working directory. It always prints the revision number in which
1417 working directory. It always prints the revision number in which
1418 a match appears.
1418 a match appears.
1419
1419
1420 By default, grep only prints output for the first revision of a
1420 By default, grep only prints output for the first revision of a
1421 file in which it finds a match. To get it to print every revision
1421 file in which it finds a match. To get it to print every revision
1422 that contains a change in match status ("-" for a match that
1422 that contains a change in match status ("-" for a match that
1423 becomes a non-match, or "+" for a non-match that becomes a match),
1423 becomes a non-match, or "+" for a non-match that becomes a match),
1424 use the --all flag.
1424 use the --all flag.
1425 """
1425 """
1426 reflags = 0
1426 reflags = 0
1427 if opts['ignore_case']:
1427 if opts['ignore_case']:
1428 reflags |= re.I
1428 reflags |= re.I
1429 regexp = re.compile(pattern, reflags)
1429 regexp = re.compile(pattern, reflags)
1430 sep, eol = ':', '\n'
1430 sep, eol = ':', '\n'
1431 if opts['print0']:
1431 if opts['print0']:
1432 sep = eol = '\0'
1432 sep = eol = '\0'
1433
1433
1434 fcache = {}
1434 fcache = {}
1435 def getfile(fn):
1435 def getfile(fn):
1436 if fn not in fcache:
1436 if fn not in fcache:
1437 fcache[fn] = repo.file(fn)
1437 fcache[fn] = repo.file(fn)
1438 return fcache[fn]
1438 return fcache[fn]
1439
1439
1440 def matchlines(body):
1440 def matchlines(body):
1441 begin = 0
1441 begin = 0
1442 linenum = 0
1442 linenum = 0
1443 while True:
1443 while True:
1444 match = regexp.search(body, begin)
1444 match = regexp.search(body, begin)
1445 if not match:
1445 if not match:
1446 break
1446 break
1447 mstart, mend = match.span()
1447 mstart, mend = match.span()
1448 linenum += body.count('\n', begin, mstart) + 1
1448 linenum += body.count('\n', begin, mstart) + 1
1449 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1449 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1450 lend = body.find('\n', mend)
1450 lend = body.find('\n', mend)
1451 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1451 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1452 begin = lend + 1
1452 begin = lend + 1
1453
1453
1454 class linestate(object):
1454 class linestate(object):
1455 def __init__(self, line, linenum, colstart, colend):
1455 def __init__(self, line, linenum, colstart, colend):
1456 self.line = line
1456 self.line = line
1457 self.linenum = linenum
1457 self.linenum = linenum
1458 self.colstart = colstart
1458 self.colstart = colstart
1459 self.colend = colend
1459 self.colend = colend
1460
1460
1461 def __eq__(self, other):
1461 def __eq__(self, other):
1462 return self.line == other.line
1462 return self.line == other.line
1463
1463
1464 matches = {}
1464 matches = {}
1465 copies = {}
1465 copies = {}
1466 def grepbody(fn, rev, body):
1466 def grepbody(fn, rev, body):
1467 matches[rev].setdefault(fn, [])
1467 matches[rev].setdefault(fn, [])
1468 m = matches[rev][fn]
1468 m = matches[rev][fn]
1469 for lnum, cstart, cend, line in matchlines(body):
1469 for lnum, cstart, cend, line in matchlines(body):
1470 s = linestate(line, lnum, cstart, cend)
1470 s = linestate(line, lnum, cstart, cend)
1471 m.append(s)
1471 m.append(s)
1472
1472
1473 def difflinestates(a, b):
1473 def difflinestates(a, b):
1474 sm = difflib.SequenceMatcher(None, a, b)
1474 sm = difflib.SequenceMatcher(None, a, b)
1475 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1475 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1476 if tag == 'insert':
1476 if tag == 'insert':
1477 for i in range(blo, bhi):
1477 for i in range(blo, bhi):
1478 yield ('+', b[i])
1478 yield ('+', b[i])
1479 elif tag == 'delete':
1479 elif tag == 'delete':
1480 for i in range(alo, ahi):
1480 for i in range(alo, ahi):
1481 yield ('-', a[i])
1481 yield ('-', a[i])
1482 elif tag == 'replace':
1482 elif tag == 'replace':
1483 for i in range(alo, ahi):
1483 for i in range(alo, ahi):
1484 yield ('-', a[i])
1484 yield ('-', a[i])
1485 for i in range(blo, bhi):
1485 for i in range(blo, bhi):
1486 yield ('+', b[i])
1486 yield ('+', b[i])
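# difflinestates pairs up two match lists: lines found only in a are
# yielded as ('-', line) and lines found only in b as ('+', line), which is
# what produces the "-"/"+" change markers described in the grep docstring.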
1487
1487
1488 prev = {}
1488 prev = {}
1489 ucache = {}
1489 ucache = {}
1490 def display(fn, rev, states, prevstates):
1490 def display(fn, rev, states, prevstates):
1491 counts = {'-': 0, '+': 0}
1491 counts = {'-': 0, '+': 0}
1492 filerevmatches = {}
1492 filerevmatches = {}
1493 if incrementing or not opts['all']:
1493 if incrementing or not opts['all']:
1494 a, b = prevstates, states
1494 a, b = prevstates, states
1495 else:
1495 else:
1496 a, b = states, prevstates
1496 a, b = states, prevstates
1497 for change, l in difflinestates(a, b):
1497 for change, l in difflinestates(a, b):
1498 if incrementing or not opts['all']:
1498 if incrementing or not opts['all']:
1499 r = rev
1499 r = rev
1500 else:
1500 else:
1501 r = prev[fn]
1501 r = prev[fn]
1502 cols = [fn, str(r)]
1502 cols = [fn, str(r)]
1503 if opts['line_number']:
1503 if opts['line_number']:
1504 cols.append(str(l.linenum))
1504 cols.append(str(l.linenum))
1505 if opts['all']:
1505 if opts['all']:
1506 cols.append(change)
1506 cols.append(change)
1507 if opts['user']:
1507 if opts['user']:
1508 cols.append(trimuser(ui, getchange(r)[1], rev,
1508 cols.append(trimuser(ui, getchange(r)[1], rev,
1509 ucache))
1509 ucache))
1510 if opts['files_with_matches']:
1510 if opts['files_with_matches']:
1511 c = (fn, rev)
1511 c = (fn, rev)
1512 if c in filerevmatches:
1512 if c in filerevmatches:
1513 continue
1513 continue
1514 filerevmatches[c] = 1
1514 filerevmatches[c] = 1
1515 else:
1515 else:
1516 cols.append(l.line)
1516 cols.append(l.line)
1517 ui.write(sep.join(cols), eol)
1517 ui.write(sep.join(cols), eol)
1518 counts[change] += 1
1518 counts[change] += 1
1519 return counts['+'], counts['-']
1519 return counts['+'], counts['-']
1520
1520
1521 fstate = {}
1521 fstate = {}
1522 skip = {}
1522 skip = {}
1523 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1523 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1524 count = 0
1524 count = 0
1525 incrementing = False
1525 incrementing = False
1526 follow = opts.get('follow')
1526 follow = opts.get('follow')
1527 for st, rev, fns in changeiter:
1527 for st, rev, fns in changeiter:
1528 if st == 'window':
1528 if st == 'window':
1529 incrementing = rev
1529 incrementing = rev
1530 matches.clear()
1530 matches.clear()
1531 elif st == 'add':
1531 elif st == 'add':
1532 change = repo.changelog.read(repo.lookup(str(rev)))
1532 change = repo.changelog.read(repo.lookup(str(rev)))
1533 mf = repo.manifest.read(change[0])
1533 mf = repo.manifest.read(change[0])
1534 matches[rev] = {}
1534 matches[rev] = {}
1535 for fn in fns:
1535 for fn in fns:
1536 if fn in skip:
1536 if fn in skip:
1537 continue
1537 continue
1538 fstate.setdefault(fn, {})
1538 fstate.setdefault(fn, {})
1539 try:
1539 try:
1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1541 if follow:
1541 if follow:
1542 copied = getfile(fn).renamed(mf[fn])
1542 copied = getfile(fn).renamed(mf[fn])
1543 if copied:
1543 if copied:
1544 copies.setdefault(rev, {})[fn] = copied[0]
1544 copies.setdefault(rev, {})[fn] = copied[0]
1545 except KeyError:
1545 except KeyError:
1546 pass
1546 pass
1547 elif st == 'iter':
1547 elif st == 'iter':
1548 states = matches[rev].items()
1548 states = matches[rev].items()
1549 states.sort()
1549 states.sort()
1550 for fn, m in states:
1550 for fn, m in states:
1551 copy = copies.get(rev, {}).get(fn)
1551 copy = copies.get(rev, {}).get(fn)
1552 if fn in skip:
1552 if fn in skip:
1553 if copy:
1553 if copy:
1554 skip[copy] = True
1554 skip[copy] = True
1555 continue
1555 continue
1556 if incrementing or not opts['all'] or fstate[fn]:
1556 if incrementing or not opts['all'] or fstate[fn]:
1557 pos, neg = display(fn, rev, m, fstate[fn])
1557 pos, neg = display(fn, rev, m, fstate[fn])
1558 count += pos + neg
1558 count += pos + neg
1559 if pos and not opts['all']:
1559 if pos and not opts['all']:
1560 skip[fn] = True
1560 skip[fn] = True
1561 if copy:
1561 if copy:
1562 skip[copy] = True
1562 skip[copy] = True
1563 fstate[fn] = m
1563 fstate[fn] = m
1564 if copy:
1564 if copy:
1565 fstate[copy] = m
1565 fstate[copy] = m
1566 prev[fn] = rev
1566 prev[fn] = rev
1567
1567
1568 if not incrementing:
1568 if not incrementing:
1569 fstate = fstate.items()
1569 fstate = fstate.items()
1570 fstate.sort()
1570 fstate.sort()
1571 for fn, state in fstate:
1571 for fn, state in fstate:
1572 if fn in skip:
1572 if fn in skip:
1573 continue
1573 continue
1574 if fn not in copies.get(prev[fn], {}):
1574 if fn not in copies.get(prev[fn], {}):
1575 display(fn, rev, {}, state)
1575 display(fn, rev, {}, state)
1576 return (count == 0 and 1) or 0
1576 return (count == 0 and 1) or 0
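# like Unix grep, the return value above becomes the exit code:
# 0 if at least one match was printed, 1 if nothing matched.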
1577
1577
1578 def heads(ui, repo, **opts):
1578 def heads(ui, repo, **opts):
1579 """show current repository heads
1579 """show current repository heads
1580
1580
1581 Show all repository head changesets.
1581 Show all repository head changesets.
1582
1582
1583 Repository "heads" are changesets that don't have children
1583 Repository "heads" are changesets that don't have children
1584 changesets. They are where development generally takes place and
1584 changesets. They are where development generally takes place and
1585 are the usual targets for update and merge operations.
1585 are the usual targets for update and merge operations.
1586 """
1586 """
1587 if opts['rev']:
1587 if opts['rev']:
1588 heads = repo.heads(repo.lookup(opts['rev']))
1588 heads = repo.heads(repo.lookup(opts['rev']))
1589 else:
1589 else:
1590 heads = repo.heads()
1590 heads = repo.heads()
1591 br = None
1591 br = None
1592 if opts['branches']:
1592 if opts['branches']:
1593 br = repo.branchlookup(heads)
1593 br = repo.branchlookup(heads)
1594 displayer = show_changeset(ui, repo, opts)
1594 displayer = show_changeset(ui, repo, opts)
1595 for n in heads:
1595 for n in heads:
1596 displayer.show(changenode=n, brinfo=br)
1596 displayer.show(changenode=n, brinfo=br)
1597
1597
1598 def identify(ui, repo):
1598 def identify(ui, repo):
1599 """print information about the working copy
1599 """print information about the working copy
1600
1600
1601 Print a short summary of the current state of the repo.
1601 Print a short summary of the current state of the repo.
1602
1602
1603 This summary identifies the repository state using one or two parent
1603 This summary identifies the repository state using one or two parent
1604 hash identifiers, followed by a "+" if there are uncommitted changes
1604 hash identifiers, followed by a "+" if there are uncommitted changes
1605 in the working directory, followed by a list of tags for this revision.
1605 in the working directory, followed by a list of tags for this revision.
1606 """
1606 """
1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1608 if not parents:
1608 if not parents:
1609 ui.write(_("unknown\n"))
1609 ui.write(_("unknown\n"))
1610 return
1610 return
1611
1611
1612 hexfunc = ui.debugflag and hex or short
1612 hexfunc = ui.debugflag and hex or short
1613 modified, added, removed, deleted = repo.status()[:4]
1613 modified, added, removed, deleted = repo.status()[:4]
1614 output = ["%s%s" %
1614 output = ["%s%s" %
1615 ('+'.join([hexfunc(parent) for parent in parents]),
1615 ('+'.join([hexfunc(parent) for parent in parents]),
1616 (modified or added or removed or deleted) and "+" or "")]
1616 (modified or added or removed or deleted) and "+" or "")]
1617
1617
1618 if not ui.quiet:
1618 if not ui.quiet:
1619 # multiple tags for a single parent separated by '/'
1619 # multiple tags for a single parent separated by '/'
1620 parenttags = ['/'.join(tags)
1620 parenttags = ['/'.join(tags)
1621 for tags in map(repo.nodetags, parents) if tags]
1621 for tags in map(repo.nodetags, parents) if tags]
1622 # tags for multiple parents separated by ' + '
1622 # tags for multiple parents separated by ' + '
1623 if parenttags:
1623 if parenttags:
1624 output.append(' + '.join(parenttags))
1624 output.append(' + '.join(parenttags))
1625
1625
1626 ui.write("%s\n" % ' '.join(output))
1626 ui.write("%s\n" % ' '.join(output))
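# e.g. a clean working directory at a tagged revision prints something like
# "d3a1f7e2b9c4 tip", while "d3a1f7e2b9c4+ tip" signals uncommitted changes
# (the hash here is purely illustrative).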
1627
1627
1628 def import_(ui, repo, patch1, *patches, **opts):
1628 def import_(ui, repo, patch1, *patches, **opts):
1629 """import an ordered set of patches
1629 """import an ordered set of patches
1630
1630
1631 Import a list of patches and commit them individually.
1631 Import a list of patches and commit them individually.
1632
1632
1633 If there are outstanding changes in the working directory, import
1633 If there are outstanding changes in the working directory, import
1634 will abort unless given the -f flag.
1634 will abort unless given the -f flag.
1635
1635
1636 You can import a patch straight from a mail message. Even patches
1636 You can import a patch straight from a mail message. Even patches
1637 as attachments work (body part must be type text/plain or
1637 as attachments work (body part must be type text/plain or
1638 text/x-patch to be used). From and Subject headers of email
1638 text/x-patch to be used). From and Subject headers of email
1639 message are used as default committer and commit message. All
1639 message are used as default committer and commit message. All
1640 text/plain body parts before first diff are added to commit
1640 text/plain body parts before first diff are added to commit
1641 message.
1641 message.
1642
1642
1643 If imported patch was generated by hg export, user and description
1643 If imported patch was generated by hg export, user and description
1644 from patch override values from message headers and body. Values
1644 from patch override values from message headers and body. Values
1645 given on command line with -m and -u override these.
1645 given on command line with -m and -u override these.
1646
1646
1647 To read a patch from standard input, use patch name "-".
1647 To read a patch from standard input, use patch name "-".
1648 """
1648 """
1649 patches = (patch1,) + patches
1649 patches = (patch1,) + patches
1650
1650
1651 if not opts['force']:
1651 if not opts['force']:
1652 bail_if_changed(repo)
1652 bail_if_changed(repo)
1653
1653
1654 d = opts["base"]
1654 d = opts["base"]
1655 strip = opts["strip"]
1655 strip = opts["strip"]
1656
1656
1657 wlock = repo.wlock()
1657 wlock = repo.wlock()
1658 lock = repo.lock()
1658 lock = repo.lock()
1659
1659
1660 for p in patches:
1660 for p in patches:
1661 pf = os.path.join(d, p)
1661 pf = os.path.join(d, p)
1662
1662
1663 if pf == '-':
1663 if pf == '-':
1664 ui.status(_("applying patch from stdin\n"))
1664 ui.status(_("applying patch from stdin\n"))
1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1666 else:
1666 else:
1667 ui.status(_("applying %s\n") % p)
1667 ui.status(_("applying %s\n") % p)
1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1669
1669
1670 if tmpname is None:
1670 if tmpname is None:
1671 raise util.Abort(_('no diffs found'))
1671 raise util.Abort(_('no diffs found'))
1672
1672
1673 try:
1673 try:
1674 if opts['message']:
1674 if opts['message']:
1675 # pickup the cmdline msg
1675 # pickup the cmdline msg
1676 message = opts['message']
1676 message = opts['message']
1677 elif message:
1677 elif message:
1678 # pickup the patch msg
1678 # pickup the patch msg
1679 message = message.strip()
1679 message = message.strip()
1680 else:
1680 else:
1681 # launch the editor
1681 # launch the editor
1682 message = None
1682 message = None
1683 ui.debug(_('message:\n%s\n') % message)
1683 ui.debug(_('message:\n%s\n') % message)
1684
1684
1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1686 files = patch.updatedir(ui, repo, files, wlock=wlock)
1686 files = patch.updatedir(ui, repo, files, wlock=wlock)
1687 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1687 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1688 finally:
1688 finally:
1689 os.unlink(tmpname)
1689 os.unlink(tmpname)
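# e.g. "hg import fix.patch", "hg import --base patches/ 01.patch 02.patch"
# (each name is joined onto the --base directory), or "hg import -" to read
# a single patch from standard input, as noted in the docstring above.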
1690
1690
1691 def incoming(ui, repo, source="default", **opts):
1691 def incoming(ui, repo, source="default", **opts):
1692 """show new changesets found in source
1692 """show new changesets found in source
1693
1693
1694 Show new changesets found in the specified path/URL or the default
1694 Show new changesets found in the specified path/URL or the default
1695 pull location. These are the changesets that would be pulled if a pull
1695 pull location. These are the changesets that would be pulled if a pull
1696 was requested.
1696 was requested.
1697
1697
1698 For remote repositories, using --bundle avoids downloading the changesets
1698 For remote repositories, using --bundle avoids downloading the changesets
1699 twice if incoming is followed by a pull.
1699 twice if incoming is followed by a pull.
1700
1700
1701 See pull for valid source format details.
1701 See pull for valid source format details.
1702 """
1702 """
1703 source = ui.expandpath(source)
1703 source = ui.expandpath(source)
1704 setremoteconfig(ui, opts)
1704 setremoteconfig(ui, opts)
1705
1705
1706 other = hg.repository(ui, source)
1706 other = hg.repository(ui, source)
1707 incoming = repo.findincoming(other, force=opts["force"])
1707 incoming = repo.findincoming(other, force=opts["force"])
1708 if not incoming:
1708 if not incoming:
1709 ui.status(_("no changes found\n"))
1709 ui.status(_("no changes found\n"))
1710 return
1710 return
1711
1711
1712 cleanup = None
1712 cleanup = None
1713 try:
1713 try:
1714 fname = opts["bundle"]
1714 fname = opts["bundle"]
1715 if fname or not other.local():
1715 if fname or not other.local():
1716 # create a bundle (uncompressed if other repo is not local)
1716 # create a bundle (uncompressed if other repo is not local)
1717 cg = other.changegroup(incoming, "incoming")
1717 cg = other.changegroup(incoming, "incoming")
1718 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1718 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1719 # keep written bundle?
1719 # keep written bundle?
1720 if opts["bundle"]:
1720 if opts["bundle"]:
1721 cleanup = None
1721 cleanup = None
1722 if not other.local():
1722 if not other.local():
1723 # use the created uncompressed bundlerepo
1723 # use the created uncompressed bundlerepo
1724 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1724 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1725
1725
1726 revs = None
1726 revs = None
1727 if opts['rev']:
1727 if opts['rev']:
1728 revs = [other.lookup(rev) for rev in opts['rev']]
1728 revs = [other.lookup(rev) for rev in opts['rev']]
1729 o = other.changelog.nodesbetween(incoming, revs)[0]
1729 o = other.changelog.nodesbetween(incoming, revs)[0]
1730 if opts['newest_first']:
1730 if opts['newest_first']:
1731 o.reverse()
1731 o.reverse()
1732 displayer = show_changeset(ui, other, opts)
1732 displayer = show_changeset(ui, other, opts)
1733 for n in o:
1733 for n in o:
1734 parents = [p for p in other.changelog.parents(n) if p != nullid]
1734 parents = [p for p in other.changelog.parents(n) if p != nullid]
1735 if opts['no_merges'] and len(parents) == 2:
1735 if opts['no_merges'] and len(parents) == 2:
1736 continue
1736 continue
1737 displayer.show(changenode=n)
1737 displayer.show(changenode=n)
1738 if opts['patch']:
1738 if opts['patch']:
1739 prev = (parents and parents[0]) or nullid
1739 prev = (parents and parents[0]) or nullid
1740 patch.diff(other, prev, n, fp=repo.ui)
1740 patch.diff(other, prev, n, fp=repo.ui)
1741 ui.write("\n")
1741 ui.write("\n")
1742 finally:
1742 finally:
1743 if hasattr(other, 'close'):
1743 if hasattr(other, 'close'):
1744 other.close()
1744 other.close()
1745 if cleanup:
1745 if cleanup:
1746 os.unlink(cleanup)
1746 os.unlink(cleanup)
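# e.g. "hg incoming --bundle incoming.hg http://example.com/repo" keeps the
# fetched changegroup in incoming.hg, so the changesets need not be
# downloaded again when they are applied (for instance with "hg unbundle incoming.hg").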
1747
1747
1748 def init(ui, dest=".", **opts):
1748 def init(ui, dest=".", **opts):
1749 """create a new repository in the given directory
1749 """create a new repository in the given directory
1750
1750
1751 Initialize a new repository in the given directory. If the given
1751 Initialize a new repository in the given directory. If the given
1752 directory does not exist, it is created.
1752 directory does not exist, it is created.
1753
1753
1754 If no directory is given, the current directory is used.
1754 If no directory is given, the current directory is used.
1755
1755
1756 It is possible to specify an ssh:// URL as the destination.
1756 It is possible to specify an ssh:// URL as the destination.
1757 Look at the help text for the pull command for important details
1757 Look at the help text for the pull command for important details
1758 about ssh:// URLs.
1758 about ssh:// URLs.
1759 """
1759 """
1760 setremoteconfig(ui, opts)
1760 setremoteconfig(ui, opts)
1761 hg.repository(ui, dest, create=1)
1761 hg.repository(ui, dest, create=1)
1762
1762
1763 def locate(ui, repo, *pats, **opts):
1763 def locate(ui, repo, *pats, **opts):
1764 """locate files matching specific patterns
1764 """locate files matching specific patterns
1765
1765
1766 Print all files under Mercurial control whose names match the
1766 Print all files under Mercurial control whose names match the
1767 given patterns.
1767 given patterns.
1768
1768
1769 This command searches the current directory and its
1769 This command searches the current directory and its
1770 subdirectories. To search an entire repository, move to the root
1770 subdirectories. To search an entire repository, move to the root
1771 of the repository.
1771 of the repository.
1772
1772
1773 If no patterns are given to match, this command prints all file
1773 If no patterns are given to match, this command prints all file
1774 names.
1774 names.
1775
1775
1776 If you want to feed the output of this command into the "xargs"
1776 If you want to feed the output of this command into the "xargs"
1777 command, use the "-0" option to both this command and "xargs".
1777 command, use the "-0" option to both this command and "xargs".
1778 This will avoid the problem of "xargs" treating single filenames
1778 This will avoid the problem of "xargs" treating single filenames
1779 that contain white space as multiple filenames.
1779 that contain white space as multiple filenames.
1780 """
1780 """
1781 end = opts['print0'] and '\0' or '\n'
1781 end = opts['print0'] and '\0' or '\n'
1782 rev = opts['rev']
1782 rev = opts['rev']
1783 if rev:
1783 if rev:
1784 node = repo.lookup(rev)
1784 node = repo.lookup(rev)
1785 else:
1785 else:
1786 node = None
1786 node = None
1787
1787
1788 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1788 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1789 head='(?:.*/|)'):
1789 head='(?:.*/|)'):
1790 if not node and repo.dirstate.state(abs) == '?':
1790 if not node and repo.dirstate.state(abs) == '?':
1791 continue
1791 continue
1792 if opts['fullpath']:
1792 if opts['fullpath']:
1793 ui.write(os.path.join(repo.root, abs), end)
1793 ui.write(os.path.join(repo.root, abs), end)
1794 else:
1794 else:
1795 ui.write(((pats and rel) or abs), end)
1795 ui.write(((pats and rel) or abs), end)
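# e.g.  hg locate -0 '*.py' | xargs -0 wc -l
# pairs the two null-separator options so that file names containing
# whitespace pass through the pipeline intact.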
1796
1796
1797 def log(ui, repo, *pats, **opts):
1797 def log(ui, repo, *pats, **opts):
1798 """show revision history of entire repository or files
1798 """show revision history of entire repository or files
1799
1799
1800 Print the revision history of the specified files or the entire
1800 Print the revision history of the specified files or the entire
1801 project.
1801 project.
1802
1802
1803 File history is shown without following rename or copy history of
1803 File history is shown without following rename or copy history of
1804 files. Use -f/--follow with a file name to follow history across
1804 files. Use -f/--follow with a file name to follow history across
1805 renames and copies. --follow without a file name will only show
1805 renames and copies. --follow without a file name will only show
1806 ancestors or descendants of the starting revision. --follow-first
1806 ancestors or descendants of the starting revision. --follow-first
1807 only follows the first parent of merge revisions.
1807 only follows the first parent of merge revisions.
1808
1808
1809 If no revision range is specified, the default is tip:0 unless
1809 If no revision range is specified, the default is tip:0 unless
1810 --follow is set, in which case the working directory parent is
1810 --follow is set, in which case the working directory parent is
1811 used as the starting revision.
1811 used as the starting revision.
1812
1812
1813 By default this command outputs: changeset id and hash, tags,
1813 By default this command outputs: changeset id and hash, tags,
1814 non-trivial parents, user, date and time, and a summary for each
1814 non-trivial parents, user, date and time, and a summary for each
1815 commit. When the -v/--verbose switch is used, the list of changed
1815 commit. When the -v/--verbose switch is used, the list of changed
1816 files and full commit message is shown.
1816 files and full commit message is shown.
1817 """
1817 """
1818 class dui(object):
1818 class dui(object):
1819 # Implement and delegate some ui protocol. Save hunks of
1819 # Implement and delegate some ui protocol. Save hunks of
1820 # output for later display in the desired order.
1820 # output for later display in the desired order.
1821 def __init__(self, ui):
1821 def __init__(self, ui):
1822 self.ui = ui
1822 self.ui = ui
1823 self.hunk = {}
1823 self.hunk = {}
1824 self.header = {}
1824 self.header = {}
1825 def bump(self, rev):
1825 def bump(self, rev):
1826 self.rev = rev
1826 self.rev = rev
1827 self.hunk[rev] = []
1827 self.hunk[rev] = []
1828 self.header[rev] = []
1828 self.header[rev] = []
1829 def note(self, *args):
1829 def note(self, *args):
1830 if self.verbose:
1830 if self.verbose:
1831 self.write(*args)
1831 self.write(*args)
1832 def status(self, *args):
1832 def status(self, *args):
1833 if not self.quiet:
1833 if not self.quiet:
1834 self.write(*args)
1834 self.write(*args)
1835 def write(self, *args):
1835 def write(self, *args):
1836 self.hunk[self.rev].append(args)
1836 self.hunk[self.rev].append(args)
1837 def write_header(self, *args):
1837 def write_header(self, *args):
1838 self.header[self.rev].append(args)
1838 self.header[self.rev].append(args)
1839 def debug(self, *args):
1839 def debug(self, *args):
1840 if self.debugflag:
1840 if self.debugflag:
1841 self.write(*args)
1841 self.write(*args)
1842 def __getattr__(self, key):
1842 def __getattr__(self, key):
1843 return getattr(self.ui, key)
1843 return getattr(self.ui, key)
1844
1844
1845 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1845 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1846
1846
1847 if opts['limit']:
1847 if opts['limit']:
1848 try:
1848 try:
1849 limit = int(opts['limit'])
1849 limit = int(opts['limit'])
1850 except ValueError:
1850 except ValueError:
1851 raise util.Abort(_('limit must be a positive integer'))
1851 raise util.Abort(_('limit must be a positive integer'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1852 if limit <= 0: raise util.Abort(_('limit must be positive'))
1853 else:
1853 else:
1854 limit = sys.maxint
1854 limit = sys.maxint
1855 count = 0
1855 count = 0
1856
1856
1857 displayer = show_changeset(ui, repo, opts)
1857 displayer = show_changeset(ui, repo, opts)
1858 for st, rev, fns in changeiter:
1858 for st, rev, fns in changeiter:
1859 if st == 'window':
1859 if st == 'window':
1860 du = dui(ui)
1860 du = dui(ui)
1861 displayer.ui = du
1861 displayer.ui = du
1862 elif st == 'add':
1862 elif st == 'add':
1863 du.bump(rev)
1863 du.bump(rev)
1864 changenode = repo.changelog.node(rev)
1864 changenode = repo.changelog.node(rev)
1865 parents = [p for p in repo.changelog.parents(changenode)
1865 parents = [p for p in repo.changelog.parents(changenode)
1866 if p != nullid]
1866 if p != nullid]
1867 if opts['no_merges'] and len(parents) == 2:
1867 if opts['no_merges'] and len(parents) == 2:
1868 continue
1868 continue
1869 if opts['only_merges'] and len(parents) != 2:
1869 if opts['only_merges'] and len(parents) != 2:
1870 continue
1870 continue
1871
1871
1872 if opts['keyword']:
1872 if opts['keyword']:
1873 changes = getchange(rev)
1873 changes = getchange(rev)
1874 miss = 0
1874 miss = 0
1875 for k in [kw.lower() for kw in opts['keyword']]:
1875 for k in [kw.lower() for kw in opts['keyword']]:
1876 if not (k in changes[1].lower() or
1876 if not (k in changes[1].lower() or
1877 k in changes[4].lower() or
1877 k in changes[4].lower() or
1878 k in " ".join(changes[3][:20]).lower()):
1878 k in " ".join(changes[3][:20]).lower()):
1879 miss = 1
1879 miss = 1
1880 break
1880 break
1881 if miss:
1881 if miss:
1882 continue
1882 continue
1883
1883
1884 br = None
1884 br = None
1885 if opts['branches']:
1885 if opts['branches']:
1886 br = repo.branchlookup([repo.changelog.node(rev)])
1886 br = repo.branchlookup([repo.changelog.node(rev)])
1887
1887
1888 displayer.show(rev, brinfo=br)
1888 displayer.show(rev, brinfo=br)
1889 if opts['patch']:
1889 if opts['patch']:
1890 prev = (parents and parents[0]) or nullid
1890 prev = (parents and parents[0]) or nullid
1891 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1891 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1892 du.write("\n\n")
1892 du.write("\n\n")
1893 elif st == 'iter':
1893 elif st == 'iter':
1894 if count == limit: break
1894 if count == limit: break
1895 if du.header[rev]:
1895 if du.header[rev]:
1896 for args in du.header[rev]:
1896 for args in du.header[rev]:
1897 ui.write_header(*args)
1897 ui.write_header(*args)
1898 if du.hunk[rev]:
1898 if du.hunk[rev]:
1899 count += 1
1899 count += 1
1900 for args in du.hunk[rev]:
1900 for args in du.hunk[rev]:
1901 ui.write(*args)
1901 ui.write(*args)
1902
1902
1903 def manifest(ui, repo, rev=None):
1903 def manifest(ui, repo, rev=None):
1904 """output the latest or given revision of the project manifest
1904 """output the latest or given revision of the project manifest
1905
1905
1906 Print a list of version controlled files for the given revision.
1906 Print a list of version controlled files for the given revision.
1907
1907
1908 The manifest is the list of files being version controlled. If no revision
1908 The manifest is the list of files being version controlled. If no revision
1909 is given then the tip is used.
1909 is given then the tip is used.
1910 """
1910 """
1911 if rev:
1911 if rev:
1912 try:
1912 try:
1913 # assume all revision numbers are for changesets
1913 # assume all revision numbers are for changesets
1914 n = repo.lookup(rev)
1914 n = repo.lookup(rev)
1915 change = repo.changelog.read(n)
1915 change = repo.changelog.read(n)
1916 n = change[0]
1916 n = change[0]
1917 except hg.RepoError:
1917 except hg.RepoError:
1918 n = repo.manifest.lookup(rev)
1918 n = repo.manifest.lookup(rev)
1919 else:
1919 else:
1920 n = repo.manifest.tip()
1920 n = repo.manifest.tip()
1921 m = repo.manifest.read(n)
1921 m = repo.manifest.read(n)
1922 files = m.keys()
1922 files = m.keys()
1923 files.sort()
1923 files.sort()
1924
1924
1925 for f in files:
1925 for f in files:
1926 ui.write("%40s %3s %s\n" % (hex(m[f]),
1926 ui.write("%40s %3s %s\n" % (hex(m[f]),
1927 m.execf(f) and "755" or "644", f))
1927 m.execf(f) and "755" or "644", f))
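# each line written above has the form "<40-char file node hex> 644 path",
# with mode 755 shown for files flagged as executable in the manifest.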
1928
1928
1929 def merge(ui, repo, node=None, force=None, branch=None):
1929 def merge(ui, repo, node=None, force=None, branch=None):
1930 """Merge working directory with another revision
1930 """Merge working directory with another revision
1931
1931
1932 Merge the contents of the current working directory and the
1932 Merge the contents of the current working directory and the
1933 requested revision. Files that changed between either parent are
1933 requested revision. Files that changed between either parent are
1934 marked as changed for the next commit and a commit must be
1934 marked as changed for the next commit and a commit must be
1935 performed before any further updates are allowed.
1935 performed before any further updates are allowed.
1936
1936
1937 If no revision is specified, the working directory's parent is a
1937 If no revision is specified, the working directory's parent is a
1938 head revision, and the repository contains exactly one other head,
1938 head revision, and the repository contains exactly one other head,
1939 the other head is merged with by default. Otherwise, an explicit
1939 the other head is merged with by default. Otherwise, an explicit
1940 revision to merge with must be provided.
1940 revision to merge with must be provided.
1941 """
1941 """
1942
1942
1943 if node or branch:
1943 if node or branch:
1944 node = _lookup(repo, node, branch)
1944 node = _lookup(repo, node, branch)
1945 else:
1945 else:
1946 heads = repo.heads()
1946 heads = repo.heads()
1947 if len(heads) > 2:
1947 if len(heads) > 2:
1948 raise util.Abort(_('repo has %d heads - '
1948 raise util.Abort(_('repo has %d heads - '
1949 'please merge with an explicit rev') %
1949 'please merge with an explicit rev') %
1950 len(heads))
1950 len(heads))
1951 if len(heads) == 1:
1951 if len(heads) == 1:
1952 raise util.Abort(_('there is nothing to merge - '
1952 raise util.Abort(_('there is nothing to merge - '
1953 'use "hg update" instead'))
1953 'use "hg update" instead'))
1954 parent = repo.dirstate.parents()[0]
1954 parent = repo.dirstate.parents()[0]
1955 if parent not in heads:
1955 if parent not in heads:
1956 raise util.Abort(_('working dir not at a head rev - '
1956 raise util.Abort(_('working dir not at a head rev - '
1957 'use "hg update" or merge with an explicit rev'))
1957 'use "hg update" or merge with an explicit rev'))
1958 node = parent == heads[0] and heads[-1] or heads[0]
1958 node = parent == heads[0] and heads[-1] or heads[0]
1959 return hg.merge(repo, node, force=force)
1959 return hg.merge(repo, node, force=force)
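# e.g. with exactly two heads and the working directory parented on one of
# them, the fallback above merges with the other head; any other head count
# (or a working directory parent that is not a head) requires an explicit revision.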
1960
1960
1961 def outgoing(ui, repo, dest=None, **opts):
1961 def outgoing(ui, repo, dest=None, **opts):
1962 """show changesets not found in destination
1962 """show changesets not found in destination
1963
1963
1964 Show changesets not found in the specified destination repository or
1964 Show changesets not found in the specified destination repository or
1965 the default push location. These are the changesets that would be pushed
1965 the default push location. These are the changesets that would be pushed
1966 if a push was requested.
1966 if a push was requested.
1967
1967
1968 See pull for valid destination format details.
1968 See pull for valid destination format details.
1969 """
1969 """
1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1970 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1971 setremoteconfig(ui, opts)
1971 setremoteconfig(ui, opts)
1972 revs = None
1972 revs = None
1973 if opts['rev']:
1973 if opts['rev']:
1974 revs = [repo.lookup(rev) for rev in opts['rev']]
1974 revs = [repo.lookup(rev) for rev in opts['rev']]
1975
1975
1976 other = hg.repository(ui, dest)
1976 other = hg.repository(ui, dest)
1977 o = repo.findoutgoing(other, force=opts['force'])
1977 o = repo.findoutgoing(other, force=opts['force'])
1978 if not o:
1978 if not o:
1979 ui.status(_("no changes found\n"))
1979 ui.status(_("no changes found\n"))
1980 return
1980 return
1981 o = repo.changelog.nodesbetween(o, revs)[0]
1981 o = repo.changelog.nodesbetween(o, revs)[0]
1982 if opts['newest_first']:
1982 if opts['newest_first']:
1983 o.reverse()
1983 o.reverse()
1984 displayer = show_changeset(ui, repo, opts)
1984 displayer = show_changeset(ui, repo, opts)
1985 for n in o:
1985 for n in o:
1986 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1986 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1987 if opts['no_merges'] and len(parents) == 2:
1987 if opts['no_merges'] and len(parents) == 2:
1988 continue
1988 continue
1989 displayer.show(changenode=n)
1989 displayer.show(changenode=n)
1990 if opts['patch']:
1990 if opts['patch']:
1991 prev = (parents and parents[0]) or nullid
1991 prev = (parents and parents[0]) or nullid
1992 patch.diff(repo, prev, n)
1992 patch.diff(repo, prev, n)
1993 ui.write("\n")
1993 ui.write("\n")
1994
1994
1995 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
1995 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
1996 """show the parents of the working dir or revision
1996 """show the parents of the working dir or revision
1997
1997
1998 Print the working directory's parent revisions.
1998 Print the working directory's parent revisions.
1999 """
1999 """
2000 # legacy
2000 # legacy
2001 if file_ and not rev:
2001 if file_ and not rev:
2002 try:
2002 try:
2003 rev = repo.lookup(file_)
2003 rev = repo.lookup(file_)
2004 file_ = None
2004 file_ = None
2005 except hg.RepoError:
2005 except hg.RepoError:
2006 pass
2006 pass
2007 else:
2007 else:
2008 ui.warn(_("'hg parent REV' is deprecated, "
2008 ui.warn(_("'hg parent REV' is deprecated, "
2009 "please use 'hg parents -r REV instead\n"))
2009 "please use 'hg parents -r REV instead\n"))
2010
2010
2011 if rev:
2011 if rev:
2012 if file_:
2012 if file_:
2013 ctx = repo.filectx(file_, changeid=rev)
2013 ctx = repo.filectx(file_, changeid=rev)
2014 else:
2014 else:
2015 ctx = repo.changectx(rev)
2015 ctx = repo.changectx(rev)
2016 p = [cp.node() for cp in ctx.parents()]
2016 p = [cp.node() for cp in ctx.parents()]
2017 else:
2017 else:
2018 p = repo.dirstate.parents()
2018 p = repo.dirstate.parents()
2019
2019
2020 br = None
2020 br = None
2021 if branches is not None:
2021 if branches is not None:
2022 br = repo.branchlookup(p)
2022 br = repo.branchlookup(p)
2023 displayer = show_changeset(ui, repo, opts)
2023 displayer = show_changeset(ui, repo, opts)
2024 for n in p:
2024 for n in p:
2025 if n != nullid:
2025 if n != nullid:
2026 displayer.show(changenode=n, brinfo=br)
2026 displayer.show(changenode=n, brinfo=br)
2027
2027
2028 def paths(ui, repo, search=None):
2028 def paths(ui, repo, search=None):
2029 """show definition of symbolic path names
2029 """show definition of symbolic path names
2030
2030
2031 Show definition of symbolic path name NAME. If no name is given, show
2031 Show definition of symbolic path name NAME. If no name is given, show
2032 definition of available names.
2032 definition of available names.
2033
2033
2034 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2034 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2035 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2035 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2036 """
2036 """
2037 if search:
2037 if search:
2038 for name, path in ui.configitems("paths"):
2038 for name, path in ui.configitems("paths"):
2039 if name == search:
2039 if name == search:
2040 ui.write("%s\n" % path)
2040 ui.write("%s\n" % path)
2041 return
2041 return
2042 ui.warn(_("not found!\n"))
2042 ui.warn(_("not found!\n"))
2043 return 1
2043 return 1
2044 else:
2044 else:
2045 for name, path in ui.configitems("paths"):
2045 for name, path in ui.configitems("paths"):
2046 ui.write("%s = %s\n" % (name, path))
2046 ui.write("%s = %s\n" % (name, path))
2047
2047
2048 def postincoming(ui, repo, modheads, optupdate):
2048 def postincoming(ui, repo, modheads, optupdate):
2049 if modheads == 0:
2049 if modheads == 0:
2050 return
2050 return
2051 if optupdate:
2051 if optupdate:
2052 if modheads == 1:
2052 if modheads == 1:
2053 return hg.update(repo, repo.changelog.tip()) # update
2053 return hg.update(repo, repo.changelog.tip()) # update
2054 else:
2054 else:
2055 ui.status(_("not updating, since new heads added\n"))
2055 ui.status(_("not updating, since new heads added\n"))
2056 if modheads > 1:
2056 if modheads > 1:
2057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2057 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2058 else:
2058 else:
2059 ui.status(_("(run 'hg update' to get a working copy)\n"))
2059 ui.status(_("(run 'hg update' to get a working copy)\n"))
2060
2060
2061 def pull(ui, repo, source="default", **opts):
2061 def pull(ui, repo, source="default", **opts):
2062 """pull changes from the specified source
2062 """pull changes from the specified source
2063
2063
2064 Pull changes from a remote repository to a local one.
2064 Pull changes from a remote repository to a local one.
2065
2065
2066 This finds all changes from the repository at the specified path
2066 This finds all changes from the repository at the specified path
2067 or URL and adds them to the local repository. By default, this
2067 or URL and adds them to the local repository. By default, this
2068 does not update the copy of the project in the working directory.
2068 does not update the copy of the project in the working directory.
2069
2069
2070 Valid URLs are of the form:
2070 Valid URLs are of the form:
2071
2071
2072 local/filesystem/path
2072 local/filesystem/path
2073 http://[user@]host[:port]/[path]
2073 http://[user@]host[:port]/[path]
2074 https://[user@]host[:port]/[path]
2074 https://[user@]host[:port]/[path]
2075 ssh://[user@]host[:port]/[path]
2075 ssh://[user@]host[:port]/[path]
2076
2076
2077 Some notes about using SSH with Mercurial:
2077 Some notes about using SSH with Mercurial:
2078 - SSH requires an accessible shell account on the destination machine
2078 - SSH requires an accessible shell account on the destination machine
2079 and a copy of hg in the remote path or specified with remotecmd.
2079 and a copy of hg in the remote path or specified with remotecmd.
2080 - path is relative to the remote user's home directory by default.
2080 - path is relative to the remote user's home directory by default.
2081 Use an extra slash at the start of a path to specify an absolute path:
2081 Use an extra slash at the start of a path to specify an absolute path:
2082 ssh://example.com//tmp/repository
2082 ssh://example.com//tmp/repository
2083 - Mercurial doesn't use its own compression via SSH; the right thing
2083 - Mercurial doesn't use its own compression via SSH; the right thing
2084 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2084 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2085 Host *.mylocalnetwork.example.com
2085 Host *.mylocalnetwork.example.com
2086 Compression off
2086 Compression off
2087 Host *
2087 Host *
2088 Compression on
2088 Compression on
2089 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2089 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2090 with the --ssh command line option.
2090 with the --ssh command line option.
2091 """
2091 """
2092 source = ui.expandpath(source)
2092 source = ui.expandpath(source)
2093 setremoteconfig(ui, opts)
2093 setremoteconfig(ui, opts)
2094
2094
2095 other = hg.repository(ui, source)
2095 other = hg.repository(ui, source)
2096 ui.status(_('pulling from %s\n') % (source))
2096 ui.status(_('pulling from %s\n') % (source))
2097 revs = None
2097 revs = None
2098 if opts['rev'] and not other.local():
2098 if opts['rev'] and not other.local():
2099 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2099 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2100 elif opts['rev']:
2100 elif opts['rev']:
2101 revs = [other.lookup(rev) for rev in opts['rev']]
2101 revs = [other.lookup(rev) for rev in opts['rev']]
2102 modheads = repo.pull(other, heads=revs, force=opts['force'])
2102 modheads = repo.pull(other, heads=revs, force=opts['force'])
2103 return postincoming(ui, repo, modheads, opts['update'])
2103 return postincoming(ui, repo, modheads, opts['update'])
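# e.g. "hg pull -u ssh://user@example.com//srv/repo" pulls over ssh (note the
# double slash for an absolute remote path) and then updates the working
# directory, unless the pull added new heads.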
2104
2104
2105 def push(ui, repo, dest=None, **opts):
2105 def push(ui, repo, dest=None, **opts):
2106 """push changes to the specified destination
2106 """push changes to the specified destination
2107
2107
2108 Push changes from the local repository to the given destination.
2108 Push changes from the local repository to the given destination.
2109
2109
2110 This is the symmetrical operation for pull. It helps to move
2110 This is the symmetrical operation for pull. It helps to move
2111 changes from the current repository to a different one. If the
2111 changes from the current repository to a different one. If the
2112 destination is local this is identical to a pull in that directory
2112 destination is local this is identical to a pull in that directory
2113 from the current one.
2113 from the current one.
2114
2114
2115 By default, push will refuse to run if it detects the result would
2115 By default, push will refuse to run if it detects the result would
2116 increase the number of remote heads. This generally indicates that
2116 increase the number of remote heads. This generally indicates that
2117 the client has forgotten to sync and merge before pushing.
2117 the client has forgotten to sync and merge before pushing.
2118
2118
2119 Valid URLs are of the form:
2119 Valid URLs are of the form:
2120
2120
2121 local/filesystem/path
2121 local/filesystem/path
2122 ssh://[user@]host[:port]/[path]
2122 ssh://[user@]host[:port]/[path]
2123
2123
2124 Look at the help text for the pull command for important details
2124 Look at the help text for the pull command for important details
2125 about ssh:// URLs.
2125 about ssh:// URLs.
2126
2126
2127 Pushing to http:// and https:// URLs is possible, too, if this
2127 Pushing to http:// and https:// URLs is possible, too, if this
2128 feature is enabled on the remote Mercurial server.
2128 feature is enabled on the remote Mercurial server.
2129 """
2129 """
2130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2130 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2131 setremoteconfig(ui, opts)
2131 setremoteconfig(ui, opts)
2132
2132
2133 other = hg.repository(ui, dest)
2133 other = hg.repository(ui, dest)
2134 ui.status(_('pushing to %s\n') % (dest))
2134 ui.status(_('pushing to %s\n') % (dest))
2135 revs = None
2135 revs = None
2136 if opts['rev']:
2136 if opts['rev']:
2137 revs = [repo.lookup(rev) for rev in opts['rev']]
2137 revs = [repo.lookup(rev) for rev in opts['rev']]
2138 r = repo.push(other, opts['force'], revs=revs)
2138 r = repo.push(other, opts['force'], revs=revs)
2139 return r == 0
2139 return r == 0
2140
2140
2141 def rawcommit(ui, repo, *flist, **rc):
2141 def rawcommit(ui, repo, *flist, **rc):
2142 """raw commit interface (DEPRECATED)
2142 """raw commit interface (DEPRECATED)
2143
2143
2144 (DEPRECATED)
2144 (DEPRECATED)
2145 Lowlevel commit, for use in helper scripts.
2145 Lowlevel commit, for use in helper scripts.
2146
2146
2147 This command is not intended to be used by normal users, as it is
2147 This command is not intended to be used by normal users, as it is
2148 primarily useful for importing from other SCMs.
2148 primarily useful for importing from other SCMs.
2149
2149
2150 This command is now deprecated and will be removed in a future
2150 This command is now deprecated and will be removed in a future
2151 release, please use debugsetparents and commit instead.
2151 release, please use debugsetparents and commit instead.
2152 """
2152 """
2153
2153
2154 ui.warn(_("(the rawcommit command is deprecated)\n"))
2154 ui.warn(_("(the rawcommit command is deprecated)\n"))
2155
2155
2156 message = rc['message']
2156 message = rc['message']
2157 if not message and rc['logfile']:
2157 if not message and rc['logfile']:
2158 try:
2158 try:
2159 message = open(rc['logfile']).read()
2159 message = open(rc['logfile']).read()
2160 except IOError:
2160 except IOError:
2161 pass
2161 pass
2162 if not message and not rc['logfile']:
2162 if not message and not rc['logfile']:
2163 raise util.Abort(_("missing commit message"))
2163 raise util.Abort(_("missing commit message"))
2164
2164
2165 files = relpath(repo, list(flist))
2165 files = relpath(repo, list(flist))
2166 if rc['files']:
2166 if rc['files']:
2167 files += open(rc['files']).read().splitlines()
2167 files += open(rc['files']).read().splitlines()
2168
2168
2169 rc['parent'] = map(repo.lookup, rc['parent'])
2169 rc['parent'] = map(repo.lookup, rc['parent'])
2170
2170
2171 try:
2171 try:
2172 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2172 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2173 except ValueError, inst:
2173 except ValueError, inst:
2174 raise util.Abort(str(inst))
2174 raise util.Abort(str(inst))
2175
2175
2176 def recover(ui, repo):
2176 def recover(ui, repo):
2177 """roll back an interrupted transaction
2177 """roll back an interrupted transaction
2178
2178
2179 Recover from an interrupted commit or pull.
2179 Recover from an interrupted commit or pull.
2180
2180
2181 This command tries to fix the repository status after an interrupted
2181 This command tries to fix the repository status after an interrupted
2182 operation. It should only be necessary when Mercurial suggests it.
2182 operation. It should only be necessary when Mercurial suggests it.
2183 """
2183 """
2184 if repo.recover():
2184 if repo.recover():
2185 return hg.verify(repo)
2185 return hg.verify(repo)
2186 return 1
2186 return 1
2187
2187
2188 def remove(ui, repo, *pats, **opts):
2188 def remove(ui, repo, *pats, **opts):
2189 """remove the specified files on the next commit
2189 """remove the specified files on the next commit
2190
2190
2191 Schedule the indicated files for removal from the repository.
2191 Schedule the indicated files for removal from the repository.
2192
2192
2193 This command schedules the files to be removed at the next commit.
2193 This command schedules the files to be removed at the next commit.
2194 This only removes files from the current branch, not from the
2194 This only removes files from the current branch, not from the
2195 entire project history. If the files still exist in the working
2195 entire project history. If the files still exist in the working
2196 directory, they will be deleted from it. If invoked with --after,
2196 directory, they will be deleted from it. If invoked with --after,
2197 files that have been manually deleted are marked as removed.
2197 files that have been manually deleted are marked as removed.
2198
2198
2199 Modified files and added files are not removed by default. To
2199 Modified files and added files are not removed by default. To
2200 remove them, use the -f/--force option.
2200 remove them, use the -f/--force option.
2201 """
2201 """
2202 names = []
2202 names = []
2203 if not opts['after'] and not pats:
2203 if not opts['after'] and not pats:
2204 raise util.Abort(_('no files specified'))
2204 raise util.Abort(_('no files specified'))
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2205 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2206 exact = dict.fromkeys(files)
2206 exact = dict.fromkeys(files)
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2207 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2208 modified, added, removed, deleted, unknown = mardu
2208 modified, added, removed, deleted, unknown = mardu
2209 remove, forget = [], []
2209 remove, forget = [], []
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2210 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2211 reason = None
2211 reason = None
2212 if abs not in deleted and opts['after']:
2212 if abs not in deleted and opts['after']:
2213 reason = _('is still present')
2213 reason = _('is still present')
2214 elif abs in modified and not opts['force']:
2214 elif abs in modified and not opts['force']:
2215 reason = _('is modified (use -f to force removal)')
2215 reason = _('is modified (use -f to force removal)')
2216 elif abs in added:
2216 elif abs in added:
2217 if opts['force']:
2217 if opts['force']:
2218 forget.append(abs)
2218 forget.append(abs)
2219 continue
2219 continue
2220 reason = _('has been marked for add (use -f to force removal)')
2220 reason = _('has been marked for add (use -f to force removal)')
2221 elif abs in unknown:
2221 elif abs in unknown:
2222 reason = _('is not managed')
2222 reason = _('is not managed')
2223 elif abs in removed:
2223 elif abs in removed:
2224 continue
2224 continue
2225 if reason:
2225 if reason:
2226 if exact:
2226 if exact:
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2227 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2228 else:
2228 else:
2229 if ui.verbose or not exact:
2229 if ui.verbose or not exact:
2230 ui.status(_('removing %s\n') % rel)
2230 ui.status(_('removing %s\n') % rel)
2231 remove.append(abs)
2231 remove.append(abs)
2232 repo.forget(forget)
2232 repo.forget(forget)
2233 repo.remove(remove, unlink=not opts['after'])
2233 repo.remove(remove, unlink=not opts['after'])
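# e.g. after deleting a tracked file by hand, "hg remove --after FILE" only
# records the removal; without --after the file is also deleted from the
# working directory (unlink=True above).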
2234
2234
2235 def rename(ui, repo, *pats, **opts):
2235 def rename(ui, repo, *pats, **opts):
2236 """rename files; equivalent of copy + remove
2236 """rename files; equivalent of copy + remove
2237
2237
2238 Mark dest as copies of sources; mark sources for deletion. If
2238 Mark dest as copies of sources; mark sources for deletion. If
2239 dest is a directory, copies are put in that directory. If dest is
2239 dest is a directory, copies are put in that directory. If dest is
2240 a file, there can only be one source.
2240 a file, there can only be one source.
2241
2241
2242 By default, this command copies the contents of files as they
2242 By default, this command copies the contents of files as they
2243 stand in the working directory. If invoked with --after, the
2243 stand in the working directory. If invoked with --after, the
2244 operation is recorded, but no copying is performed.
2244 operation is recorded, but no copying is performed.
2245
2245
2246 This command takes effect in the next commit.
2246 This command takes effect in the next commit.
2247
2247
2248 NOTE: This command should be treated as experimental. While it
2248 NOTE: This command should be treated as experimental. While it
2249 should properly record renamed files, this information is not yet
2249 should properly record renamed files, this information is not yet
2250 fully used by merge, nor fully reported by log.
2250 fully used by merge, nor fully reported by log.
2251 """
2251 """
2252 wlock = repo.wlock(0)
2252 wlock = repo.wlock(0)
2253 errs, copied = docopy(ui, repo, pats, opts, wlock)
2253 errs, copied = docopy(ui, repo, pats, opts, wlock)
2254 names = []
2254 names = []
2255 for abs, rel, exact in copied:
2255 for abs, rel, exact in copied:
2256 if ui.verbose or not exact:
2256 if ui.verbose or not exact:
2257 ui.status(_('removing %s\n') % rel)
2257 ui.status(_('removing %s\n') % rel)
2258 names.append(abs)
2258 names.append(abs)
2259 if not opts.get('dry_run'):
2259 if not opts.get('dry_run'):
2260 repo.remove(names, True, wlock)
2260 repo.remove(names, True, wlock)
2261 return errs
2261 return errs
2262
2262
2263 def revert(ui, repo, *pats, **opts):
2263 def revert(ui, repo, *pats, **opts):
2264 """revert files or dirs to their states as of some revision
2264 """revert files or dirs to their states as of some revision
2265
2265
2266 With no revision specified, revert the named files or directories
2266 With no revision specified, revert the named files or directories
2267 to the contents they had in the parent of the working directory.
2267 to the contents they had in the parent of the working directory.
2268 This restores the contents of the affected files to an unmodified
2268 This restores the contents of the affected files to an unmodified
2269 state. If the working directory has two parents, you must
2269 state. If the working directory has two parents, you must
2270 explicitly specify the revision to revert to.
2270 explicitly specify the revision to revert to.
2271
2271
2272 Modified files are saved with a .orig suffix before reverting.
2272 Modified files are saved with a .orig suffix before reverting.
2273 To disable these backups, use --no-backup.
2273 To disable these backups, use --no-backup.
2274
2274
2275 Using the -r option, revert the given files or directories to their
2275 Using the -r option, revert the given files or directories to their
2276 contents as of a specific revision. This can be helpful to "roll
2276 contents as of a specific revision. This can be helpful to "roll
2277 back" some or all of a change that should not have been committed.
2277 back" some or all of a change that should not have been committed.
2278
2278
2279 Revert modifies the working directory. It does not commit any
2279 Revert modifies the working directory. It does not commit any
2280 changes, or change the parent of the working directory. If you
2280 changes, or change the parent of the working directory. If you
2281 revert to a revision other than the parent of the working
2281 revert to a revision other than the parent of the working
2282 directory, the reverted files will thus appear modified
2282 directory, the reverted files will thus appear modified
2283 afterwards.
2283 afterwards.
2284
2284
2285 If a file has been deleted, it is recreated. If the executable
2285 If a file has been deleted, it is recreated. If the executable
2286 mode of a file was changed, it is reset.
2286 mode of a file was changed, it is reset.
2287
2287
2288 If names are given, all files matching the names are reverted.
2288 If names are given, all files matching the names are reverted.
2289
2289
2290 If no arguments are given, no files are reverted.
2290 If no arguments are given, no files are reverted.
2291 """
2291 """
2292
2292
2293 if not pats and not opts['all']:
2293 if not pats and not opts['all']:
2294 raise util.Abort(_('no files or directories specified; '
2294 raise util.Abort(_('no files or directories specified; '
2295 'use --all to revert the whole repo'))
2295 'use --all to revert the whole repo'))
2296
2296
2297 parent, p2 = repo.dirstate.parents()
2297 parent, p2 = repo.dirstate.parents()
2298 if opts['rev']:
2298 if opts['rev']:
2299 node = repo.lookup(opts['rev'])
2299 node = repo.lookup(opts['rev'])
2300 elif p2 != nullid:
2300 elif p2 != nullid:
2301 raise util.Abort(_('working dir has two parents; '
2301 raise util.Abort(_('working dir has two parents; '
2302 'you must specify the revision to revert to'))
2302 'you must specify the revision to revert to'))
2303 else:
2303 else:
2304 node = parent
2304 node = parent
2305 mf = repo.manifest.read(repo.changelog.read(node)[0])
2305 mf = repo.manifest.read(repo.changelog.read(node)[0])
2306 if node == parent:
2306 if node == parent:
2307 pmf = mf
2307 pmf = mf
2308 else:
2308 else:
2309 pmf = None
2309 pmf = None
2310
2310
2311 wlock = repo.wlock()
2311 wlock = repo.wlock()
2312
2312
2313 # need all matching names in dirstate and manifest of target rev,
2313 # need all matching names in dirstate and manifest of target rev,
2314 # so have to walk both. do not print errors if files exist in one
2314 # so have to walk both. do not print errors if files exist in one
2315 # but not other.
2315 # but not other.
2316
2316
2317 names = {}
2317 names = {}
2318 target_only = {}
2318 target_only = {}
2319
2319
2320 # walk dirstate.
2320 # walk dirstate.
2321
2321
2322 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2322 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2323 badmatch=mf.has_key):
2323 badmatch=mf.has_key):
2324 names[abs] = (rel, exact)
2324 names[abs] = (rel, exact)
2325 if src == 'b':
2325 if src == 'b':
2326 target_only[abs] = True
2326 target_only[abs] = True
2327
2327
2328 # walk target manifest.
2328 # walk target manifest.
2329
2329
2330 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2330 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2331 badmatch=names.has_key):
2331 badmatch=names.has_key):
2332 if abs in names: continue
2332 if abs in names: continue
2333 names[abs] = (rel, exact)
2333 names[abs] = (rel, exact)
2334 target_only[abs] = True
2334 target_only[abs] = True
2335
2335
2336 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2336 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2337 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2337 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2338
2338
2339 revert = ([], _('reverting %s\n'))
2339 revert = ([], _('reverting %s\n'))
2340 add = ([], _('adding %s\n'))
2340 add = ([], _('adding %s\n'))
2341 remove = ([], _('removing %s\n'))
2341 remove = ([], _('removing %s\n'))
2342 forget = ([], _('forgetting %s\n'))
2342 forget = ([], _('forgetting %s\n'))
2343 undelete = ([], _('undeleting %s\n'))
2343 undelete = ([], _('undeleting %s\n'))
2344 update = {}
2344 update = {}
2345
2345
2346 disptable = (
2346 disptable = (
2347 # dispatch table:
2347 # dispatch table:
2348 # file state
2348 # file state
2349 # action if in target manifest
2349 # action if in target manifest
2350 # action if not in target manifest
2350 # action if not in target manifest
2351 # make backup if in target manifest
2351 # make backup if in target manifest
2352 # make backup if not in target manifest
2352 # make backup if not in target manifest
2353 (modified, revert, remove, True, True),
2353 (modified, revert, remove, True, True),
2354 (added, revert, forget, True, False),
2354 (added, revert, forget, True, False),
2355 (removed, undelete, None, False, False),
2355 (removed, undelete, None, False, False),
2356 (deleted, revert, remove, False, False),
2356 (deleted, revert, remove, False, False),
2357 (unknown, add, None, True, False),
2357 (unknown, add, None, True, False),
2358 (target_only, add, None, False, False),
2358 (target_only, add, None, False, False),
2359 )
2359 )
2360
2360
2361 entries = names.items()
2361 entries = names.items()
2362 entries.sort()
2362 entries.sort()
2363
2363
2364 for abs, (rel, exact) in entries:
2364 for abs, (rel, exact) in entries:
2365 mfentry = mf.get(abs)
2365 mfentry = mf.get(abs)
2366 def handle(xlist, dobackup):
2366 def handle(xlist, dobackup):
2367 xlist[0].append(abs)
2367 xlist[0].append(abs)
2368 update[abs] = 1
2368 update[abs] = 1
2369 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2369 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2370 bakname = "%s.orig" % rel
2370 bakname = "%s.orig" % rel
2371 ui.note(_('saving current version of %s as %s\n') %
2371 ui.note(_('saving current version of %s as %s\n') %
2372 (rel, bakname))
2372 (rel, bakname))
2373 if not opts.get('dry_run'):
2373 if not opts.get('dry_run'):
2374 shutil.copyfile(rel, bakname)
2374 shutil.copyfile(rel, bakname)
2375 shutil.copymode(rel, bakname)
2375 shutil.copymode(rel, bakname)
2376 if ui.verbose or not exact:
2376 if ui.verbose or not exact:
2377 ui.status(xlist[1] % rel)
2377 ui.status(xlist[1] % rel)
2378 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2378 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2379 if abs not in table: continue
2379 if abs not in table: continue
2380 # file has changed in dirstate
2380 # file has changed in dirstate
2381 if mfentry:
2381 if mfentry:
2382 handle(hitlist, backuphit)
2382 handle(hitlist, backuphit)
2383 elif misslist is not None:
2383 elif misslist is not None:
2384 handle(misslist, backupmiss)
2384 handle(misslist, backupmiss)
2385 else:
2385 else:
2386 if exact: ui.warn(_('file not managed: %s\n') % rel)
2386 if exact: ui.warn(_('file not managed: %s\n') % rel)
2387 break
2387 break
2388 else:
2388 else:
2389 # file has not changed in dirstate
2389 # file has not changed in dirstate
2390 if node == parent:
2390 if node == parent:
2391 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2391 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2392 continue
2392 continue
2393 if pmf is None:
2393 if pmf is None:
2394 # only need parent manifest in this unlikely case,
2394 # only need parent manifest in this unlikely case,
2395 # so do not read by default
2395 # so do not read by default
2396 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2396 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2397 if abs in pmf:
2397 if abs in pmf:
2398 if mfentry:
2398 if mfentry:
2399 # if version of file is same in parent and target
2399 # if version of file is same in parent and target
2400 # manifests, do nothing
2400 # manifests, do nothing
2401 if pmf[abs] != mfentry:
2401 if pmf[abs] != mfentry:
2402 handle(revert, False)
2402 handle(revert, False)
2403 else:
2403 else:
2404 handle(remove, False)
2404 handle(remove, False)
2405
2405
2406 if not opts.get('dry_run'):
2406 if not opts.get('dry_run'):
2407 repo.dirstate.forget(forget[0])
2407 repo.dirstate.forget(forget[0])
2408 r = hg.revert(repo, node, update.has_key, wlock)
2408 r = hg.revert(repo, node, update.has_key, wlock)
2409 repo.dirstate.update(add[0], 'a')
2409 repo.dirstate.update(add[0], 'a')
2410 repo.dirstate.update(undelete[0], 'n')
2410 repo.dirstate.update(undelete[0], 'n')
2411 repo.dirstate.update(remove[0], 'r')
2411 repo.dirstate.update(remove[0], 'r')
2412 return r
2412 return r
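(Editorial sketch: revert drives its per-file decisions from the dispatch table built above. The self-contained example below shows the same pattern of mapping a file's dirstate status to an action plus a backup flag; the names are illustrative, not the real API.)

def classify(state, in_target):
    # (action if file is in target manifest, action if not,
    #  backup when hit, backup when missed) -- same shape as disptable above
    table = {
        'modified': ('revert',   'remove', True,  True),
        'added':    ('revert',   'forget', True,  False),
        'removed':  ('undelete', None,     False, False),
        'deleted':  ('revert',   'remove', False, False),
        'unknown':  ('add',      None,     True,  False),
    }
    hit, miss, backuphit, backupmiss = table[state]
    return (hit, backuphit) if in_target else (miss, backupmiss)

# A locally modified file that no longer exists in the target revision is
# removed, with a backup saved first:
assert classify('modified', in_target=False) == ('remove', True)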
2413
2413
2414 def rollback(ui, repo):
2414 def rollback(ui, repo):
2415 """roll back the last transaction in this repository
2415 """roll back the last transaction in this repository
2416
2416
2417 Roll back the last transaction in this repository, restoring the
2417 Roll back the last transaction in this repository, restoring the
2418 project to its state prior to the transaction.
2418 project to its state prior to the transaction.
2419
2419
2420 Transactions are used to encapsulate the effects of all commands
2420 Transactions are used to encapsulate the effects of all commands
2421 that create new changesets or propagate existing changesets into a
2421 that create new changesets or propagate existing changesets into a
2422 repository. For example, the following commands are transactional,
2422 repository. For example, the following commands are transactional,
2423 and their effects can be rolled back:
2423 and their effects can be rolled back:
2424
2424
2425 commit
2425 commit
2426 import
2426 import
2427 pull
2427 pull
2428 push (with this repository as destination)
2428 push (with this repository as destination)
2429 unbundle
2429 unbundle
2430
2430
2431 This command should be used with care. There is only one level of
2431 This command should be used with care. There is only one level of
2432 rollback, and there is no way to undo a rollback.
2432 rollback, and there is no way to undo a rollback.
2433
2433
2434 This command is not intended for use on public repositories. Once
2434 This command is not intended for use on public repositories. Once
2435 changes are visible for pull by other users, rolling a transaction
2435 changes are visible for pull by other users, rolling a transaction
2436 back locally is ineffective (someone else may already have pulled
2436 back locally is ineffective (someone else may already have pulled
2437 the changes). Furthermore, a race is possible with readers of the
2437 the changes). Furthermore, a race is possible with readers of the
2438 repository; for example an in-progress pull from the repository
2438 repository; for example an in-progress pull from the repository
2439 may fail if a rollback is performed.
2439 may fail if a rollback is performed.
2440 """
2440 """
2441 repo.rollback()
2441 repo.rollback()
2442
2442
2443 def root(ui, repo):
2443 def root(ui, repo):
2444 """print the root (top) of the current working dir
2444 """print the root (top) of the current working dir
2445
2445
2446 Print the root directory of the current repository.
2446 Print the root directory of the current repository.
2447 """
2447 """
2448 ui.write(repo.root + "\n")
2448 ui.write(repo.root + "\n")
2449
2449
2450 def serve(ui, repo, **opts):
2450 def serve(ui, repo, **opts):
2451 """export the repository via HTTP
2451 """export the repository via HTTP
2452
2452
2453 Start a local HTTP repository browser and pull server.
2453 Start a local HTTP repository browser and pull server.
2454
2454
2455 By default, the server logs accesses to stdout and errors to
2455 By default, the server logs accesses to stdout and errors to
2456 stderr. Use the "-A" and "-E" options to log to files.
2456 stderr. Use the "-A" and "-E" options to log to files.
2457 """
2457 """
2458
2458
2459 if opts["stdio"]:
2459 if opts["stdio"]:
2460 if repo is None:
2460 if repo is None:
2461 raise hg.RepoError(_('no repo found'))
2461 raise hg.RepoError(_('no repo found'))
2462 s = sshserver.sshserver(ui, repo)
2462 s = sshserver.sshserver(ui, repo)
2463 s.serve_forever()
2463 s.serve_forever()
2464
2464
2465 optlist = ("name templates style address port ipv6"
2465 optlist = ("name templates style address port ipv6"
2466 " accesslog errorlog webdir_conf")
2466 " accesslog errorlog webdir_conf")
2467 for o in optlist.split():
2467 for o in optlist.split():
2468 if opts[o]:
2468 if opts[o]:
2469 ui.setconfig("web", o, opts[o])
2469 ui.setconfig("web", o, opts[o])
2470
2470
2471 if repo is None and not ui.config("web", "webdir_conf"):
2471 if repo is None and not ui.config("web", "webdir_conf"):
2472 raise hg.RepoError(_('no repo found'))
2472 raise hg.RepoError(_('no repo found'))
2473
2473
2474 if opts['daemon'] and not opts['daemon_pipefds']:
2474 if opts['daemon'] and not opts['daemon_pipefds']:
2475 rfd, wfd = os.pipe()
2475 rfd, wfd = os.pipe()
2476 args = sys.argv[:]
2476 args = sys.argv[:]
2477 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2477 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2478 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2478 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2479 args[0], args)
2479 args[0], args)
2480 os.close(wfd)
2480 os.close(wfd)
2481 os.read(rfd, 1)
2481 os.read(rfd, 1)
2482 os._exit(0)
2482 os._exit(0)
2483
2483
2484 try:
2484 try:
2485 httpd = hgweb.server.create_server(ui, repo)
2485 httpd = hgweb.server.create_server(ui, repo)
2486 except socket.error, inst:
2486 except socket.error, inst:
2487 raise util.Abort(_('cannot start server: ') + inst.args[1])
2487 raise util.Abort(_('cannot start server: %s') % inst.args[1])
2488
2488
2489 if ui.verbose:
2489 if ui.verbose:
2490 addr, port = httpd.socket.getsockname()
2490 addr, port = httpd.socket.getsockname()
2491 if addr == '0.0.0.0':
2491 if addr == '0.0.0.0':
2492 addr = socket.gethostname()
2492 addr = socket.gethostname()
2493 else:
2493 else:
2494 try:
2494 try:
2495 addr = socket.gethostbyaddr(addr)[0]
2495 addr = socket.gethostbyaddr(addr)[0]
2496 except socket.error:
2496 except socket.error:
2497 pass
2497 pass
2498 if port != 80:
2498 if port != 80:
2499 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2499 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2500 else:
2500 else:
2501 ui.status(_('listening at http://%s/\n') % addr)
2501 ui.status(_('listening at http://%s/\n') % addr)
2502
2502
2503 if opts['pid_file']:
2503 if opts['pid_file']:
2504 fp = open(opts['pid_file'], 'w')
2504 fp = open(opts['pid_file'], 'w')
2505 fp.write(str(os.getpid()) + '\n')
2505 fp.write(str(os.getpid()) + '\n')
2506 fp.close()
2506 fp.close()
2507
2507
2508 if opts['daemon_pipefds']:
2508 if opts['daemon_pipefds']:
2509 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2509 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2510 os.close(rfd)
2510 os.close(rfd)
2511 os.write(wfd, 'y')
2511 os.write(wfd, 'y')
2512 os.close(wfd)
2512 os.close(wfd)
2513 sys.stdout.flush()
2513 sys.stdout.flush()
2514 sys.stderr.flush()
2514 sys.stderr.flush()
2515 fd = os.open(util.nulldev, os.O_RDWR)
2515 fd = os.open(util.nulldev, os.O_RDWR)
2516 if fd != 0: os.dup2(fd, 0)
2516 if fd != 0: os.dup2(fd, 0)
2517 if fd != 1: os.dup2(fd, 1)
2517 if fd != 1: os.dup2(fd, 1)
2518 if fd != 2: os.dup2(fd, 2)
2518 if fd != 2: os.dup2(fd, 2)
2519 if fd not in (0, 1, 2): os.close(fd)
2519 if fd not in (0, 1, 2): os.close(fd)
2520
2520
2521 httpd.serve_forever()
2521 httpd.serve_forever()
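(Editorial sketch: the --daemon path above uses a small pipe handshake: the parent re-spawns itself with --daemon-pipefds and blocks on a pipe until the child writes one byte to say it is listening. A rough Unix-only illustration of the same handshake, using fork instead of spawnvp; all names are illustrative.)

import os

def start_daemon(run_service):
    rfd, wfd = os.pipe()
    pid = os.fork()
    if pid:                       # parent
        os.close(wfd)
        os.read(rfd, 1)           # blocks until the child signals readiness
        os.close(rfd)
        return pid
    os.close(rfd)                 # child
    os.write(wfd, b'y')           # the readiness byte, like the 'y' above
    os.close(wfd)
    fd = os.open(os.devnull, os.O_RDWR)
    for target in (0, 1, 2):      # detach stdin/stdout/stderr
        os.dup2(fd, target)
    if fd > 2:
        os.close(fd)
    run_service()                 # e.g. httpd.serve_forever()
    os._exit(0)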
2522
2522
2523 def status(ui, repo, *pats, **opts):
2523 def status(ui, repo, *pats, **opts):
2524 """show changed files in the working directory
2524 """show changed files in the working directory
2525
2525
2526 Show status of files in the repository. If names are given, only
2526 Show status of files in the repository. If names are given, only
2527 files that match are shown. Files that are clean or ignored are
2527 files that match are shown. Files that are clean or ignored are
2528 not listed unless -c (clean), -i (ignored) or -A is given.
2528 not listed unless -c (clean), -i (ignored) or -A is given.
2529
2529
2530 The codes used to show the status of files are:
2530 The codes used to show the status of files are:
2531 M = modified
2531 M = modified
2532 A = added
2532 A = added
2533 R = removed
2533 R = removed
2534 C = clean
2534 C = clean
2535 ! = deleted, but still tracked
2535 ! = deleted, but still tracked
2536 ? = not tracked
2536 ? = not tracked
2537 I = ignored (not shown by default)
2537 I = ignored (not shown by default)
2538 = the previous added file was copied from here
2538 = the previous added file was copied from here
2539 """
2539 """
2540
2540
2541 all = opts['all']
2541 all = opts['all']
2542
2542
2543 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2543 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2544 cwd = (pats and repo.getcwd()) or ''
2544 cwd = (pats and repo.getcwd()) or ''
2545 modified, added, removed, deleted, unknown, ignored, clean = [
2545 modified, added, removed, deleted, unknown, ignored, clean = [
2546 [util.pathto(cwd, x) for x in n]
2546 [util.pathto(cwd, x) for x in n]
2547 for n in repo.status(files=files, match=matchfn,
2547 for n in repo.status(files=files, match=matchfn,
2548 list_ignored=all or opts['ignored'],
2548 list_ignored=all or opts['ignored'],
2549 list_clean=all or opts['clean'])]
2549 list_clean=all or opts['clean'])]
2550
2550
2551 changetypes = (('modified', 'M', modified),
2551 changetypes = (('modified', 'M', modified),
2552 ('added', 'A', added),
2552 ('added', 'A', added),
2553 ('removed', 'R', removed),
2553 ('removed', 'R', removed),
2554 ('deleted', '!', deleted),
2554 ('deleted', '!', deleted),
2555 ('unknown', '?', unknown),
2555 ('unknown', '?', unknown),
2556 ('ignored', 'I', ignored))
2556 ('ignored', 'I', ignored))
2557
2557
2558 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2558 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2559
2559
2560 end = opts['print0'] and '\0' or '\n'
2560 end = opts['print0'] and '\0' or '\n'
2561
2561
2562 for opt, char, changes in ([ct for ct in explicit_changetypes
2562 for opt, char, changes in ([ct for ct in explicit_changetypes
2563 if all or opts[ct[0]]]
2563 if all or opts[ct[0]]]
2564 or changetypes):
2564 or changetypes):
2565 if opts['no_status']:
2565 if opts['no_status']:
2566 format = "%%s%s" % end
2566 format = "%%s%s" % end
2567 else:
2567 else:
2568 format = "%s %%s%s" % (char, end)
2568 format = "%s %%s%s" % (char, end)
2569
2569
2570 for f in changes:
2570 for f in changes:
2571 ui.write(format % f)
2571 ui.write(format % f)
2572 if ((all or opts.get('copies')) and not opts.get('no_status')
2572 if ((all or opts.get('copies')) and not opts.get('no_status')
2573 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2573 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2574 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2574 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
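(Editorial sketch: the loop above builds its output format in two passes: the status character and line terminator are substituted first, leaving a literal %s for each filename. A tiny standalone version of that trick:)

def status_lines(char, files, print0=False, no_status=False):
    end = print0 and '\0' or '\n'
    if no_status:
        format = "%%s%s" % end              # first pass keeps %s for the name
    else:
        format = "%s %%s%s" % (char, end)   # e.g. "M %s\n"
    return [format % f for f in files]

assert status_lines('M', ['a.txt', 'b.txt']) == ['M a.txt\n', 'M b.txt\n']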
2575
2575
2576 def tag(ui, repo, name, rev_=None, **opts):
2576 def tag(ui, repo, name, rev_=None, **opts):
2577 """add a tag for the current tip or a given revision
2577 """add a tag for the current tip or a given revision
2578
2578
2579 Name a particular revision using <name>.
2579 Name a particular revision using <name>.
2580
2580
2581 Tags are used to name particular revisions of the repository and are
2581 Tags are used to name particular revisions of the repository and are
2582 very useful to compare different revisions, to go back to significant
2582 very useful to compare different revisions, to go back to significant
2583 earlier versions or to mark branch points as releases, etc.
2583 earlier versions or to mark branch points as releases, etc.
2584
2584
2585 If no revision is given, the parent of the working directory is used.
2585 If no revision is given, the parent of the working directory is used.
2586
2586
2587 To facilitate version control, distribution, and merging of tags,
2587 To facilitate version control, distribution, and merging of tags,
2588 they are stored as a file named ".hgtags" which is managed
2588 they are stored as a file named ".hgtags" which is managed
2589 similarly to other project files and can be hand-edited if
2589 similarly to other project files and can be hand-edited if
2590 necessary. The file '.hg/localtags' is used for local tags (not
2590 necessary. The file '.hg/localtags' is used for local tags (not
2591 shared among repositories).
2591 shared among repositories).
2592 """
2592 """
2593 if name in ['tip', '.']:
2593 if name in ['tip', '.']:
2594 raise util.Abort(_("the name '%s' is reserved") % name)
2594 raise util.Abort(_("the name '%s' is reserved") % name)
2595 if rev_ is not None:
2595 if rev_ is not None:
2596 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2596 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2597 "please use 'hg tag [-r REV] NAME' instead\n"))
2597 "please use 'hg tag [-r REV] NAME' instead\n"))
2598 if opts['rev']:
2598 if opts['rev']:
2599 raise util.Abort(_("use only one form to specify the revision"))
2599 raise util.Abort(_("use only one form to specify the revision"))
2600 if opts['rev']:
2600 if opts['rev']:
2601 rev_ = opts['rev']
2601 rev_ = opts['rev']
2602 if rev_:
2602 if rev_:
2603 r = repo.lookup(rev_)
2603 r = repo.lookup(rev_)
2604 else:
2604 else:
2605 p1, p2 = repo.dirstate.parents()
2605 p1, p2 = repo.dirstate.parents()
2606 if p1 == nullid:
2606 if p1 == nullid:
2607 raise util.Abort(_('no revision to tag'))
2607 raise util.Abort(_('no revision to tag'))
2608 if p2 != nullid:
2608 if p2 != nullid:
2609 raise util.Abort(_('outstanding uncommitted merges'))
2609 raise util.Abort(_('outstanding uncommitted merges'))
2610 r = p1
2610 r = p1
2611
2611
2612 message = opts['message']
2612 message = opts['message']
2613 if not message:
2613 if not message:
2614 message = _('Added tag %s for changeset %s') % (name, short(r))
2614 message = _('Added tag %s for changeset %s') % (name, short(r))
2615
2615
2616 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2616 repo.tag(name, r, message, opts['local'], opts['user'], opts['date'])
2617
2617
2618 def tags(ui, repo):
2618 def tags(ui, repo):
2619 """list repository tags
2619 """list repository tags
2620
2620
2621 List the repository tags.
2621 List the repository tags.
2622
2622
2623 This lists both regular and local tags.
2623 This lists both regular and local tags.
2624 """
2624 """
2625
2625
2626 l = repo.tagslist()
2626 l = repo.tagslist()
2627 l.reverse()
2627 l.reverse()
2628 hexfunc = ui.debugflag and hex or short
2628 hexfunc = ui.debugflag and hex or short
2629 for t, n in l:
2629 for t, n in l:
2630 try:
2630 try:
2631 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2631 r = "%5d:%s" % (repo.changelog.rev(n), hexfunc(n))
2632 except KeyError:
2632 except KeyError:
2633 r = " ?:?"
2633 r = " ?:?"
2634 if ui.quiet:
2634 if ui.quiet:
2635 ui.write("%s\n" % t)
2635 ui.write("%s\n" % t)
2636 else:
2636 else:
2637 ui.write("%-30s %s\n" % (t, r))
2637 ui.write("%-30s %s\n" % (t, r))
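(Editorial sketch: tags() selects its hash formatter with the pre-ternary `cond and a or b` idiom, also used for `end` in status above. The example below shows the idiom and its usual caveat: the middle value must be truthy, or the expression silently falls through to the `or` branch.)

def pick_formatter(debugflag):
    full = lambda n: 'full:%s' % n
    brief = lambda n: 'short:%s' % n
    # Equivalent to "full if debugflag else brief"; safe here because both
    # alternatives are functions and therefore truthy.
    return debugflag and full or brief

assert pick_formatter(True)('abc') == 'full:abc'
assert pick_formatter(False)('abc') == 'short:abc'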
2638
2638
2639 def tip(ui, repo, **opts):
2639 def tip(ui, repo, **opts):
2640 """show the tip revision
2640 """show the tip revision
2641
2641
2642 Show the tip revision.
2642 Show the tip revision.
2643 """
2643 """
2644 n = repo.changelog.tip()
2644 n = repo.changelog.tip()
2645 br = None
2645 br = None
2646 if opts['branches']:
2646 if opts['branches']:
2647 br = repo.branchlookup([n])
2647 br = repo.branchlookup([n])
2648 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2648 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2649 if opts['patch']:
2649 if opts['patch']:
2650 patch.diff(repo, repo.changelog.parents(n)[0], n)
2650 patch.diff(repo, repo.changelog.parents(n)[0], n)
2651
2651
2652 def unbundle(ui, repo, fname, **opts):
2652 def unbundle(ui, repo, fname, **opts):
2653 """apply a changegroup file
2653 """apply a changegroup file
2654
2654
2655 Apply a compressed changegroup file generated by the bundle
2655 Apply a compressed changegroup file generated by the bundle
2656 command.
2656 command.
2657 """
2657 """
2658 f = urllib.urlopen(fname)
2658 f = urllib.urlopen(fname)
2659
2659
2660 header = f.read(6)
2660 header = f.read(6)
2661 if not header.startswith("HG"):
2661 if not header.startswith("HG"):
2662 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2662 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2663 elif not header.startswith("HG10"):
2663 elif not header.startswith("HG10"):
2664 raise util.Abort(_("%s: unknown bundle version") % fname)
2664 raise util.Abort(_("%s: unknown bundle version") % fname)
2665 elif header == "HG10BZ":
2665 elif header == "HG10BZ":
2666 def generator(f):
2666 def generator(f):
2667 zd = bz2.BZ2Decompressor()
2667 zd = bz2.BZ2Decompressor()
2668 zd.decompress("BZ")
2668 zd.decompress("BZ")
2669 for chunk in f:
2669 for chunk in f:
2670 yield zd.decompress(chunk)
2670 yield zd.decompress(chunk)
2671 elif header == "HG10UN":
2671 elif header == "HG10UN":
2672 def generator(f):
2672 def generator(f):
2673 for chunk in f:
2673 for chunk in f:
2674 yield chunk
2674 yield chunk
2675 else:
2675 else:
2676 raise util.Abort(_("%s: unknown bundle compression type")
2676 raise util.Abort(_("%s: unknown bundle compression type")
2677 % fname)
2677 % fname)
2678 gen = generator(util.filechunkiter(f, 4096))
2678 gen = generator(util.filechunkiter(f, 4096))
2679 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2679 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2680 'bundle:' + fname)
2680 'bundle:' + fname)
2681 return postincoming(ui, repo, modheads, opts['update'])
2681 return postincoming(ui, repo, modheads, opts['update'])
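(Editorial sketch: unbundle() sniffs a six-byte header and then wraps the rest of the file in a decompressing generator. Below is a self-contained version of the same header/streaming pattern; the HG10BZ/HG10UN names follow the code above, everything else is illustrative.)

import bz2

def bundle_chunks(fileobj, header):
    # Yield chunks of changegroup data, decompressing on the fly for
    # bzip2 bundles; unknown headers are rejected up front.
    if not header.startswith(b"HG10"):
        raise ValueError("not a recognised bundle header")
    if header == b"HG10BZ":
        zd = bz2.BZ2Decompressor()
        zd.decompress(b"BZ")          # re-feed the magic the header consumed
        def gen():
            while True:
                chunk = fileobj.read(4096)
                if not chunk:
                    break
                yield zd.decompress(chunk)
    elif header == b"HG10UN":
        def gen():
            while True:
                chunk = fileobj.read(4096)
                if not chunk:
                    break
                yield chunk
    else:
        raise ValueError("unknown bundle compression type")
    return gen()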
2682
2682
2683 def undo(ui, repo):
2683 def undo(ui, repo):
2684 """undo the last commit or pull (DEPRECATED)
2684 """undo the last commit or pull (DEPRECATED)
2685
2685
2686 (DEPRECATED)
2686 (DEPRECATED)
2687 This command is now deprecated and will be removed in a future
2687 This command is now deprecated and will be removed in a future
2688 release. Please use the rollback command instead. For usage
2688 release. Please use the rollback command instead. For usage
2689 instructions, see the rollback command.
2689 instructions, see the rollback command.
2690 """
2690 """
2691 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2691 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2692 repo.rollback()
2692 repo.rollback()
2693
2693
2694 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2694 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2695 branch=None):
2695 branch=None):
2696 """update or merge working directory
2696 """update or merge working directory
2697
2697
2698 Update the working directory to the specified revision.
2698 Update the working directory to the specified revision.
2699
2699
2700 If there are no outstanding changes in the working directory and
2700 If there are no outstanding changes in the working directory and
2701 there is a linear relationship between the current version and the
2701 there is a linear relationship between the current version and the
2702 requested version, the result is the requested version.
2702 requested version, the result is the requested version.
2703
2703
2704 To merge the working directory with another revision, use the
2704 To merge the working directory with another revision, use the
2705 merge command.
2705 merge command.
2706
2706
2707 By default, update will refuse to run if doing so would require
2707 By default, update will refuse to run if doing so would require
2708 merging or discarding local changes.
2708 merging or discarding local changes.
2709 """
2709 """
2710 node = _lookup(repo, node, branch)
2710 node = _lookup(repo, node, branch)
2711 if merge:
2711 if merge:
2712 ui.warn(_('(the -m/--merge option is deprecated; '
2712 ui.warn(_('(the -m/--merge option is deprecated; '
2713 'use the merge command instead)\n'))
2713 'use the merge command instead)\n'))
2714 return hg.merge(repo, node, force=force)
2714 return hg.merge(repo, node, force=force)
2715 elif clean:
2715 elif clean:
2716 return hg.clean(repo, node)
2716 return hg.clean(repo, node)
2717 else:
2717 else:
2718 return hg.update(repo, node)
2718 return hg.update(repo, node)
2719
2719
2720 def _lookup(repo, node, branch=None):
2720 def _lookup(repo, node, branch=None):
2721 if branch:
2721 if branch:
2722 br = repo.branchlookup(branch=branch)
2722 br = repo.branchlookup(branch=branch)
2723 found = []
2723 found = []
2724 for x in br:
2724 for x in br:
2725 if branch in br[x]:
2725 if branch in br[x]:
2726 found.append(x)
2726 found.append(x)
2727 if len(found) > 1:
2727 if len(found) > 1:
2728 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2728 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2729 for x in found:
2729 for x in found:
2730 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2730 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2731 raise util.Abort("")
2731 raise util.Abort("")
2732 if len(found) == 1:
2732 if len(found) == 1:
2733 node = found[0]
2733 node = found[0]
2734 repo.ui.warn(_("Using head %s for branch %s\n")
2734 repo.ui.warn(_("Using head %s for branch %s\n")
2735 % (short(node), branch))
2735 % (short(node), branch))
2736 else:
2736 else:
2737 raise util.Abort(_("branch %s not found\n") % (branch))
2737 raise util.Abort(_("branch %s not found") % branch)
2738 else:
2738 else:
2739 node = node and repo.lookup(node) or repo.changelog.tip()
2739 node = node and repo.lookup(node) or repo.changelog.tip()
2740 return node
2740 return node
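(Editorial sketch: _lookup() resolves a branch name by collecting every head that carries it and refusing to guess when there is more than one. A compact standalone version of that selection rule, assuming a {head: [branch names]} mapping like the one branchlookup() returns:)

def resolve_branch(heads_to_branches, branch):
    found = [node for node, names in heads_to_branches.items()
             if branch in names]
    if not found:
        raise LookupError("branch %s not found" % branch)
    if len(found) > 1:
        raise LookupError("found multiple heads for %s" % branch)
    return found[0]

assert resolve_branch({'n1': ['stable'], 'n2': ['default']}, 'stable') == 'n1'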
2741
2741
2742 def verify(ui, repo):
2742 def verify(ui, repo):
2743 """verify the integrity of the repository
2743 """verify the integrity of the repository
2744
2744
2745 Verify the integrity of the current repository.
2745 Verify the integrity of the current repository.
2746
2746
2747 This will perform an extensive check of the repository's
2747 This will perform an extensive check of the repository's
2748 integrity, validating the hashes and checksums of each entry in
2748 integrity, validating the hashes and checksums of each entry in
2749 the changelog, manifest, and tracked files, as well as the
2749 the changelog, manifest, and tracked files, as well as the
2750 integrity of their crosslinks and indices.
2750 integrity of their crosslinks and indices.
2751 """
2751 """
2752 return hg.verify(repo)
2752 return hg.verify(repo)
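(Editorial sketch: the remainder of the module is the command table. Each key is a command name, with aliases separated by '|' and a leading '^' marking commands shown in short help; each value is a (function, options, usage) tuple. The toy dispatcher below shows how a table of that shape is consumed; all names are illustrative.)

import sys

def dispatch(table, name, ui, repo):
    # Match the command by name or alias, ignoring the '^' short-help marker.
    for key, (func, options, usage) in table.items():
        if name in key.lstrip('^').split('|'):
            return func(ui, repo)
    raise KeyError('unknown command: %s' % name)

class FakeUi:
    def write(self, s):
        sys.stdout.write(s)

class FakeRepo:
    root = '/tmp/repo'

toy_table = {'^root': (lambda ui, repo: ui.write(repo.root + '\n'), [], 'hg root')}
dispatch(toy_table, 'root', FakeUi(), FakeRepo())   # prints /tmp/repo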
2753
2753
2754 # Command options and aliases are listed here, alphabetically
2754 # Command options and aliases are listed here, alphabetically
2755
2755
2756 table = {
2756 table = {
2757 "^add":
2757 "^add":
2758 (add,
2758 (add,
2759 [('I', 'include', [], _('include names matching the given patterns')),
2759 [('I', 'include', [], _('include names matching the given patterns')),
2760 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2760 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2761 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2761 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2762 _('hg add [OPTION]... [FILE]...')),
2762 _('hg add [OPTION]... [FILE]...')),
2763 "addremove":
2763 "addremove":
2764 (addremove,
2764 (addremove,
2765 [('I', 'include', [], _('include names matching the given patterns')),
2765 [('I', 'include', [], _('include names matching the given patterns')),
2766 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2766 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2767 ('n', 'dry-run', None,
2767 ('n', 'dry-run', None,
2768 _('do not perform actions, just print output')),
2768 _('do not perform actions, just print output')),
2769 ('s', 'similarity', '',
2769 ('s', 'similarity', '',
2770 _('guess renamed files by similarity (0<=s<=1)'))],
2770 _('guess renamed files by similarity (0<=s<=1)'))],
2771 _('hg addremove [OPTION]... [FILE]...')),
2771 _('hg addremove [OPTION]... [FILE]...')),
2772 "^annotate":
2772 "^annotate":
2773 (annotate,
2773 (annotate,
2774 [('r', 'rev', '', _('annotate the specified revision')),
2774 [('r', 'rev', '', _('annotate the specified revision')),
2775 ('a', 'text', None, _('treat all files as text')),
2775 ('a', 'text', None, _('treat all files as text')),
2776 ('u', 'user', None, _('list the author')),
2776 ('u', 'user', None, _('list the author')),
2777 ('d', 'date', None, _('list the date')),
2777 ('d', 'date', None, _('list the date')),
2778 ('n', 'number', None, _('list the revision number (default)')),
2778 ('n', 'number', None, _('list the revision number (default)')),
2779 ('c', 'changeset', None, _('list the changeset')),
2779 ('c', 'changeset', None, _('list the changeset')),
2780 ('I', 'include', [], _('include names matching the given patterns')),
2780 ('I', 'include', [], _('include names matching the given patterns')),
2781 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2781 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2782 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2782 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2783 "archive":
2783 "archive":
2784 (archive,
2784 (archive,
2785 [('', 'no-decode', None, _('do not pass files through decoders')),
2785 [('', 'no-decode', None, _('do not pass files through decoders')),
2786 ('p', 'prefix', '', _('directory prefix for files in archive')),
2786 ('p', 'prefix', '', _('directory prefix for files in archive')),
2787 ('r', 'rev', '', _('revision to distribute')),
2787 ('r', 'rev', '', _('revision to distribute')),
2788 ('t', 'type', '', _('type of distribution to create')),
2788 ('t', 'type', '', _('type of distribution to create')),
2789 ('I', 'include', [], _('include names matching the given patterns')),
2789 ('I', 'include', [], _('include names matching the given patterns')),
2790 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2790 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2791 _('hg archive [OPTION]... DEST')),
2791 _('hg archive [OPTION]... DEST')),
2792 "backout":
2792 "backout":
2793 (backout,
2793 (backout,
2794 [('', 'merge', None,
2794 [('', 'merge', None,
2795 _('merge with old dirstate parent after backout')),
2795 _('merge with old dirstate parent after backout')),
2796 ('m', 'message', '', _('use <text> as commit message')),
2796 ('m', 'message', '', _('use <text> as commit message')),
2797 ('l', 'logfile', '', _('read commit message from <file>')),
2797 ('l', 'logfile', '', _('read commit message from <file>')),
2798 ('d', 'date', '', _('record datecode as commit date')),
2798 ('d', 'date', '', _('record datecode as commit date')),
2799 ('', 'parent', '', _('parent to choose when backing out merge')),
2799 ('', 'parent', '', _('parent to choose when backing out merge')),
2800 ('u', 'user', '', _('record user as committer')),
2800 ('u', 'user', '', _('record user as committer')),
2801 ('I', 'include', [], _('include names matching the given patterns')),
2801 ('I', 'include', [], _('include names matching the given patterns')),
2802 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2802 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2803 _('hg backout [OPTION]... REV')),
2803 _('hg backout [OPTION]... REV')),
2804 "bundle":
2804 "bundle":
2805 (bundle,
2805 (bundle,
2806 [('f', 'force', None,
2806 [('f', 'force', None,
2807 _('run even when remote repository is unrelated'))],
2807 _('run even when remote repository is unrelated'))],
2808 _('hg bundle FILE DEST')),
2808 _('hg bundle FILE DEST')),
2809 "cat":
2809 "cat":
2810 (cat,
2810 (cat,
2811 [('o', 'output', '', _('print output to file with formatted name')),
2811 [('o', 'output', '', _('print output to file with formatted name')),
2812 ('r', 'rev', '', _('print the given revision')),
2812 ('r', 'rev', '', _('print the given revision')),
2813 ('I', 'include', [], _('include names matching the given patterns')),
2813 ('I', 'include', [], _('include names matching the given patterns')),
2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2815 _('hg cat [OPTION]... FILE...')),
2815 _('hg cat [OPTION]... FILE...')),
2816 "^clone":
2816 "^clone":
2817 (clone,
2817 (clone,
2818 [('U', 'noupdate', None, _('do not update the new working directory')),
2818 [('U', 'noupdate', None, _('do not update the new working directory')),
2819 ('r', 'rev', [],
2819 ('r', 'rev', [],
2820 _('a changeset you would like to have after cloning')),
2820 _('a changeset you would like to have after cloning')),
2821 ('', 'pull', None, _('use pull protocol to copy metadata')),
2821 ('', 'pull', None, _('use pull protocol to copy metadata')),
2822 ('', 'uncompressed', None,
2822 ('', 'uncompressed', None,
2823 _('use uncompressed transfer (fast over LAN)')),
2823 _('use uncompressed transfer (fast over LAN)')),
2824 ('e', 'ssh', '', _('specify ssh command to use')),
2824 ('e', 'ssh', '', _('specify ssh command to use')),
2825 ('', 'remotecmd', '',
2825 ('', 'remotecmd', '',
2826 _('specify hg command to run on the remote side'))],
2826 _('specify hg command to run on the remote side'))],
2827 _('hg clone [OPTION]... SOURCE [DEST]')),
2827 _('hg clone [OPTION]... SOURCE [DEST]')),
2828 "^commit|ci":
2828 "^commit|ci":
2829 (commit,
2829 (commit,
2830 [('A', 'addremove', None,
2830 [('A', 'addremove', None,
2831 _('mark new/missing files as added/removed before committing')),
2831 _('mark new/missing files as added/removed before committing')),
2832 ('m', 'message', '', _('use <text> as commit message')),
2832 ('m', 'message', '', _('use <text> as commit message')),
2833 ('l', 'logfile', '', _('read the commit message from <file>')),
2833 ('l', 'logfile', '', _('read the commit message from <file>')),
2834 ('d', 'date', '', _('record datecode as commit date')),
2834 ('d', 'date', '', _('record datecode as commit date')),
2835 ('u', 'user', '', _('record user as committer')),
2835 ('u', 'user', '', _('record user as committer')),
2836 ('I', 'include', [], _('include names matching the given patterns')),
2836 ('I', 'include', [], _('include names matching the given patterns')),
2837 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2837 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2838 _('hg commit [OPTION]... [FILE]...')),
2838 _('hg commit [OPTION]... [FILE]...')),
2839 "copy|cp":
2839 "copy|cp":
2840 (copy,
2840 (copy,
2841 [('A', 'after', None, _('record a copy that has already occurred')),
2841 [('A', 'after', None, _('record a copy that has already occurred')),
2842 ('f', 'force', None,
2842 ('f', 'force', None,
2843 _('forcibly copy over an existing managed file')),
2843 _('forcibly copy over an existing managed file')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2844 ('I', 'include', [], _('include names matching the given patterns')),
2845 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2845 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2846 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2846 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2847 _('hg copy [OPTION]... [SOURCE]... DEST')),
2847 _('hg copy [OPTION]... [SOURCE]... DEST')),
2848 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2848 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2849 "debugcomplete":
2849 "debugcomplete":
2850 (debugcomplete,
2850 (debugcomplete,
2851 [('o', 'options', None, _('show the command options'))],
2851 [('o', 'options', None, _('show the command options'))],
2852 _('debugcomplete [-o] CMD')),
2852 _('debugcomplete [-o] CMD')),
2853 "debugrebuildstate":
2853 "debugrebuildstate":
2854 (debugrebuildstate,
2854 (debugrebuildstate,
2855 [('r', 'rev', '', _('revision to rebuild to'))],
2855 [('r', 'rev', '', _('revision to rebuild to'))],
2856 _('debugrebuildstate [-r REV] [REV]')),
2856 _('debugrebuildstate [-r REV] [REV]')),
2857 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2857 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2858 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2858 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2859 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2859 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2860 "debugstate": (debugstate, [], _('debugstate')),
2860 "debugstate": (debugstate, [], _('debugstate')),
2861 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2861 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2862 "debugindex": (debugindex, [], _('debugindex FILE')),
2862 "debugindex": (debugindex, [], _('debugindex FILE')),
2863 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2863 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2864 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2864 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2865 "debugwalk":
2865 "debugwalk":
2866 (debugwalk,
2866 (debugwalk,
2867 [('I', 'include', [], _('include names matching the given patterns')),
2867 [('I', 'include', [], _('include names matching the given patterns')),
2868 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2868 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2869 _('debugwalk [OPTION]... [FILE]...')),
2869 _('debugwalk [OPTION]... [FILE]...')),
2870 "^diff":
2870 "^diff":
2871 (diff,
2871 (diff,
2872 [('r', 'rev', [], _('revision')),
2872 [('r', 'rev', [], _('revision')),
2873 ('a', 'text', None, _('treat all files as text')),
2873 ('a', 'text', None, _('treat all files as text')),
2874 ('p', 'show-function', None,
2874 ('p', 'show-function', None,
2875 _('show which function each change is in')),
2875 _('show which function each change is in')),
2876 ('g', 'git', None, _('use git extended diff format')),
2876 ('g', 'git', None, _('use git extended diff format')),
2877 ('w', 'ignore-all-space', None,
2877 ('w', 'ignore-all-space', None,
2878 _('ignore white space when comparing lines')),
2878 _('ignore white space when comparing lines')),
2879 ('b', 'ignore-space-change', None,
2879 ('b', 'ignore-space-change', None,
2880 _('ignore changes in the amount of white space')),
2880 _('ignore changes in the amount of white space')),
2881 ('B', 'ignore-blank-lines', None,
2881 ('B', 'ignore-blank-lines', None,
2882 _('ignore changes whose lines are all blank')),
2882 _('ignore changes whose lines are all blank')),
2883 ('I', 'include', [], _('include names matching the given patterns')),
2883 ('I', 'include', [], _('include names matching the given patterns')),
2884 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2884 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2885 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2885 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2886 "^export":
2886 "^export":
2887 (export,
2887 (export,
2888 [('o', 'output', '', _('print output to file with formatted name')),
2888 [('o', 'output', '', _('print output to file with formatted name')),
2889 ('a', 'text', None, _('treat all files as text')),
2889 ('a', 'text', None, _('treat all files as text')),
2890 ('g', 'git', None, _('use git extended diff format')),
2890 ('g', 'git', None, _('use git extended diff format')),
2891 ('', 'switch-parent', None, _('diff against the second parent'))],
2891 ('', 'switch-parent', None, _('diff against the second parent'))],
2892 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2892 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2893 "debugforget|forget":
2893 "debugforget|forget":
2894 (forget,
2894 (forget,
2895 [('I', 'include', [], _('include names matching the given patterns')),
2895 [('I', 'include', [], _('include names matching the given patterns')),
2896 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2896 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2897 _('hg forget [OPTION]... FILE...')),
2897 _('hg forget [OPTION]... FILE...')),
2898 "grep":
2898 "grep":
2899 (grep,
2899 (grep,
2900 [('0', 'print0', None, _('end fields with NUL')),
2900 [('0', 'print0', None, _('end fields with NUL')),
2901 ('', 'all', None, _('print all revisions that match')),
2901 ('', 'all', None, _('print all revisions that match')),
2902 ('f', 'follow', None,
2902 ('f', 'follow', None,
2903 _('follow changeset history, or file history across copies and renames')),
2903 _('follow changeset history, or file history across copies and renames')),
2904 ('i', 'ignore-case', None, _('ignore case when matching')),
2904 ('i', 'ignore-case', None, _('ignore case when matching')),
2905 ('l', 'files-with-matches', None,
2905 ('l', 'files-with-matches', None,
2906 _('print only filenames and revs that match')),
2906 _('print only filenames and revs that match')),
2907 ('n', 'line-number', None, _('print matching line numbers')),
2907 ('n', 'line-number', None, _('print matching line numbers')),
2908 ('r', 'rev', [], _('search in given revision range')),
2908 ('r', 'rev', [], _('search in given revision range')),
2909 ('u', 'user', None, _('print user who committed change')),
2909 ('u', 'user', None, _('print user who committed change')),
2910 ('I', 'include', [], _('include names matching the given patterns')),
2910 ('I', 'include', [], _('include names matching the given patterns')),
2911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2911 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2912 _('hg grep [OPTION]... PATTERN [FILE]...')),
2912 _('hg grep [OPTION]... PATTERN [FILE]...')),
2913 "heads":
2913 "heads":
2914 (heads,
2914 (heads,
2915 [('b', 'branches', None, _('show branches')),
2915 [('b', 'branches', None, _('show branches')),
2916 ('', 'style', '', _('display using template map file')),
2916 ('', 'style', '', _('display using template map file')),
2917 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2917 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2918 ('', 'template', '', _('display with template'))],
2918 ('', 'template', '', _('display with template'))],
2919 _('hg heads [-b] [-r <rev>]')),
2919 _('hg heads [-b] [-r <rev>]')),
2920 "help": (help_, [], _('hg help [COMMAND]')),
2920 "help": (help_, [], _('hg help [COMMAND]')),
2921 "identify|id": (identify, [], _('hg identify')),
2921 "identify|id": (identify, [], _('hg identify')),
2922 "import|patch":
2922 "import|patch":
2923 (import_,
2923 (import_,
2924 [('p', 'strip', 1,
2924 [('p', 'strip', 1,
2925 _('directory strip option for patch. This has the same\n'
2925 _('directory strip option for patch. This has the same\n'
2926 'meaning as the corresponding patch option')),
2926 'meaning as the corresponding patch option')),
2927 ('m', 'message', '', _('use <text> as commit message')),
2927 ('m', 'message', '', _('use <text> as commit message')),
2928 ('b', 'base', '', _('base path')),
2928 ('b', 'base', '', _('base path')),
2929 ('f', 'force', None,
2929 ('f', 'force', None,
2930 _('skip check for outstanding uncommitted changes'))],
2930 _('skip check for outstanding uncommitted changes'))],
2931 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2931 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2932 "incoming|in": (incoming,
2932 "incoming|in": (incoming,
2933 [('M', 'no-merges', None, _('do not show merges')),
2933 [('M', 'no-merges', None, _('do not show merges')),
2934 ('f', 'force', None,
2934 ('f', 'force', None,
2935 _('run even when remote repository is unrelated')),
2935 _('run even when remote repository is unrelated')),
2936 ('', 'style', '', _('display using template map file')),
2936 ('', 'style', '', _('display using template map file')),
2937 ('n', 'newest-first', None, _('show newest record first')),
2937 ('n', 'newest-first', None, _('show newest record first')),
2938 ('', 'bundle', '', _('file to store the bundles into')),
2938 ('', 'bundle', '', _('file to store the bundles into')),
2939 ('p', 'patch', None, _('show patch')),
2939 ('p', 'patch', None, _('show patch')),
2940 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2940 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2941 ('', 'template', '', _('display with template')),
2941 ('', 'template', '', _('display with template')),
2942 ('e', 'ssh', '', _('specify ssh command to use')),
2942 ('e', 'ssh', '', _('specify ssh command to use')),
2943 ('', 'remotecmd', '',
2943 ('', 'remotecmd', '',
2944 _('specify hg command to run on the remote side'))],
2944 _('specify hg command to run on the remote side'))],
2945 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2945 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2946 ' [--bundle FILENAME] [SOURCE]')),
2946 ' [--bundle FILENAME] [SOURCE]')),
2947 "^init":
2947 "^init":
2948 (init,
2948 (init,
2949 [('e', 'ssh', '', _('specify ssh command to use')),
2949 [('e', 'ssh', '', _('specify ssh command to use')),
2950 ('', 'remotecmd', '',
2950 ('', 'remotecmd', '',
2951 _('specify hg command to run on the remote side'))],
2951 _('specify hg command to run on the remote side'))],
2952 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2952 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2953 "locate":
2953 "locate":
2954 (locate,
2954 (locate,
2955 [('r', 'rev', '', _('search the repository as it stood at rev')),
2955 [('r', 'rev', '', _('search the repository as it stood at rev')),
2956 ('0', 'print0', None,
2956 ('0', 'print0', None,
2957 _('end filenames with NUL, for use with xargs')),
2957 _('end filenames with NUL, for use with xargs')),
2958 ('f', 'fullpath', None,
2958 ('f', 'fullpath', None,
2959 _('print complete paths from the filesystem root')),
2959 _('print complete paths from the filesystem root')),
2960 ('I', 'include', [], _('include names matching the given patterns')),
2960 ('I', 'include', [], _('include names matching the given patterns')),
2961 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2961 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2962 _('hg locate [OPTION]... [PATTERN]...')),
2962 _('hg locate [OPTION]... [PATTERN]...')),
2963 "^log|history":
2963 "^log|history":
2964 (log,
2964 (log,
2965 [('b', 'branches', None, _('show branches')),
2965 [('b', 'branches', None, _('show branches')),
2966 ('f', 'follow', None,
2966 ('f', 'follow', None,
2967 _('follow changeset history, or file history across copies and renames')),
2967 _('follow changeset history, or file history across copies and renames')),
2968 ('', 'follow-first', None,
2968 ('', 'follow-first', None,
2969 _('only follow the first parent of merge changesets')),
2969 _('only follow the first parent of merge changesets')),
2970 ('k', 'keyword', [], _('search for a keyword')),
2970 ('k', 'keyword', [], _('search for a keyword')),
2971 ('l', 'limit', '', _('limit number of changes displayed')),
2971 ('l', 'limit', '', _('limit number of changes displayed')),
2972 ('r', 'rev', [], _('show the specified revision or range')),
2972 ('r', 'rev', [], _('show the specified revision or range')),
2973 ('M', 'no-merges', None, _('do not show merges')),
2973 ('M', 'no-merges', None, _('do not show merges')),
2974 ('', 'style', '', _('display using template map file')),
2974 ('', 'style', '', _('display using template map file')),
2975 ('m', 'only-merges', None, _('show only merges')),
2975 ('m', 'only-merges', None, _('show only merges')),
2976 ('p', 'patch', None, _('show patch')),
2976 ('p', 'patch', None, _('show patch')),
2977 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2977 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
2978 ('', 'template', '', _('display with template')),
2978 ('', 'template', '', _('display with template')),
2979 ('I', 'include', [], _('include names matching the given patterns')),
2979 ('I', 'include', [], _('include names matching the given patterns')),
2980 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2980 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2981 _('hg log [OPTION]... [FILE]')),
2981 _('hg log [OPTION]... [FILE]')),
2982 "manifest": (manifest, [], _('hg manifest [REV]')),
2982 "manifest": (manifest, [], _('hg manifest [REV]')),
2983 "merge":
2983 "merge":
2984 (merge,
2984 (merge,
2985 [('b', 'branch', '', _('merge with head of a specific branch')),
2985 [('b', 'branch', '', _('merge with head of a specific branch')),
2986 ('f', 'force', None, _('force a merge with outstanding changes'))],
2986 ('f', 'force', None, _('force a merge with outstanding changes'))],
2987 _('hg merge [-b TAG] [-f] [REV]')),
2987 _('hg merge [-b TAG] [-f] [REV]')),
2988 "outgoing|out": (outgoing,
2988 "outgoing|out": (outgoing,
2989 [('M', 'no-merges', None, _('do not show merges')),
2989 [('M', 'no-merges', None, _('do not show merges')),
2990 ('f', 'force', None,
2990 ('f', 'force', None,
2991 _('run even when remote repository is unrelated')),
2991 _('run even when remote repository is unrelated')),
2992 ('p', 'patch', None, _('show patch')),
2992 ('p', 'patch', None, _('show patch')),
2993 ('', 'style', '', _('display using template map file')),
2993 ('', 'style', '', _('display using template map file')),
2994 ('r', 'rev', [], _('a specific revision you would like to push')),
2994 ('r', 'rev', [], _('a specific revision you would like to push')),
2995 ('n', 'newest-first', None, _('show newest record first')),
2995 ('n', 'newest-first', None, _('show newest record first')),
2996 ('', 'template', '', _('display with template')),
2996 ('', 'template', '', _('display with template')),
2997 ('e', 'ssh', '', _('specify ssh command to use')),
2997 ('e', 'ssh', '', _('specify ssh command to use')),
2998 ('', 'remotecmd', '',
2998 ('', 'remotecmd', '',
2999 _('specify hg command to run on the remote side'))],
2999 _('specify hg command to run on the remote side'))],
3000 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3000 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3001 "^parents":
3001 "^parents":
3002 (parents,
3002 (parents,
3003 [('b', 'branches', None, _('show branches')),
3003 [('b', 'branches', None, _('show branches')),
3004 ('r', 'rev', '', _('show parents from the specified rev')),
3004 ('r', 'rev', '', _('show parents from the specified rev')),
3005 ('', 'style', '', _('display using template map file')),
3005 ('', 'style', '', _('display using template map file')),
3006 ('', 'template', '', _('display with template'))],
3006 ('', 'template', '', _('display with template'))],
3007 _('hg parents [-b] [-r REV] [FILE]')),
3007 _('hg parents [-b] [-r REV] [FILE]')),
3008 "paths": (paths, [], _('hg paths [NAME]')),
3008 "paths": (paths, [], _('hg paths [NAME]')),
3009 "^pull":
3009 "^pull":
3010 (pull,
3010 (pull,
3011 [('u', 'update', None,
3011 [('u', 'update', None,
3012 _('update the working directory to tip after pull')),
3012 _('update the working directory to tip after pull')),
3013 ('e', 'ssh', '', _('specify ssh command to use')),
3013 ('e', 'ssh', '', _('specify ssh command to use')),
3014 ('f', 'force', None,
3014 ('f', 'force', None,
3015 _('run even when remote repository is unrelated')),
3015 _('run even when remote repository is unrelated')),
3016 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3016 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3017 ('', 'remotecmd', '',
3017 ('', 'remotecmd', '',
3018 _('specify hg command to run on the remote side'))],
3018 _('specify hg command to run on the remote side'))],
3019 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3019 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3020 "^push":
3020 "^push":
3021 (push,
3021 (push,
3022 [('f', 'force', None, _('force push')),
3022 [('f', 'force', None, _('force push')),
3023 ('e', 'ssh', '', _('specify ssh command to use')),
3023 ('e', 'ssh', '', _('specify ssh command to use')),
3024 ('r', 'rev', [], _('a specific revision you would like to push')),
3024 ('r', 'rev', [], _('a specific revision you would like to push')),
3025 ('', 'remotecmd', '',
3025 ('', 'remotecmd', '',
3026 _('specify hg command to run on the remote side'))],
3026 _('specify hg command to run on the remote side'))],
3027 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3027 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3028 "debugrawcommit|rawcommit":
3028 "debugrawcommit|rawcommit":
3029 (rawcommit,
3029 (rawcommit,
3030 [('p', 'parent', [], _('parent')),
3030 [('p', 'parent', [], _('parent')),
3031 ('d', 'date', '', _('date code')),
3031 ('d', 'date', '', _('date code')),
3032 ('u', 'user', '', _('user')),
3032 ('u', 'user', '', _('user')),
3033 ('F', 'files', '', _('file list')),
3033 ('F', 'files', '', _('file list')),
3034 ('m', 'message', '', _('commit message')),
3034 ('m', 'message', '', _('commit message')),
3035 ('l', 'logfile', '', _('commit message file'))],
3035 ('l', 'logfile', '', _('commit message file'))],
3036 _('hg debugrawcommit [OPTION]... [FILE]...')),
3036 _('hg debugrawcommit [OPTION]... [FILE]...')),
3037 "recover": (recover, [], _('hg recover')),
3037 "recover": (recover, [], _('hg recover')),
3038 "^remove|rm":
3038 "^remove|rm":
3039 (remove,
3039 (remove,
3040 [('A', 'after', None, _('record remove that has already occurred')),
3040 [('A', 'after', None, _('record remove that has already occurred')),
3041 ('f', 'force', None, _('remove file even if modified')),
3041 ('f', 'force', None, _('remove file even if modified')),
3042 ('I', 'include', [], _('include names matching the given patterns')),
3042 ('I', 'include', [], _('include names matching the given patterns')),
3043 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3043 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3044 _('hg remove [OPTION]... FILE...')),
3044 _('hg remove [OPTION]... FILE...')),
3045 "rename|mv":
3045 "rename|mv":
3046 (rename,
3046 (rename,
3047 [('A', 'after', None, _('record a rename that has already occurred')),
3047 [('A', 'after', None, _('record a rename that has already occurred')),
3048 ('f', 'force', None,
3048 ('f', 'force', None,
3049 _('forcibly copy over an existing managed file')),
3049 _('forcibly copy over an existing managed file')),
3050 ('I', 'include', [], _('include names matching the given patterns')),
3050 ('I', 'include', [], _('include names matching the given patterns')),
3051 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3051 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3052 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3052 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3053 _('hg rename [OPTION]... SOURCE... DEST')),
3053 _('hg rename [OPTION]... SOURCE... DEST')),
3054 "^revert":
3054 "^revert":
3055 (revert,
3055 (revert,
3056 [('a', 'all', None, _('revert all changes when no arguments given')),
3056 [('a', 'all', None, _('revert all changes when no arguments given')),
3057 ('r', 'rev', '', _('revision to revert to')),
3057 ('r', 'rev', '', _('revision to revert to')),
3058 ('', 'no-backup', None, _('do not save backup copies of files')),
3058 ('', 'no-backup', None, _('do not save backup copies of files')),
3059 ('I', 'include', [], _('include names matching given patterns')),
3059 ('I', 'include', [], _('include names matching given patterns')),
3060 ('X', 'exclude', [], _('exclude names matching given patterns')),
3060 ('X', 'exclude', [], _('exclude names matching given patterns')),
3061 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3061 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3062 _('hg revert [-r REV] [NAME]...')),
3062 _('hg revert [-r REV] [NAME]...')),
3063 "rollback": (rollback, [], _('hg rollback')),
3063 "rollback": (rollback, [], _('hg rollback')),
3064 "root": (root, [], _('hg root')),
3064 "root": (root, [], _('hg root')),
3065 "^serve":
3065 "^serve":
3066 (serve,
3066 (serve,
3067 [('A', 'accesslog', '', _('name of access log file to write to')),
3067 [('A', 'accesslog', '', _('name of access log file to write to')),
3068 ('d', 'daemon', None, _('run server in background')),
3068 ('d', 'daemon', None, _('run server in background')),
3069 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3069 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3070 ('E', 'errorlog', '', _('name of error log file to write to')),
3070 ('E', 'errorlog', '', _('name of error log file to write to')),
3071 ('p', 'port', 0, _('port to use (default: 8000)')),
3071 ('p', 'port', 0, _('port to use (default: 8000)')),
3072 ('a', 'address', '', _('address to use')),
3072 ('a', 'address', '', _('address to use')),
3073 ('n', 'name', '',
3073 ('n', 'name', '',
3074 _('name to show in web pages (default: working dir)')),
3074 _('name to show in web pages (default: working dir)')),
3075 ('', 'webdir-conf', '', _('name of the webdir config file'
3075 ('', 'webdir-conf', '', _('name of the webdir config file'
3076 ' (serve more than one repo)')),
3076 ' (serve more than one repo)')),
3077 ('', 'pid-file', '', _('name of file to write process ID to')),
3077 ('', 'pid-file', '', _('name of file to write process ID to')),
3078 ('', 'stdio', None, _('for remote clients')),
3078 ('', 'stdio', None, _('for remote clients')),
3079 ('t', 'templates', '', _('web templates to use')),
3079 ('t', 'templates', '', _('web templates to use')),
3080 ('', 'style', '', _('template style to use')),
3080 ('', 'style', '', _('template style to use')),
3081 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3081 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3082 _('hg serve [OPTION]...')),
3082 _('hg serve [OPTION]...')),
3083 "^status|st":
3083 "^status|st":
3084 (status,
3084 (status,
3085 [('A', 'all', None, _('show status of all files')),
3085 [('A', 'all', None, _('show status of all files')),
3086 ('m', 'modified', None, _('show only modified files')),
3086 ('m', 'modified', None, _('show only modified files')),
3087 ('a', 'added', None, _('show only added files')),
3087 ('a', 'added', None, _('show only added files')),
3088 ('r', 'removed', None, _('show only removed files')),
3088 ('r', 'removed', None, _('show only removed files')),
3089 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3089 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3090 ('c', 'clean', None, _('show only files without changes')),
3090 ('c', 'clean', None, _('show only files without changes')),
3091 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3091 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3092 ('i', 'ignored', None, _('show ignored files')),
3092 ('i', 'ignored', None, _('show ignored files')),
3093 ('n', 'no-status', None, _('hide status prefix')),
3093 ('n', 'no-status', None, _('hide status prefix')),
3094 ('C', 'copies', None, _('show source of copied files')),
3094 ('C', 'copies', None, _('show source of copied files')),
3095 ('0', 'print0', None,
3095 ('0', 'print0', None,
3096 _('end filenames with NUL, for use with xargs')),
3096 _('end filenames with NUL, for use with xargs')),
3097 ('I', 'include', [], _('include names matching the given patterns')),
3097 ('I', 'include', [], _('include names matching the given patterns')),
3098 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3098 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3099 _('hg status [OPTION]... [FILE]...')),
3099 _('hg status [OPTION]... [FILE]...')),
3100 "tag":
3100 "tag":
3101 (tag,
3101 (tag,
3102 [('l', 'local', None, _('make the tag local')),
3102 [('l', 'local', None, _('make the tag local')),
3103 ('m', 'message', '', _('message for tag commit log entry')),
3103 ('m', 'message', '', _('message for tag commit log entry')),
3104 ('d', 'date', '', _('record datecode as commit date')),
3104 ('d', 'date', '', _('record datecode as commit date')),
3105 ('u', 'user', '', _('record user as committer')),
3105 ('u', 'user', '', _('record user as committer')),
3106 ('r', 'rev', '', _('revision to tag'))],
3106 ('r', 'rev', '', _('revision to tag'))],
3107 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3107 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3108 "tags": (tags, [], _('hg tags')),
3108 "tags": (tags, [], _('hg tags')),
3109 "tip":
3109 "tip":
3110 (tip,
3110 (tip,
3111 [('b', 'branches', None, _('show branches')),
3111 [('b', 'branches', None, _('show branches')),
3112 ('', 'style', '', _('display using template map file')),
3112 ('', 'style', '', _('display using template map file')),
3113 ('p', 'patch', None, _('show patch')),
3113 ('p', 'patch', None, _('show patch')),
3114 ('', 'template', '', _('display with template'))],
3114 ('', 'template', '', _('display with template'))],
3115 _('hg tip [-b] [-p]')),
3115 _('hg tip [-b] [-p]')),
3116 "unbundle":
3116 "unbundle":
3117 (unbundle,
3117 (unbundle,
3118 [('u', 'update', None,
3118 [('u', 'update', None,
3119 _('update the working directory to tip after unbundle'))],
3119 _('update the working directory to tip after unbundle'))],
3120 _('hg unbundle [-u] FILE')),
3120 _('hg unbundle [-u] FILE')),
3121 "debugundo|undo": (undo, [], _('hg undo')),
3121 "debugundo|undo": (undo, [], _('hg undo')),
3122 "^update|up|checkout|co":
3122 "^update|up|checkout|co":
3123 (update,
3123 (update,
3124 [('b', 'branch', '', _('checkout the head of a specific branch')),
3124 [('b', 'branch', '', _('checkout the head of a specific branch')),
3125 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3125 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3126 ('C', 'clean', None, _('overwrite locally modified files')),
3126 ('C', 'clean', None, _('overwrite locally modified files')),
3127 ('f', 'force', None, _('force a merge with outstanding changes'))],
3127 ('f', 'force', None, _('force a merge with outstanding changes'))],
3128 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3128 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3129 "verify": (verify, [], _('hg verify')),
3129 "verify": (verify, [], _('hg verify')),
3130 "version": (show_version, [], _('hg version')),
3130 "version": (show_version, [], _('hg version')),
3131 }
3131 }
3132
3132
3133 globalopts = [
3133 globalopts = [
3134 ('R', 'repository', '',
3134 ('R', 'repository', '',
3135 _('repository root directory or symbolic path name')),
3135 _('repository root directory or symbolic path name')),
3136 ('', 'cwd', '', _('change working directory')),
3136 ('', 'cwd', '', _('change working directory')),
3137 ('y', 'noninteractive', None,
3137 ('y', 'noninteractive', None,
3138 _('do not prompt, assume \'yes\' for any required answers')),
3138 _('do not prompt, assume \'yes\' for any required answers')),
3139 ('q', 'quiet', None, _('suppress output')),
3139 ('q', 'quiet', None, _('suppress output')),
3140 ('v', 'verbose', None, _('enable additional output')),
3140 ('v', 'verbose', None, _('enable additional output')),
3141 ('', 'config', [], _('set/override config option')),
3141 ('', 'config', [], _('set/override config option')),
3142 ('', 'debug', None, _('enable debugging output')),
3142 ('', 'debug', None, _('enable debugging output')),
3143 ('', 'debugger', None, _('start debugger')),
3143 ('', 'debugger', None, _('start debugger')),
3144 ('', 'lsprof', None, _('print improved command execution profile')),
3144 ('', 'lsprof', None, _('print improved command execution profile')),
3145 ('', 'traceback', None, _('print traceback on exception')),
3145 ('', 'traceback', None, _('print traceback on exception')),
3146 ('', 'time', None, _('time how long the command takes')),
3146 ('', 'time', None, _('time how long the command takes')),
3147 ('', 'profile', None, _('print command execution profile')),
3147 ('', 'profile', None, _('print command execution profile')),
3148 ('', 'version', None, _('output version information and exit')),
3148 ('', 'version', None, _('output version information and exit')),
3149 ('h', 'help', None, _('display help and exit')),
3149 ('h', 'help', None, _('display help and exit')),
3150 ]
3150 ]
3151
3151
3152 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3152 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3153 " debugindex debugindexdot")
3153 " debugindex debugindexdot")
3154 optionalrepo = ("paths serve debugconfig")
3154 optionalrepo = ("paths serve debugconfig")
3155
3155
3156 def findpossible(ui, cmd):
3156 def findpossible(ui, cmd):
3157 """
3157 """
3158 Return cmd -> (aliases, command table entry)
3158 Return cmd -> (aliases, command table entry)
3159 for each matching command.
3159 for each matching command.
3160 Return debug commands (or their aliases) only if no normal command matches.
3160 Return debug commands (or their aliases) only if no normal command matches.
3161 """
3161 """
3162 choice = {}
3162 choice = {}
3163 debugchoice = {}
3163 debugchoice = {}
3164 for e in table.keys():
3164 for e in table.keys():
3165 aliases = e.lstrip("^").split("|")
3165 aliases = e.lstrip("^").split("|")
3166 found = None
3166 found = None
3167 if cmd in aliases:
3167 if cmd in aliases:
3168 found = cmd
3168 found = cmd
3169 elif not ui.config("ui", "strict"):
3169 elif not ui.config("ui", "strict"):
3170 for a in aliases:
3170 for a in aliases:
3171 if a.startswith(cmd):
3171 if a.startswith(cmd):
3172 found = a
3172 found = a
3173 break
3173 break
3174 if found is not None:
3174 if found is not None:
3175 if aliases[0].startswith("debug"):
3175 if aliases[0].startswith("debug"):
3176 debugchoice[found] = (aliases, table[e])
3176 debugchoice[found] = (aliases, table[e])
3177 else:
3177 else:
3178 choice[found] = (aliases, table[e])
3178 choice[found] = (aliases, table[e])
3179
3179
3180 if not choice and debugchoice:
3180 if not choice and debugchoice:
3181 choice = debugchoice
3181 choice = debugchoice
3182
3182
3183 return choice
3183 return choice
3184
3184
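
# Illustrative sketch (not part of the changeset): the prefix matching done by
# findpossible() above, reproduced standalone so its behaviour is easy to try.
# The command table below is hypothetical and much smaller than the real one.
def _match(cmd, table, strict=False):
    choice = {}
    for entry in table:
        aliases = entry.lstrip("^").split("|")
        if cmd in aliases:
            choice[cmd] = entry
        elif not strict:
            for a in aliases:
                if a.startswith(cmd):
                    choice[a] = entry
                    break
    return choice

# _match("st", ["^status|st", "^serve"])  -> {'st': '^status|st'}
# _match("se", ["^status|st", "^serve"])  -> {'serve': '^serve'}
# _match("s",  ["^status|st", "^serve"])  -> two entries, i.e. an ambiguous command
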
3185 def findcmd(ui, cmd):
3185 def findcmd(ui, cmd):
3186 """Return (aliases, command table entry) for command string."""
3186 """Return (aliases, command table entry) for command string."""
3187 choice = findpossible(ui, cmd)
3187 choice = findpossible(ui, cmd)
3188
3188
3189 if choice.has_key(cmd):
3189 if choice.has_key(cmd):
3190 return choice[cmd]
3190 return choice[cmd]
3191
3191
3192 if len(choice) > 1:
3192 if len(choice) > 1:
3193 clist = choice.keys()
3193 clist = choice.keys()
3194 clist.sort()
3194 clist.sort()
3195 raise AmbiguousCommand(cmd, clist)
3195 raise AmbiguousCommand(cmd, clist)
3196
3196
3197 if choice:
3197 if choice:
3198 return choice.values()[0]
3198 return choice.values()[0]
3199
3199
3200 raise UnknownCommand(cmd)
3200 raise UnknownCommand(cmd)
3201
3201
3202 def catchterm(*args):
3202 def catchterm(*args):
3203 raise util.SignalInterrupt
3203 raise util.SignalInterrupt
3204
3204
3205 def run():
3205 def run():
3206 sys.exit(dispatch(sys.argv[1:]))
3206 sys.exit(dispatch(sys.argv[1:]))
3207
3207
3208 class ParseError(Exception):
3208 class ParseError(Exception):
3209 """Exception raised on errors in parsing the command line."""
3209 """Exception raised on errors in parsing the command line."""
3210
3210
3211 def parse(ui, args):
3211 def parse(ui, args):
3212 options = {}
3212 options = {}
3213 cmdoptions = {}
3213 cmdoptions = {}
3214
3214
3215 try:
3215 try:
3216 args = fancyopts.fancyopts(args, globalopts, options)
3216 args = fancyopts.fancyopts(args, globalopts, options)
3217 except fancyopts.getopt.GetoptError, inst:
3217 except fancyopts.getopt.GetoptError, inst:
3218 raise ParseError(None, inst)
3218 raise ParseError(None, inst)
3219
3219
3220 if args:
3220 if args:
3221 cmd, args = args[0], args[1:]
3221 cmd, args = args[0], args[1:]
3222 aliases, i = findcmd(ui, cmd)
3222 aliases, i = findcmd(ui, cmd)
3223 cmd = aliases[0]
3223 cmd = aliases[0]
3224 defaults = ui.config("defaults", cmd)
3224 defaults = ui.config("defaults", cmd)
3225 if defaults:
3225 if defaults:
3226 args = shlex.split(defaults) + args
3226 args = shlex.split(defaults) + args
3227 c = list(i[1])
3227 c = list(i[1])
3228 else:
3228 else:
3229 cmd = None
3229 cmd = None
3230 c = []
3230 c = []
3231
3231
3232 # combine global options into local
3232 # combine global options into local
3233 for o in globalopts:
3233 for o in globalopts:
3234 c.append((o[0], o[1], options[o[1]], o[3]))
3234 c.append((o[0], o[1], options[o[1]], o[3]))
3235
3235
3236 try:
3236 try:
3237 args = fancyopts.fancyopts(args, c, cmdoptions)
3237 args = fancyopts.fancyopts(args, c, cmdoptions)
3238 except fancyopts.getopt.GetoptError, inst:
3238 except fancyopts.getopt.GetoptError, inst:
3239 raise ParseError(cmd, inst)
3239 raise ParseError(cmd, inst)
3240
3240
3241 # separate global options back out
3241 # separate global options back out
3242 for o in globalopts:
3242 for o in globalopts:
3243 n = o[1]
3243 n = o[1]
3244 options[n] = cmdoptions[n]
3244 options[n] = cmdoptions[n]
3245 del cmdoptions[n]
3245 del cmdoptions[n]
3246
3246
3247 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3247 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3248
3248
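
# Illustrative sketch (not part of the changeset): how a [defaults] entry is
# folded into the argument list in parse() before fancyopts handles the
# command options.  The defaults string and arguments are hypothetical.
import shlex

defaults = "-v --limit 5"            # e.g. "[defaults]\nsomecmd = -v --limit 5" in hgrc
args = ["-r", "tip"]
args = shlex.split(defaults) + args
# args is now ['-v', '--limit', '5', '-r', 'tip'], which fancyopts parses next
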
3249 external = {}
3249 external = {}
3250
3250
3251 def findext(name):
3251 def findext(name):
3252 '''return module with given extension name'''
3252 '''return module with given extension name'''
3253 try:
3253 try:
3254 return sys.modules[external[name]]
3254 return sys.modules[external[name]]
3255 except KeyError:
3255 except KeyError:
3256 for k, v in external.iteritems():
3256 for k, v in external.iteritems():
3257 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3257 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3258 return sys.modules[v]
3258 return sys.modules[v]
3259 raise KeyError(name)
3259 raise KeyError(name)
3260
3260
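
# Illustrative lookups (not part of the changeset) showing how findext()
# resolves a name against the `external` map; the entries are hypothetical.
#
#   external = {'hgext.mq': 'hgext.mq', 'foo': 'hgext_foo'}
#   findext('foo')       -> sys.modules['hgext_foo']   (direct hit on external['foo'])
#   findext('mq')        -> sys.modules['hgext.mq']    (key 'hgext.mq' ends with '.mq')
#   findext('hgext_foo') -> sys.modules['hgext_foo']   (value equals the requested name)
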
3261 def load_extensions(ui):
3261 def load_extensions(ui):
3262 added = []
3262 added = []
3263 for ext_name, load_from_name in ui.extensions():
3263 for ext_name, load_from_name in ui.extensions():
3264 if ext_name in external:
3264 if ext_name in external:
3265 continue
3265 continue
3266 try:
3266 try:
3267 if load_from_name:
3267 if load_from_name:
3268 # the module will be loaded in sys.modules
3268 # the module will be loaded in sys.modules
3269 # choose a unique name so that it doesn't
3269 # choose a unique name so that it doesn't
3270 # conflict with other modules
3270 # conflict with other modules
3271 module_name = "hgext_%s" % ext_name.replace('.', '_')
3271 module_name = "hgext_%s" % ext_name.replace('.', '_')
3272 mod = imp.load_source(module_name, load_from_name)
3272 mod = imp.load_source(module_name, load_from_name)
3273 else:
3273 else:
3274 def importh(name):
3274 def importh(name):
3275 mod = __import__(name)
3275 mod = __import__(name)
3276 components = name.split('.')
3276 components = name.split('.')
3277 for comp in components[1:]:
3277 for comp in components[1:]:
3278 mod = getattr(mod, comp)
3278 mod = getattr(mod, comp)
3279 return mod
3279 return mod
3280 try:
3280 try:
3281 mod = importh("hgext.%s" % ext_name)
3281 mod = importh("hgext.%s" % ext_name)
3282 except ImportError:
3282 except ImportError:
3283 mod = importh(ext_name)
3283 mod = importh(ext_name)
3284 external[ext_name] = mod.__name__
3284 external[ext_name] = mod.__name__
3285 added.append((mod, ext_name))
3285 added.append((mod, ext_name))
3286 except (util.SignalInterrupt, KeyboardInterrupt):
3286 except (util.SignalInterrupt, KeyboardInterrupt):
3287 raise
3287 raise
3288 except Exception, inst:
3288 except Exception, inst:
3289 ui.warn(_("*** failed to import extension %s: %s\n") %
3289 ui.warn(_("*** failed to import extension %s: %s\n") %
3290 (ext_name, inst))
3290 (ext_name, inst))
3291 if ui.print_exc():
3291 if ui.print_exc():
3292 return 1
3292 return 1
3293
3293
3294 for mod, name in added:
3294 for mod, name in added:
3295 uisetup = getattr(mod, 'uisetup', None)
3295 uisetup = getattr(mod, 'uisetup', None)
3296 if uisetup:
3296 if uisetup:
3297 uisetup(ui)
3297 uisetup(ui)
3298 cmdtable = getattr(mod, 'cmdtable', {})
3298 cmdtable = getattr(mod, 'cmdtable', {})
3299 for t in cmdtable:
3299 for t in cmdtable:
3300 if t in table:
3300 if t in table:
3301 ui.warn(_("module %s overrides %s\n") % (name, t))
3301 ui.warn(_("module %s overrides %s\n") % (name, t))
3302 table.update(cmdtable)
3302 table.update(cmdtable)
3303
3303
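
# Illustrative extension skeleton (not part of the changeset) matching what
# load_extensions() above looks for: an optional uisetup(ui), an optional
# reposetup(ui, repo) picked up later by dispatch(), and a cmdtable merged
# into the main command table.  All names here are hypothetical.
from mercurial.i18n import gettext as _

def uisetup(ui):
    # called once per ui object, before any command runs
    ui.debug(_("hello extension loaded\n"))

def reposetup(ui, repo):
    # called for every repository object created while the extension is loaded
    pass

def hello(ui, repo, **opts):
    """print a greeting (illustrative command)"""
    ui.write(_("hello from %s\n") % repo.root)

cmdtable = {
    "hello": (hello, [], _("hg hello")),
}
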
3304 def dispatch(args):
3304 def dispatch(args):
3305 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3305 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3306 num = getattr(signal, name, None)
3306 num = getattr(signal, name, None)
3307 if num: signal.signal(num, catchterm)
3307 if num: signal.signal(num, catchterm)
3308
3308
3309 try:
3309 try:
3310 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3310 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3311 except util.Abort, inst:
3311 except util.Abort, inst:
3312 sys.stderr.write(_("abort: %s\n") % inst)
3312 sys.stderr.write(_("abort: %s\n") % inst)
3313 return -1
3313 return -1
3314
3314
3315 load_extensions(u)
3315 load_extensions(u)
3316 u.addreadhook(load_extensions)
3316 u.addreadhook(load_extensions)
3317
3317
3318 try:
3318 try:
3319 cmd, func, args, options, cmdoptions = parse(u, args)
3319 cmd, func, args, options, cmdoptions = parse(u, args)
3320 if options["time"]:
3320 if options["time"]:
3321 def get_times():
3321 def get_times():
3322 t = os.times()
3322 t = os.times()
3323 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3323 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3324 t = (t[0], t[1], t[2], t[3], time.clock())
3324 t = (t[0], t[1], t[2], t[3], time.clock())
3325 return t
3325 return t
3326 s = get_times()
3326 s = get_times()
3327 def print_time():
3327 def print_time():
3328 t = get_times()
3328 t = get_times()
3329 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3329 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3330 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3330 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3331 atexit.register(print_time)
3331 atexit.register(print_time)
3332
3332
3333 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3333 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3334 not options["noninteractive"], options["traceback"],
3334 not options["noninteractive"], options["traceback"],
3335 options["config"])
3335 options["config"])
3336
3336
3337 # enter the debugger before command execution
3337 # enter the debugger before command execution
3338 if options['debugger']:
3338 if options['debugger']:
3339 pdb.set_trace()
3339 pdb.set_trace()
3340
3340
3341 try:
3341 try:
3342 if options['cwd']:
3342 if options['cwd']:
3343 try:
3343 try:
3344 os.chdir(options['cwd'])
3344 os.chdir(options['cwd'])
3345 except OSError, inst:
3345 except OSError, inst:
3346 raise util.Abort('%s: %s' %
3346 raise util.Abort('%s: %s' %
3347 (options['cwd'], inst.strerror))
3347 (options['cwd'], inst.strerror))
3348
3348
3349 path = u.expandpath(options["repository"]) or ""
3349 path = u.expandpath(options["repository"]) or ""
3350 repo = path and hg.repository(u, path=path) or None
3350 repo = path and hg.repository(u, path=path) or None
3351
3351
3352 if options['help']:
3352 if options['help']:
3353 return help_(u, cmd, options['version'])
3353 return help_(u, cmd, options['version'])
3354 elif options['version']:
3354 elif options['version']:
3355 return show_version(u)
3355 return show_version(u)
3356 elif not cmd:
3356 elif not cmd:
3357 return help_(u, 'shortlist')
3357 return help_(u, 'shortlist')
3358
3358
3359 if cmd not in norepo.split():
3359 if cmd not in norepo.split():
3360 try:
3360 try:
3361 if not repo:
3361 if not repo:
3362 repo = hg.repository(u, path=path)
3362 repo = hg.repository(u, path=path)
3363 u = repo.ui
3363 u = repo.ui
3364 for name in external.itervalues():
3364 for name in external.itervalues():
3365 mod = sys.modules[name]
3365 mod = sys.modules[name]
3366 if hasattr(mod, 'reposetup'):
3366 if hasattr(mod, 'reposetup'):
3367 mod.reposetup(u, repo)
3367 mod.reposetup(u, repo)
3368 hg.repo_setup_hooks.append(mod.reposetup)
3368 hg.repo_setup_hooks.append(mod.reposetup)
3369 except hg.RepoError:
3369 except hg.RepoError:
3370 if cmd not in optionalrepo.split():
3370 if cmd not in optionalrepo.split():
3371 raise
3371 raise
3372 d = lambda: func(u, repo, *args, **cmdoptions)
3372 d = lambda: func(u, repo, *args, **cmdoptions)
3373 else:
3373 else:
3374 d = lambda: func(u, *args, **cmdoptions)
3374 d = lambda: func(u, *args, **cmdoptions)
3375
3375
3376 # reupdate the options, repo/.hg/hgrc may have changed them
3376 # reupdate the options, repo/.hg/hgrc may have changed them
3377 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3377 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3378 not options["noninteractive"], options["traceback"],
3378 not options["noninteractive"], options["traceback"],
3379 options["config"])
3379 options["config"])
3380
3380
3381 try:
3381 try:
3382 if options['profile']:
3382 if options['profile']:
3383 import hotshot, hotshot.stats
3383 import hotshot, hotshot.stats
3384 prof = hotshot.Profile("hg.prof")
3384 prof = hotshot.Profile("hg.prof")
3385 try:
3385 try:
3386 try:
3386 try:
3387 return prof.runcall(d)
3387 return prof.runcall(d)
3388 except:
3388 except:
3389 try:
3389 try:
3390 u.warn(_('exception raised - generating '
3390 u.warn(_('exception raised - generating '
3391 'profile anyway\n'))
3391 'profile anyway\n'))
3392 except:
3392 except:
3393 pass
3393 pass
3394 raise
3394 raise
3395 finally:
3395 finally:
3396 prof.close()
3396 prof.close()
3397 stats = hotshot.stats.load("hg.prof")
3397 stats = hotshot.stats.load("hg.prof")
3398 stats.strip_dirs()
3398 stats.strip_dirs()
3399 stats.sort_stats('time', 'calls')
3399 stats.sort_stats('time', 'calls')
3400 stats.print_stats(40)
3400 stats.print_stats(40)
3401 elif options['lsprof']:
3401 elif options['lsprof']:
3402 try:
3402 try:
3403 from mercurial import lsprof
3403 from mercurial import lsprof
3404 except ImportError:
3404 except ImportError:
3405 raise util.Abort(_(
3405 raise util.Abort(_(
3406 'lsprof not available - install from '
3406 'lsprof not available - install from '
3407 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3407 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3408 p = lsprof.Profiler()
3408 p = lsprof.Profiler()
3409 p.enable(subcalls=True)
3409 p.enable(subcalls=True)
3410 try:
3410 try:
3411 return d()
3411 return d()
3412 finally:
3412 finally:
3413 p.disable()
3413 p.disable()
3414 stats = lsprof.Stats(p.getstats())
3414 stats = lsprof.Stats(p.getstats())
3415 stats.sort()
3415 stats.sort()
3416 stats.pprint(top=10, file=sys.stderr, climit=5)
3416 stats.pprint(top=10, file=sys.stderr, climit=5)
3417 else:
3417 else:
3418 return d()
3418 return d()
3419 finally:
3419 finally:
3420 u.flush()
3420 u.flush()
3421 except:
3421 except:
3422 # enter the debugger when we hit an exception
3422 # enter the debugger when we hit an exception
3423 if options['debugger']:
3423 if options['debugger']:
3424 pdb.post_mortem(sys.exc_info()[2])
3424 pdb.post_mortem(sys.exc_info()[2])
3425 u.print_exc()
3425 u.print_exc()
3426 raise
3426 raise
3427 except ParseError, inst:
3427 except ParseError, inst:
3428 if inst.args[0]:
3428 if inst.args[0]:
3429 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3429 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3430 help_(u, inst.args[0])
3430 help_(u, inst.args[0])
3431 else:
3431 else:
3432 u.warn(_("hg: %s\n") % inst.args[1])
3432 u.warn(_("hg: %s\n") % inst.args[1])
3433 help_(u, 'shortlist')
3433 help_(u, 'shortlist')
3434 except AmbiguousCommand, inst:
3434 except AmbiguousCommand, inst:
3435 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3435 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3436 (inst.args[0], " ".join(inst.args[1])))
3436 (inst.args[0], " ".join(inst.args[1])))
3437 except UnknownCommand, inst:
3437 except UnknownCommand, inst:
3438 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3438 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3439 help_(u, 'shortlist')
3439 help_(u, 'shortlist')
3440 except hg.RepoError, inst:
3440 except hg.RepoError, inst:
3441 u.warn(_("abort: %s!\n") % inst)
3441 u.warn(_("abort: %s!\n") % inst)
3442 except lock.LockHeld, inst:
3442 except lock.LockHeld, inst:
3443 if inst.errno == errno.ETIMEDOUT:
3443 if inst.errno == errno.ETIMEDOUT:
3444 reason = _('timed out waiting for lock held by %s') % inst.locker
3444 reason = _('timed out waiting for lock held by %s') % inst.locker
3445 else:
3445 else:
3446 reason = _('lock held by %s') % inst.locker
3446 reason = _('lock held by %s') % inst.locker
3447 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3447 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3448 except lock.LockUnavailable, inst:
3448 except lock.LockUnavailable, inst:
3449 u.warn(_("abort: could not lock %s: %s\n") %
3449 u.warn(_("abort: could not lock %s: %s\n") %
3450 (inst.desc or inst.filename, inst.strerror))
3450 (inst.desc or inst.filename, inst.strerror))
3451 except revlog.RevlogError, inst:
3451 except revlog.RevlogError, inst:
3452 u.warn(_("abort: "), inst, "!\n")
3452 u.warn(_("abort: %s!\n") % inst)
3453 except util.SignalInterrupt:
3453 except util.SignalInterrupt:
3454 u.warn(_("killed!\n"))
3454 u.warn(_("killed!\n"))
3455 except KeyboardInterrupt:
3455 except KeyboardInterrupt:
3456 try:
3456 try:
3457 u.warn(_("interrupted!\n"))
3457 u.warn(_("interrupted!\n"))
3458 except IOError, inst:
3458 except IOError, inst:
3459 if inst.errno == errno.EPIPE:
3459 if inst.errno == errno.EPIPE:
3460 if u.debugflag:
3460 if u.debugflag:
3461 u.warn(_("\nbroken pipe\n"))
3461 u.warn(_("\nbroken pipe\n"))
3462 else:
3462 else:
3463 raise
3463 raise
3464 except IOError, inst:
3464 except IOError, inst:
3465 if hasattr(inst, "code"):
3465 if hasattr(inst, "code"):
3466 u.warn(_("abort: %s\n") % inst)
3466 u.warn(_("abort: %s\n") % inst)
3467 elif hasattr(inst, "reason"):
3467 elif hasattr(inst, "reason"):
3468 u.warn(_("abort: error: %s\n") % inst.reason[1])
3468 u.warn(_("abort: error: %s\n") % inst.reason[1])
3469 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3469 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3470 if u.debugflag:
3470 if u.debugflag:
3471 u.warn(_("broken pipe\n"))
3471 u.warn(_("broken pipe\n"))
3472 elif getattr(inst, "strerror", None):
3472 elif getattr(inst, "strerror", None):
3473 if getattr(inst, "filename", None):
3473 if getattr(inst, "filename", None):
3474 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3474 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3475 else:
3475 else:
3476 u.warn(_("abort: %s\n") % inst.strerror)
3476 u.warn(_("abort: %s\n") % inst.strerror)
3477 else:
3477 else:
3478 raise
3478 raise
3479 except OSError, inst:
3479 except OSError, inst:
3480 if hasattr(inst, "filename"):
3480 if hasattr(inst, "filename"):
3481 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3481 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3482 else:
3482 else:
3483 u.warn(_("abort: %s\n") % inst.strerror)
3483 u.warn(_("abort: %s\n") % inst.strerror)
3484 except util.Abort, inst:
3484 except util.Abort, inst:
3485 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3485 u.warn(_("abort: %s\n") % inst)
3486 except TypeError, inst:
3486 except TypeError, inst:
3487 # was this an argument error?
3487 # was this an argument error?
3488 tb = traceback.extract_tb(sys.exc_info()[2])
3488 tb = traceback.extract_tb(sys.exc_info()[2])
3489 if len(tb) > 2: # no
3489 if len(tb) > 2: # no
3490 raise
3490 raise
3491 u.debug(inst, "\n")
3491 u.debug(inst, "\n")
3492 u.warn(_("%s: invalid arguments\n") % cmd)
3492 u.warn(_("%s: invalid arguments\n") % cmd)
3493 help_(u, cmd)
3493 help_(u, cmd)
3494 except SystemExit, inst:
3494 except SystemExit, inst:
3495 # Commands shouldn't sys.exit directly, but give a return code.
3495 # Commands shouldn't sys.exit directly, but give a return code.
3496 # Just in case, catch this and pass the exit code to the caller.
3496 # Just in case, catch this and pass the exit code to the caller.
3497 return inst.code
3497 return inst.code
3498 except:
3498 except:
3499 u.warn(_("** unknown exception encountered, details follow\n"))
3499 u.warn(_("** unknown exception encountered, details follow\n"))
3500 u.warn(_("** report bug details to "
3500 u.warn(_("** report bug details to "
3501 "http://www.selenic.com/mercurial/bts\n"))
3501 "http://www.selenic.com/mercurial/bts\n"))
3502 u.warn(_("** or mercurial@selenic.com\n"))
3502 u.warn(_("** or mercurial@selenic.com\n"))
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3503 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3504 % version.get_version())
3504 % version.get_version())
3505 raise
3505 raise
3506
3506
3507 return -1
3507 return -1
@@ -1,231 +1,231 b''
1 # hg.py - repository classes for mercurial
1 # hg.py - repository classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from repo import *
10 from repo import *
11 from demandload import *
11 from demandload import *
12 from i18n import gettext as _
12 from i18n import gettext as _
13 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
13 demandload(globals(), "localrepo bundlerepo httprepo sshrepo statichttprepo")
14 demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
14 demandload(globals(), "errno lock os shutil util merge@_merge verify@_verify")
15
15
16 def _local(path):
16 def _local(path):
17 return (os.path.isfile(path and util.drop_scheme('file', path)) and
17 return (os.path.isfile(path and util.drop_scheme('file', path)) and
18 bundlerepo or localrepo)
18 bundlerepo or localrepo)
19
19
20 schemes = {
20 schemes = {
21 'bundle': bundlerepo,
21 'bundle': bundlerepo,
22 'file': _local,
22 'file': _local,
23 'hg': httprepo,
23 'hg': httprepo,
24 'http': httprepo,
24 'http': httprepo,
25 'https': httprepo,
25 'https': httprepo,
26 'old-http': statichttprepo,
26 'old-http': statichttprepo,
27 'ssh': sshrepo,
27 'ssh': sshrepo,
28 'static-http': statichttprepo,
28 'static-http': statichttprepo,
29 }
29 }
30
30
31 def _lookup(path):
31 def _lookup(path):
32 scheme = 'file'
32 scheme = 'file'
33 if path:
33 if path:
34 c = path.find(':')
34 c = path.find(':')
35 if c > 0:
35 if c > 0:
36 scheme = path[:c]
36 scheme = path[:c]
37 thing = schemes.get(scheme) or schemes['file']
37 thing = schemes.get(scheme) or schemes['file']
38 try:
38 try:
39 return thing(path)
39 return thing(path)
40 except TypeError:
40 except TypeError:
41 return thing
41 return thing
42
42
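
# Illustrative lookups (not part of the changeset) showing how _lookup()
# chooses a backend module from the path's scheme; the paths are hypothetical.
#
#   _lookup("http://example.com/repo")  -> httprepo
#   _lookup("ssh://user@host/repo")     -> sshrepo
#   _lookup("bundle:changes.hg")        -> bundlerepo
#   _lookup("/home/me/repo")            -> _local() -> localrepo (or bundlerepo
#                                          if the path is a bundle file)
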
43 def islocal(repo):
43 def islocal(repo):
44 '''return true if repo or path is local'''
44 '''return true if repo or path is local'''
45 if isinstance(repo, str):
45 if isinstance(repo, str):
46 try:
46 try:
47 return _lookup(repo).islocal(repo)
47 return _lookup(repo).islocal(repo)
48 except AttributeError:
48 except AttributeError:
49 return False
49 return False
50 return repo.local()
50 return repo.local()
51
51
52 repo_setup_hooks = []
52 repo_setup_hooks = []
53
53
54 def repository(ui, path=None, create=False):
54 def repository(ui, path=None, create=False):
55 """return a repository object for the specified path"""
55 """return a repository object for the specified path"""
56 repo = _lookup(path).instance(ui, path, create)
56 repo = _lookup(path).instance(ui, path, create)
57 for hook in repo_setup_hooks:
57 for hook in repo_setup_hooks:
58 hook(ui, repo)
58 hook(ui, repo)
59 return repo
59 return repo
60
60
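
# Illustrative sketch (not part of the changeset): registering a repository
# setup hook so it runs for every repository() call; the hook body is hypothetical.
from mercurial import ui as uimod, hg

def note_repo(ui, repo):
    # repo.root is only meaningful for local repositories
    ui.debug("opened repository at %s\n" % repo.root)

hg.repo_setup_hooks.append(note_repo)
u = uimod.ui()
# every subsequent hg.repository(u, path) call now runs note_repo(u, repo)
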
61 def defaultdest(source):
61 def defaultdest(source):
62 '''return default destination of clone if none is given'''
62 '''return default destination of clone if none is given'''
63 return os.path.basename(os.path.normpath(source))
63 return os.path.basename(os.path.normpath(source))
64
64
65 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
65 def clone(ui, source, dest=None, pull=False, rev=None, update=True,
66 stream=False):
66 stream=False):
67 """Make a copy of an existing repository.
67 """Make a copy of an existing repository.
68
68
69 Create a copy of an existing repository in a new directory. The
69 Create a copy of an existing repository in a new directory. The
70 source and destination are URLs, as passed to the repository
70 source and destination are URLs, as passed to the repository
71 function. Returns a pair of repository objects, the source and
71 function. Returns a pair of repository objects, the source and
72 newly created destination.
72 newly created destination.
73
73
74 The location of the source is added to the new repository's
74 The location of the source is added to the new repository's
75 .hg/hgrc file, as the default to be used for future pulls and
75 .hg/hgrc file, as the default to be used for future pulls and
76 pushes.
76 pushes.
77
77
78 If an exception is raised, the partly cloned/updated destination
78 If an exception is raised, the partly cloned/updated destination
79 repository will be deleted.
79 repository will be deleted.
80
80
81 Arguments:
81 Arguments:
82
82
83 source: repository object or URL
83 source: repository object or URL
84
84
85 dest: URL of destination repository to create (defaults to base
85 dest: URL of destination repository to create (defaults to base
86 name of source repository)
86 name of source repository)
87
87
88 pull: always pull from source repository, even in local case
88 pull: always pull from source repository, even in local case
89
89
90 stream: stream raw data uncompressed from repository (fast over
90 stream: stream raw data uncompressed from repository (fast over
91 LAN, slow over WAN)
91 LAN, slow over WAN)
92
92
93 rev: revision to clone up to (implies pull=True)
93 rev: revision to clone up to (implies pull=True)
94
94
95 update: update working directory after clone completes, if
95 update: update working directory after clone completes, if
96 destination is local repository
96 destination is local repository
97 """
97 """
98 if isinstance(source, str):
98 if isinstance(source, str):
99 src_repo = repository(ui, source)
99 src_repo = repository(ui, source)
100 else:
100 else:
101 src_repo = source
101 src_repo = source
102 source = src_repo.url()
102 source = src_repo.url()
103
103
104 if dest is None:
104 if dest is None:
105 dest = defaultdest(source)
105 dest = defaultdest(source)
106
106
107 def localpath(path):
107 def localpath(path):
108 if path.startswith('file://'):
108 if path.startswith('file://'):
109 return path[7:]
109 return path[7:]
110 if path.startswith('file:'):
110 if path.startswith('file:'):
111 return path[5:]
111 return path[5:]
112 return path
112 return path
113
113
114 dest = localpath(dest)
114 dest = localpath(dest)
115 source = localpath(source)
115 source = localpath(source)
116
116
117 if os.path.exists(dest):
117 if os.path.exists(dest):
118 raise util.Abort(_("destination '%s' already exists"), dest)
118 raise util.Abort(_("destination '%s' already exists") % dest)
119
119
120 class DirCleanup(object):
120 class DirCleanup(object):
121 def __init__(self, dir_):
121 def __init__(self, dir_):
122 self.rmtree = shutil.rmtree
122 self.rmtree = shutil.rmtree
123 self.dir_ = dir_
123 self.dir_ = dir_
124 def close(self):
124 def close(self):
125 self.dir_ = None
125 self.dir_ = None
126 def __del__(self):
126 def __del__(self):
127 if self.dir_:
127 if self.dir_:
128 self.rmtree(self.dir_, True)
128 self.rmtree(self.dir_, True)
129
129
130 dest_repo = repository(ui, dest, create=True)
130 dest_repo = repository(ui, dest, create=True)
131
131
132 dest_path = None
132 dest_path = None
133 dir_cleanup = None
133 dir_cleanup = None
134 if dest_repo.local():
134 if dest_repo.local():
135 dest_path = os.path.realpath(dest_repo.root)
135 dest_path = os.path.realpath(dest_repo.root)
136 dir_cleanup = DirCleanup(dest_path)
136 dir_cleanup = DirCleanup(dest_path)
137
137
138 abspath = source
138 abspath = source
139 copy = False
139 copy = False
140 if src_repo.local() and dest_repo.local():
140 if src_repo.local() and dest_repo.local():
141 abspath = os.path.abspath(source)
141 abspath = os.path.abspath(source)
142 copy = not pull and not rev
142 copy = not pull and not rev
143
143
144 src_lock, dest_lock = None, None
144 src_lock, dest_lock = None, None
145 if copy:
145 if copy:
146 try:
146 try:
147 # we use a lock here because if we race with commit, we
147 # we use a lock here because if we race with commit, we
148 # can end up with extra data in the cloned revlogs that's
148 # can end up with extra data in the cloned revlogs that's
149 # not pointed to by changesets, thus causing verify to
149 # not pointed to by changesets, thus causing verify to
150 # fail
150 # fail
151 src_lock = src_repo.lock()
151 src_lock = src_repo.lock()
152 except lock.LockException:
152 except lock.LockException:
153 copy = False
153 copy = False
154
154
155 if copy:
155 if copy:
156 # we lock here to avoid premature writing to the target
156 # we lock here to avoid premature writing to the target
157 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
157 dest_lock = lock.lock(os.path.join(dest_path, ".hg", "lock"))
158
158
159 # we need to remove the (empty) data dir in dest so copyfiles
159 # we need to remove the (empty) data dir in dest so copyfiles
160 # can do its work
160 # can do its work
161 os.rmdir(os.path.join(dest_path, ".hg", "data"))
161 os.rmdir(os.path.join(dest_path, ".hg", "data"))
162 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
162 files = "data 00manifest.d 00manifest.i 00changelog.d 00changelog.i"
163 for f in files.split():
163 for f in files.split():
164 src = os.path.join(source, ".hg", f)
164 src = os.path.join(source, ".hg", f)
165 dst = os.path.join(dest_path, ".hg", f)
165 dst = os.path.join(dest_path, ".hg", f)
166 try:
166 try:
167 util.copyfiles(src, dst)
167 util.copyfiles(src, dst)
168 except OSError, inst:
168 except OSError, inst:
169 if inst.errno != errno.ENOENT:
169 if inst.errno != errno.ENOENT:
170 raise
170 raise
171
171
172 # we need to re-init the repo after manually copying the data
172 # we need to re-init the repo after manually copying the data
173 # into it
173 # into it
174 dest_repo = repository(ui, dest)
174 dest_repo = repository(ui, dest)
175
175
176 else:
176 else:
177 revs = None
177 revs = None
178 if rev:
178 if rev:
179 if not src_repo.local():
179 if not src_repo.local():
180 raise util.Abort(_("clone by revision not supported yet "
180 raise util.Abort(_("clone by revision not supported yet "
181 "for remote repositories"))
181 "for remote repositories"))
182 revs = [src_repo.lookup(r) for r in rev]
182 revs = [src_repo.lookup(r) for r in rev]
183
183
184 if dest_repo.local():
184 if dest_repo.local():
185 dest_repo.clone(src_repo, heads=revs, stream=stream)
185 dest_repo.clone(src_repo, heads=revs, stream=stream)
186 elif src_repo.local():
186 elif src_repo.local():
187 src_repo.push(dest_repo, revs=revs)
187 src_repo.push(dest_repo, revs=revs)
188 else:
188 else:
189 raise util.Abort(_("clone from remote to remote not supported"))
189 raise util.Abort(_("clone from remote to remote not supported"))
190
190
191 if src_lock:
191 if src_lock:
192 src_lock.release()
192 src_lock.release()
193
193
194 if dest_repo.local():
194 if dest_repo.local():
195 fp = dest_repo.opener("hgrc", "w", text=True)
195 fp = dest_repo.opener("hgrc", "w", text=True)
196 fp.write("[paths]\n")
196 fp.write("[paths]\n")
197 fp.write("default = %s\n" % abspath)
197 fp.write("default = %s\n" % abspath)
198 fp.close()
198 fp.close()
199
199
200 if dest_lock:
200 if dest_lock:
201 dest_lock.release()
201 dest_lock.release()
202
202
203 if update:
203 if update:
204 _merge.update(dest_repo, dest_repo.changelog.tip())
204 _merge.update(dest_repo, dest_repo.changelog.tip())
205 if dir_cleanup:
205 if dir_cleanup:
206 dir_cleanup.close()
206 dir_cleanup.close()
207
207
208 return src_repo, dest_repo
208 return src_repo, dest_repo
209
209
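
# Illustrative sketch (not part of the changeset): driving clone() directly
# from Python; the source URL and destination directory are hypothetical.
from mercurial import ui as uimod, hg

u = uimod.ui()
src_repo, dest_repo = hg.clone(u, "http://example.com/hg/project",
                               dest="project", pull=False, rev=None,
                               update=True, stream=False)
# the new repository's .hg/hgrc now lists the source as "[paths] default"
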
210 def update(repo, node):
210 def update(repo, node):
211 """update the working directory to node, merging linear changes"""
211 """update the working directory to node, merging linear changes"""
212 return _merge.update(repo, node)
212 return _merge.update(repo, node)
213
213
214 def clean(repo, node, wlock=None, show_stats=True):
214 def clean(repo, node, wlock=None, show_stats=True):
215 """forcibly switch the working directory to node, clobbering changes"""
215 """forcibly switch the working directory to node, clobbering changes"""
216 return _merge.update(repo, node, force=True, wlock=wlock,
216 return _merge.update(repo, node, force=True, wlock=wlock,
217 show_stats=show_stats)
217 show_stats=show_stats)
218
218
219 def merge(repo, node, force=None, remind=True, wlock=None):
219 def merge(repo, node, force=None, remind=True, wlock=None):
220 """branch merge with node, resolving changes"""
220 """branch merge with node, resolving changes"""
221 return _merge.update(repo, node, branchmerge=True, force=force,
221 return _merge.update(repo, node, branchmerge=True, force=force,
222 remind=remind, wlock=wlock)
222 remind=remind, wlock=wlock)
223
223
224 def revert(repo, node, choose, wlock):
224 def revert(repo, node, choose, wlock):
225 """revert changes to revision in node without updating dirstate"""
225 """revert changes to revision in node without updating dirstate"""
226 return _merge.update(repo, node, force=True, partial=choose,
226 return _merge.update(repo, node, force=True, partial=choose,
227 show_stats=False, wlock=wlock)
227 show_stats=False, wlock=wlock)
228
228
229 def verify(repo):
229 def verify(repo):
230 """verify the consistency of a repository"""
230 """verify the consistency of a repository"""
231 return _verify.verify(repo)
231 return _verify.verify(repo)
@@ -1,352 +1,352 b''
1 # httprepo.py - HTTP repository proxy classes for mercurial
1 # httprepo.py - HTTP repository proxy classes for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 from node import *
9 from node import *
10 from remoterepo import *
10 from remoterepo import *
11 from i18n import gettext as _
11 from i18n import gettext as _
12 from demandload import *
12 from demandload import *
13 demandload(globals(), "hg os urllib urllib2 urlparse zlib util httplib")
13 demandload(globals(), "hg os urllib urllib2 urlparse zlib util httplib")
14 demandload(globals(), "errno keepalive tempfile socket")
14 demandload(globals(), "errno keepalive tempfile socket")
15
15
16 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
16 class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm):
17 def __init__(self, ui):
17 def __init__(self, ui):
18 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
18 urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self)
19 self.ui = ui
19 self.ui = ui
20
20
21 def find_user_password(self, realm, authuri):
21 def find_user_password(self, realm, authuri):
22 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
22 authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(
23 self, realm, authuri)
23 self, realm, authuri)
24 user, passwd = authinfo
24 user, passwd = authinfo
25 if user and passwd:
25 if user and passwd:
26 return (user, passwd)
26 return (user, passwd)
27
27
28 if not self.ui.interactive:
28 if not self.ui.interactive:
29 raise util.Abort(_('http authorization required'))
29 raise util.Abort(_('http authorization required'))
30
30
31 self.ui.write(_("http authorization required\n"))
31 self.ui.write(_("http authorization required\n"))
32 self.ui.status(_("realm: %s\n") % realm)
32 self.ui.status(_("realm: %s\n") % realm)
33 if user:
33 if user:
34 self.ui.status(_("user: %s\n") % user)
34 self.ui.status(_("user: %s\n") % user)
35 else:
35 else:
36 user = self.ui.prompt(_("user:"), default=None)
36 user = self.ui.prompt(_("user:"), default=None)
37
37
38 if not passwd:
38 if not passwd:
39 passwd = self.ui.getpass()
39 passwd = self.ui.getpass()
40
40
41 self.add_password(realm, authuri, user, passwd)
41 self.add_password(realm, authuri, user, passwd)
42 return (user, passwd)
42 return (user, passwd)
43
43
44 def netlocsplit(netloc):
44 def netlocsplit(netloc):
45 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
45 '''split [user[:passwd]@]host[:port] into 4-tuple.'''
46
46
47 a = netloc.find('@')
47 a = netloc.find('@')
48 if a == -1:
48 if a == -1:
49 user, passwd = None, None
49 user, passwd = None, None
50 else:
50 else:
51 userpass, netloc = netloc[:a], netloc[a+1:]
51 userpass, netloc = netloc[:a], netloc[a+1:]
52 c = userpass.find(':')
52 c = userpass.find(':')
53 if c == -1:
53 if c == -1:
54 user, passwd = urllib.unquote(userpass), None
54 user, passwd = urllib.unquote(userpass), None
55 else:
55 else:
56 user = urllib.unquote(userpass[:c])
56 user = urllib.unquote(userpass[:c])
57 passwd = urllib.unquote(userpass[c+1:])
57 passwd = urllib.unquote(userpass[c+1:])
58 c = netloc.find(':')
58 c = netloc.find(':')
59 if c == -1:
59 if c == -1:
60 host, port = netloc, None
60 host, port = netloc, None
61 else:
61 else:
62 host, port = netloc[:c], netloc[c+1:]
62 host, port = netloc[:c], netloc[c+1:]
63 return host, port, user, passwd
63 return host, port, user, passwd
64
64
65 def netlocunsplit(host, port, user=None, passwd=None):
65 def netlocunsplit(host, port, user=None, passwd=None):
66 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
66 '''turn host, port, user, passwd into [user[:passwd]@]host[:port].'''
67 if port:
67 if port:
68 hostport = host + ':' + port
68 hostport = host + ':' + port
69 else:
69 else:
70 hostport = host
70 hostport = host
71 if user:
71 if user:
72 if passwd:
72 if passwd:
73 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
73 userpass = urllib.quote(user) + ':' + urllib.quote(passwd)
74 else:
74 else:
75 userpass = urllib.quote(user)
75 userpass = urllib.quote(user)
76 return userpass + '@' + hostport
76 return userpass + '@' + hostport
77 return hostport
77 return hostport
78
78
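
# Illustrative round trip (not part of the changeset) through netlocsplit()
# and netlocunsplit(); the credentials are hypothetical.
#
#   netlocsplit("alice:secret@example.com:8000")
#       -> ('example.com', '8000', 'alice', 'secret')
#   netlocunsplit('example.com', '8000', 'alice', 'secret')
#       -> 'alice:secret@example.com:8000'
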
79 class httpconnection(keepalive.HTTPConnection):
79 class httpconnection(keepalive.HTTPConnection):
80 # must be able to send big bundle as stream.
80 # must be able to send big bundle as stream.
81
81
82 def send(self, data):
82 def send(self, data):
83 if isinstance(data, str):
83 if isinstance(data, str):
84 keepalive.HTTPConnection.send(self, data)
84 keepalive.HTTPConnection.send(self, data)
85 else:
85 else:
86 # if auth required, some data sent twice, so rewind here
86 # if auth required, some data sent twice, so rewind here
87 data.seek(0)
87 data.seek(0)
88 for chunk in util.filechunkiter(data):
88 for chunk in util.filechunkiter(data):
89 keepalive.HTTPConnection.send(self, chunk)
89 keepalive.HTTPConnection.send(self, chunk)
90
90
91 class basehttphandler(keepalive.HTTPHandler):
91 class basehttphandler(keepalive.HTTPHandler):
92 def http_open(self, req):
92 def http_open(self, req):
93 return self.do_open(httpconnection, req)
93 return self.do_open(httpconnection, req)
94
94
95 has_https = hasattr(urllib2, 'HTTPSHandler')
95 has_https = hasattr(urllib2, 'HTTPSHandler')
96 if has_https:
96 if has_https:
97 class httpsconnection(httplib.HTTPSConnection):
97 class httpsconnection(httplib.HTTPSConnection):
98 response_class = keepalive.HTTPResponse
98 response_class = keepalive.HTTPResponse
99 # must be able to send big bundle as stream.
99 # must be able to send big bundle as stream.
100
100
101 def send(self, data):
101 def send(self, data):
102 if isinstance(data, str):
102 if isinstance(data, str):
103 httplib.HTTPSConnection.send(self, data)
103 httplib.HTTPSConnection.send(self, data)
104 else:
104 else:
105 # if auth required, some data sent twice, so rewind here
105 # if auth required, some data sent twice, so rewind here
106 data.seek(0)
106 data.seek(0)
107 for chunk in util.filechunkiter(data):
107 for chunk in util.filechunkiter(data):
108 httplib.HTTPSConnection.send(self, chunk)
108 httplib.HTTPSConnection.send(self, chunk)
109
109
110 class httphandler(basehttphandler, urllib2.HTTPSHandler):
110 class httphandler(basehttphandler, urllib2.HTTPSHandler):
111 def https_open(self, req):
111 def https_open(self, req):
112 return self.do_open(httpsconnection, req)
112 return self.do_open(httpsconnection, req)
113 else:
113 else:
114 class httphandler(basehttphandler):
114 class httphandler(basehttphandler):
115 pass
115 pass
116
116
117 class httprepository(remoterepository):
117 class httprepository(remoterepository):
118 def __init__(self, ui, path):
118 def __init__(self, ui, path):
119 self.path = path
119 self.path = path
120 self.caps = None
120 self.caps = None
121 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
121 scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path)
122 if query or frag:
122 if query or frag:
123 raise util.Abort(_('unsupported URL component: "%s"') %
123 raise util.Abort(_('unsupported URL component: "%s"') %
124 (query or frag))
124 (query or frag))
125 if not urlpath: urlpath = '/'
125 if not urlpath: urlpath = '/'
126 host, port, user, passwd = netlocsplit(netloc)
126 host, port, user, passwd = netlocsplit(netloc)
127
127
128 # urllib cannot handle URLs with embedded user or passwd
128 # urllib cannot handle URLs with embedded user or passwd
129 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
129 self._url = urlparse.urlunsplit((scheme, netlocunsplit(host, port),
130 urlpath, '', ''))
130 urlpath, '', ''))
131 self.ui = ui
131 self.ui = ui
132
132
133 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
133 proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy')
134 proxyauthinfo = None
134 proxyauthinfo = None
135 handler = httphandler()
135 handler = httphandler()
136
136
137 if proxyurl:
137 if proxyurl:
138 # proxy can be proper url or host[:port]
138 # proxy can be proper url or host[:port]
139 if not (proxyurl.startswith('http:') or
139 if not (proxyurl.startswith('http:') or
140 proxyurl.startswith('https:')):
140 proxyurl.startswith('https:')):
141 proxyurl = 'http://' + proxyurl + '/'
141 proxyurl = 'http://' + proxyurl + '/'
142 snpqf = urlparse.urlsplit(proxyurl)
142 snpqf = urlparse.urlsplit(proxyurl)
143 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
143 proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf
144 hpup = netlocsplit(proxynetloc)
144 hpup = netlocsplit(proxynetloc)
145
145
146 proxyhost, proxyport, proxyuser, proxypasswd = hpup
146 proxyhost, proxyport, proxyuser, proxypasswd = hpup
147 if not proxyuser:
147 if not proxyuser:
148 proxyuser = ui.config("http_proxy", "user")
148 proxyuser = ui.config("http_proxy", "user")
149 proxypasswd = ui.config("http_proxy", "passwd")
149 proxypasswd = ui.config("http_proxy", "passwd")
150
150
151 # see if we should use a proxy for this url
151 # see if we should use a proxy for this url
152 no_list = [ "localhost", "127.0.0.1" ]
152 no_list = [ "localhost", "127.0.0.1" ]
153 no_list.extend([p.lower() for
153 no_list.extend([p.lower() for
154 p in ui.configlist("http_proxy", "no")])
154 p in ui.configlist("http_proxy", "no")])
155 no_list.extend([p.strip().lower() for
155 no_list.extend([p.strip().lower() for
156 p in os.getenv("no_proxy", '').split(',')
156 p in os.getenv("no_proxy", '').split(',')
157 if p.strip()])
157 if p.strip()])
158 # "http_proxy.always" config is for running tests on localhost
158 # "http_proxy.always" config is for running tests on localhost
159 if (not ui.configbool("http_proxy", "always") and
159 if (not ui.configbool("http_proxy", "always") and
160 host.lower() in no_list):
160 host.lower() in no_list):
161 ui.debug(_('disabling proxy for %s\n') % host)
161 ui.debug(_('disabling proxy for %s\n') % host)
162 else:
162 else:
163 proxyurl = urlparse.urlunsplit((
163 proxyurl = urlparse.urlunsplit((
164 proxyscheme, netlocunsplit(proxyhost, proxyport,
164 proxyscheme, netlocunsplit(proxyhost, proxyport,
165 proxyuser, proxypasswd or ''),
165 proxyuser, proxypasswd or ''),
166 proxypath, proxyquery, proxyfrag))
166 proxypath, proxyquery, proxyfrag))
167 handler = urllib2.ProxyHandler({scheme: proxyurl})
167 handler = urllib2.ProxyHandler({scheme: proxyurl})
168 ui.debug(_('proxying through %s\n') % proxyurl)
168 ui.debug(_('proxying through %s\n') % proxyurl)
169
169
170 # urllib2 takes proxy values from the environment and those
170 # urllib2 takes proxy values from the environment and those
171 # will take precedence if found, so drop them
171 # will take precedence if found, so drop them
172 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
172 for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]:
173 try:
173 try:
174 if os.environ.has_key(env):
174 if os.environ.has_key(env):
175 del os.environ[env]
175 del os.environ[env]
176 except OSError:
176 except OSError:
177 pass
177 pass
178
178
179 passmgr = passwordmgr(ui)
179 passmgr = passwordmgr(ui)
180 if user:
180 if user:
181 ui.debug(_('http auth: user %s, password %s\n') %
181 ui.debug(_('http auth: user %s, password %s\n') %
182 (user, passwd and '*' * len(passwd) or 'not set'))
182 (user, passwd and '*' * len(passwd) or 'not set'))
183 passmgr.add_password(None, host, user, passwd or '')
183 passmgr.add_password(None, host, user, passwd or '')
184
184
185 opener = urllib2.build_opener(
185 opener = urllib2.build_opener(
186 handler,
186 handler,
187 urllib2.HTTPBasicAuthHandler(passmgr),
187 urllib2.HTTPBasicAuthHandler(passmgr),
188 urllib2.HTTPDigestAuthHandler(passmgr))
188 urllib2.HTTPDigestAuthHandler(passmgr))
189
189
190 # 1.0 here is the _protocol_ version
190 # 1.0 here is the _protocol_ version
191 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
191 opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
192 urllib2.install_opener(opener)
192 urllib2.install_opener(opener)
193
193
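# A minimal standalone sketch of the opener construction above, using the
# stock urllib2 password manager instead of passwordmgr and hypothetical
# host/proxy values; illustrative only.
import urllib2

passmgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
passmgr.add_password(None, 'hg.example.com', 'alice', 'secret')
opener = urllib2.build_opener(
    urllib2.ProxyHandler({'http': 'http://proxy.example.com:3128/'}),
    urllib2.HTTPBasicAuthHandler(passmgr),
    urllib2.HTTPDigestAuthHandler(passmgr))
opener.addheaders = [('User-agent', 'mercurial/proto-1.0')]
urllib2.install_opener(opener)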
194 def url(self):
194 def url(self):
195 return self.path
195 return self.path
196
196
197 # look up capabilities only when needed
197 # look up capabilities only when needed
198
198
199 def get_caps(self):
199 def get_caps(self):
200 if self.caps is None:
200 if self.caps is None:
201 try:
201 try:
202 self.caps = self.do_read('capabilities').split()
202 self.caps = self.do_read('capabilities').split()
203 except hg.RepoError:
203 except hg.RepoError:
204 self.caps = ()
204 self.caps = ()
205 self.ui.debug(_('capabilities: %s\n') %
205 self.ui.debug(_('capabilities: %s\n') %
206 (' '.join(self.caps or ['none'])))
206 (' '.join(self.caps or ['none'])))
207 return self.caps
207 return self.caps
208
208
209 capabilities = property(get_caps)
209 capabilities = property(get_caps)
210
210
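# get_caps/capabilities above implement a lazy, cached lookup: the
# "capabilities" wire command is only issued on first attribute access.
# A generic sketch of the same property-backed caching pattern (class and
# capability names here are made up):
class lazycaps(object):
    def __init__(self):
        self._caps = None
    def _fetch(self):
        return ['lookup', 'unbundle']      # stands in for do_read('capabilities')
    def get_caps(self):
        if self._caps is None:
            self._caps = self._fetch()     # expensive call happens only once
        return self._caps
    caps = property(get_caps)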
211 def lock(self):
211 def lock(self):
212 raise util.Abort(_('operation not supported over http'))
212 raise util.Abort(_('operation not supported over http'))
213
213
214 def do_cmd(self, cmd, **args):
214 def do_cmd(self, cmd, **args):
215 data = args.pop('data', None)
215 data = args.pop('data', None)
216 headers = args.pop('headers', {})
216 headers = args.pop('headers', {})
217 self.ui.debug(_("sending %s command\n") % cmd)
217 self.ui.debug(_("sending %s command\n") % cmd)
218 q = {"cmd": cmd}
218 q = {"cmd": cmd}
219 q.update(args)
219 q.update(args)
220 qs = urllib.urlencode(q)
220 qs = urllib.urlencode(q)
221 cu = "%s?%s" % (self._url, qs)
221 cu = "%s?%s" % (self._url, qs)
222 try:
222 try:
223 resp = urllib2.urlopen(urllib2.Request(cu, data, headers))
223 resp = urllib2.urlopen(urllib2.Request(cu, data, headers))
224 except urllib2.HTTPError, inst:
224 except urllib2.HTTPError, inst:
225 if inst.code == 401:
225 if inst.code == 401:
226 raise util.Abort(_('authorization failed'))
226 raise util.Abort(_('authorization failed'))
227 raise
227 raise
228 except httplib.HTTPException, inst:
228 except httplib.HTTPException, inst:
229 self.ui.debug(_('http error while sending %s command\n') % cmd)
229 self.ui.debug(_('http error while sending %s command\n') % cmd)
230 self.ui.print_exc()
230 self.ui.print_exc()
231 raise IOError(None, inst)
231 raise IOError(None, inst)
232 try:
232 try:
233 proto = resp.getheader('content-type')
233 proto = resp.getheader('content-type')
234 except AttributeError:
234 except AttributeError:
235 proto = resp.headers['content-type']
235 proto = resp.headers['content-type']
236
236
237 # accept old "text/plain" and "application/hg-changegroup" for now
237 # accept old "text/plain" and "application/hg-changegroup" for now
238 if not proto.startswith('application/mercurial') and \
238 if not proto.startswith('application/mercurial') and \
239 not proto.startswith('text/plain') and \
239 not proto.startswith('text/plain') and \
240 not proto.startswith('application/hg-changegroup'):
240 not proto.startswith('application/hg-changegroup'):
241 raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
241 raise hg.RepoError(_("'%s' does not appear to be an hg repository") %
242 self._url)
242 self._url)
243
243
244 if proto.startswith('application/mercurial'):
244 if proto.startswith('application/mercurial'):
245 version = proto[22:]
245 version = proto[22:]
246 if float(version) > 0.1:
246 if float(version) > 0.1:
247 raise hg.RepoError(_("'%s' uses newer protocol %s") %
247 raise hg.RepoError(_("'%s' uses newer protocol %s") %
248 (self._url, version))
248 (self._url, version))
249
249
250 return resp
250 return resp
251
251
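# do_cmd above turns every hgweb command into query parameters appended to
# the repository URL. A small sketch of the URL it builds, with a made-up
# repository address and argument value:
import urllib

base = 'http://hg.example.com/repo'
qs = urllib.urlencode([('cmd', 'branches'), ('nodes', 'a' * 40)])
print '%s?%s' % (base, qs)
# prints the command URL, e.g. .../repo?cmd=branches&nodes=aaa...a (40 a's)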
252 def do_read(self, cmd, **args):
252 def do_read(self, cmd, **args):
253 fp = self.do_cmd(cmd, **args)
253 fp = self.do_cmd(cmd, **args)
254 try:
254 try:
255 return fp.read()
255 return fp.read()
256 finally:
256 finally:
257 # if using keepalive, allow connection to be reused
257 # if using keepalive, allow connection to be reused
258 fp.close()
258 fp.close()
259
259
260 def heads(self):
260 def heads(self):
261 d = self.do_read("heads")
261 d = self.do_read("heads")
262 try:
262 try:
263 return map(bin, d[:-1].split(" "))
263 return map(bin, d[:-1].split(" "))
264 except:
264 except:
265 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
265 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
266 raise
266 raise
267
267
268 def branches(self, nodes):
268 def branches(self, nodes):
269 n = " ".join(map(hex, nodes))
269 n = " ".join(map(hex, nodes))
270 d = self.do_read("branches", nodes=n)
270 d = self.do_read("branches", nodes=n)
271 try:
271 try:
272 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
272 br = [ tuple(map(bin, b.split(" "))) for b in d.splitlines() ]
273 return br
273 return br
274 except:
274 except:
275 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
275 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
276 raise
276 raise
277
277
278 def between(self, pairs):
278 def between(self, pairs):
279 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
279 n = "\n".join(["-".join(map(hex, p)) for p in pairs])
280 d = self.do_read("between", pairs=n)
280 d = self.do_read("between", pairs=n)
281 try:
281 try:
282 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
282 p = [ l and map(bin, l.split(" ")) or [] for l in d.splitlines() ]
283 return p
283 return p
284 except:
284 except:
285 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
285 self.ui.warn(_("unexpected response:\n") + d[:400] + "\n...\n")
286 raise
286 raise
287
287
288 def changegroup(self, nodes, kind):
288 def changegroup(self, nodes, kind):
289 n = " ".join(map(hex, nodes))
289 n = " ".join(map(hex, nodes))
290 f = self.do_cmd("changegroup", roots=n)
290 f = self.do_cmd("changegroup", roots=n)
291 bytes = 0
291 bytes = 0
292
292
293 def zgenerator(f):
293 def zgenerator(f):
294 zd = zlib.decompressobj()
294 zd = zlib.decompressobj()
295 try:
295 try:
296 for chnk in f:
296 for chnk in f:
297 yield zd.decompress(chnk)
297 yield zd.decompress(chnk)
298 except httplib.HTTPException, inst:
298 except httplib.HTTPException, inst:
299 raise IOError(None, _('connection ended unexpectedly'))
299 raise IOError(None, _('connection ended unexpectedly'))
300 yield zd.flush()
300 yield zd.flush()
301
301
302 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
302 return util.chunkbuffer(zgenerator(util.filechunkiter(f)))
303
303
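# zgenerator above decompresses the changegroup incrementally as chunks
# arrive instead of buffering the whole response. The same decompressobj
# pattern over an in-memory stream (the payload is made up):
import zlib

def decompress_chunks(chunks):
    zd = zlib.decompressobj()
    for chunk in chunks:
        yield zd.decompress(chunk)
    yield zd.flush()

compressed = zlib.compress('example changegroup payload')
pieces = [compressed[:5], compressed[5:]]
print ''.join(decompress_chunks(pieces))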
304 def unbundle(self, cg, heads, source):
304 def unbundle(self, cg, heads, source):
305 # have to stream the bundle to a temp file because we do not have
305 # have to stream the bundle to a temp file because we do not have
306 # HTTP/1.1 chunked transfer.
306 # HTTP/1.1 chunked transfer.
307
307
308 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
308 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
309 fp = os.fdopen(fd, 'wb+')
309 fp = os.fdopen(fd, 'wb+')
310 try:
310 try:
311 for chunk in util.filechunkiter(cg):
311 for chunk in util.filechunkiter(cg):
312 fp.write(chunk)
312 fp.write(chunk)
313 length = fp.tell()
313 length = fp.tell()
314 try:
314 try:
315 rfp = self.do_cmd(
315 rfp = self.do_cmd(
316 'unbundle', data=fp,
316 'unbundle', data=fp,
317 headers={'content-length': length,
317 headers={'content-length': length,
318 'content-type': 'application/octet-stream'},
318 'content-type': 'application/octet-stream'},
319 heads=' '.join(map(hex, heads)))
319 heads=' '.join(map(hex, heads)))
320 try:
320 try:
321 ret = int(rfp.readline())
321 ret = int(rfp.readline())
322 self.ui.write(rfp.read())
322 self.ui.write(rfp.read())
323 return ret
323 return ret
324 finally:
324 finally:
325 rfp.close()
325 rfp.close()
326 except socket.error, err:
326 except socket.error, err:
327 if err[0] in (errno.ECONNRESET, errno.EPIPE):
327 if err[0] in (errno.ECONNRESET, errno.EPIPE):
328 raise util.Abort(_('push failed: %s'), err[1])
328 raise util.Abort(_('push failed: %s') % err[1])
329 raise util.Abort(err[1])
329 raise util.Abort(err[1])
330 finally:
330 finally:
331 fp.close()
331 fp.close()
332 os.unlink(tempname)
332 os.unlink(tempname)
333
333
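# unbundle above spools the bundle into a temporary file only to learn its
# size, since without HTTP/1.1 chunked transfer the request needs an explicit
# content-length. A self-contained sketch of that spool-and-measure step:
import os, tempfile

def spool(chunks):
    fd, name = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    for chunk in chunks:
        fp.write(chunk)
    length = fp.tell()
    fp.seek(0)
    return fp, name, length

fp, name, length = spool(['first chunk ', 'second chunk'])
print length                    # 24
fp.close()
os.unlink(name)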
334 def stream_out(self):
334 def stream_out(self):
335 return self.do_cmd('stream_out')
335 return self.do_cmd('stream_out')
336
336
337 class httpsrepository(httprepository):
337 class httpsrepository(httprepository):
338 def __init__(self, ui, path):
338 def __init__(self, ui, path):
339 if not has_https:
339 if not has_https:
340 raise util.Abort(_('Python support for SSL and HTTPS '
340 raise util.Abort(_('Python support for SSL and HTTPS '
341 'is not installed'))
341 'is not installed'))
342 httprepository.__init__(self, ui, path)
342 httprepository.__init__(self, ui, path)
343
343
344 def instance(ui, path, create):
344 def instance(ui, path, create):
345 if create:
345 if create:
346 raise util.Abort(_('cannot create new http repository'))
346 raise util.Abort(_('cannot create new http repository'))
347 if path.startswith('hg:'):
347 if path.startswith('hg:'):
348 ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
348 ui.warn(_("hg:// syntax is deprecated, please use http:// instead\n"))
349 path = 'http:' + path[3:]
349 path = 'http:' + path[3:]
350 if path.startswith('https:'):
350 if path.startswith('https:'):
351 return httpsrepository(ui, path)
351 return httpsrepository(ui, path)
352 return httprepository(ui, path)
352 return httprepository(ui, path)
@@ -1,1751 +1,1751 b''
1 # localrepo.py - read/write repository class for mercurial
1 # localrepo.py - read/write repository class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 import repo
11 import repo
12 demandload(globals(), "appendfile changegroup")
12 demandload(globals(), "appendfile changegroup")
13 demandload(globals(), "changelog dirstate filelog manifest context")
13 demandload(globals(), "changelog dirstate filelog manifest context")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
14 demandload(globals(), "re lock transaction tempfile stat mdiff errno ui")
15 demandload(globals(), "os revlog time util")
15 demandload(globals(), "os revlog time util")
16
16
17 class localrepository(repo.repository):
17 class localrepository(repo.repository):
18 capabilities = ()
18 capabilities = ()
19
19
20 def __del__(self):
20 def __del__(self):
21 self.transhandle = None
21 self.transhandle = None
22 def __init__(self, parentui, path=None, create=0):
22 def __init__(self, parentui, path=None, create=0):
23 repo.repository.__init__(self)
23 repo.repository.__init__(self)
24 if not path:
24 if not path:
25 p = os.getcwd()
25 p = os.getcwd()
26 while not os.path.isdir(os.path.join(p, ".hg")):
26 while not os.path.isdir(os.path.join(p, ".hg")):
27 oldp = p
27 oldp = p
28 p = os.path.dirname(p)
28 p = os.path.dirname(p)
29 if p == oldp:
29 if p == oldp:
30 raise repo.RepoError(_("no repo found"))
30 raise repo.RepoError(_("no repo found"))
31 path = p
31 path = p
32 self.path = os.path.join(path, ".hg")
32 self.path = os.path.join(path, ".hg")
33
33
34 if not os.path.isdir(self.path):
34 if not os.path.isdir(self.path):
35 if create:
35 if create:
36 if not os.path.exists(path):
36 if not os.path.exists(path):
37 os.mkdir(path)
37 os.mkdir(path)
38 os.mkdir(self.path)
38 os.mkdir(self.path)
39 os.mkdir(self.join("data"))
39 os.mkdir(self.join("data"))
40 else:
40 else:
41 raise repo.RepoError(_("repository %s not found") % path)
41 raise repo.RepoError(_("repository %s not found") % path)
42 elif create:
42 elif create:
43 raise repo.RepoError(_("repository %s already exists") % path)
43 raise repo.RepoError(_("repository %s already exists") % path)
44
44
45 self.root = os.path.abspath(path)
45 self.root = os.path.abspath(path)
46 self.origroot = path
46 self.origroot = path
47 self.ui = ui.ui(parentui=parentui)
47 self.ui = ui.ui(parentui=parentui)
48 self.opener = util.opener(self.path)
48 self.opener = util.opener(self.path)
49 self.wopener = util.opener(self.root)
49 self.wopener = util.opener(self.root)
50
50
51 try:
51 try:
52 self.ui.readconfig(self.join("hgrc"), self.root)
52 self.ui.readconfig(self.join("hgrc"), self.root)
53 except IOError:
53 except IOError:
54 pass
54 pass
55
55
56 v = self.ui.revlogopts
56 v = self.ui.revlogopts
57 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
57 self.revlogversion = int(v.get('format', revlog.REVLOG_DEFAULT_FORMAT))
58 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
58 self.revlogv1 = self.revlogversion != revlog.REVLOGV0
59 fl = v.get('flags', None)
59 fl = v.get('flags', None)
60 flags = 0
60 flags = 0
61 if fl != None:
61 if fl != None:
62 for x in fl.split():
62 for x in fl.split():
63 flags |= revlog.flagstr(x)
63 flags |= revlog.flagstr(x)
64 elif self.revlogv1:
64 elif self.revlogv1:
65 flags = revlog.REVLOG_DEFAULT_FLAGS
65 flags = revlog.REVLOG_DEFAULT_FLAGS
66
66
67 v = self.revlogversion | flags
67 v = self.revlogversion | flags
68 self.manifest = manifest.manifest(self.opener, v)
68 self.manifest = manifest.manifest(self.opener, v)
69 self.changelog = changelog.changelog(self.opener, v)
69 self.changelog = changelog.changelog(self.opener, v)
70
70
71 # the changelog might not have the inline index flag
71 # the changelog might not have the inline index flag
72 # on. If the format of the changelog is the same as found in
72 # on. If the format of the changelog is the same as found in
73 # .hgrc, apply any flags found in the .hgrc as well.
73 # .hgrc, apply any flags found in the .hgrc as well.
74 # Otherwise, just use the version from the changelog
74 # Otherwise, just use the version from the changelog
75 v = self.changelog.version
75 v = self.changelog.version
76 if v == self.revlogversion:
76 if v == self.revlogversion:
77 v |= flags
77 v |= flags
78 self.revlogversion = v
78 self.revlogversion = v
79
79
80 self.tagscache = None
80 self.tagscache = None
81 self.nodetagscache = None
81 self.nodetagscache = None
82 self.encodepats = None
82 self.encodepats = None
83 self.decodepats = None
83 self.decodepats = None
84 self.transhandle = None
84 self.transhandle = None
85
85
86 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
86 self.dirstate = dirstate.dirstate(self.opener, self.ui, self.root)
87
87
88 def url(self):
88 def url(self):
89 return 'file:' + self.root
89 return 'file:' + self.root
90
90
91 def hook(self, name, throw=False, **args):
91 def hook(self, name, throw=False, **args):
92 def callhook(hname, funcname):
92 def callhook(hname, funcname):
93 '''call python hook. hook is callable object, looked up as
93 '''call python hook. hook is callable object, looked up as
94 name in python module. if callable returns "true", hook
94 name in python module. if callable returns "true", hook
95 fails, else passes. if hook raises exception, treated as
95 fails, else passes. if hook raises exception, treated as
96 hook failure. exception propagates if throw is "true".
96 hook failure. exception propagates if throw is "true".
97
97
98 reason for "true" meaning "hook failed" is so that
98 reason for "true" meaning "hook failed" is so that
99 unmodified commands (e.g. mercurial.commands.update) can
99 unmodified commands (e.g. mercurial.commands.update) can
100 be run as hooks without wrappers to convert return values.'''
100 be run as hooks without wrappers to convert return values.'''
101
101
102 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
102 self.ui.note(_("calling hook %s: %s\n") % (hname, funcname))
103 d = funcname.rfind('.')
103 d = funcname.rfind('.')
104 if d == -1:
104 if d == -1:
105 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
105 raise util.Abort(_('%s hook is invalid ("%s" not in a module)')
106 % (hname, funcname))
106 % (hname, funcname))
107 modname = funcname[:d]
107 modname = funcname[:d]
108 try:
108 try:
109 obj = __import__(modname)
109 obj = __import__(modname)
110 except ImportError:
110 except ImportError:
111 try:
111 try:
112 # extensions are loaded with hgext_ prefix
112 # extensions are loaded with hgext_ prefix
113 obj = __import__("hgext_%s" % modname)
113 obj = __import__("hgext_%s" % modname)
114 except ImportError:
114 except ImportError:
115 raise util.Abort(_('%s hook is invalid '
115 raise util.Abort(_('%s hook is invalid '
116 '(import of "%s" failed)') %
116 '(import of "%s" failed)') %
117 (hname, modname))
117 (hname, modname))
118 try:
118 try:
119 for p in funcname.split('.')[1:]:
119 for p in funcname.split('.')[1:]:
120 obj = getattr(obj, p)
120 obj = getattr(obj, p)
121 except AttributeError, err:
121 except AttributeError, err:
122 raise util.Abort(_('%s hook is invalid '
122 raise util.Abort(_('%s hook is invalid '
123 '("%s" is not defined)') %
123 '("%s" is not defined)') %
124 (hname, funcname))
124 (hname, funcname))
125 if not callable(obj):
125 if not callable(obj):
126 raise util.Abort(_('%s hook is invalid '
126 raise util.Abort(_('%s hook is invalid '
127 '("%s" is not callable)') %
127 '("%s" is not callable)') %
128 (hname, funcname))
128 (hname, funcname))
129 try:
129 try:
130 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
130 r = obj(ui=self.ui, repo=self, hooktype=name, **args)
131 except (KeyboardInterrupt, util.SignalInterrupt):
131 except (KeyboardInterrupt, util.SignalInterrupt):
132 raise
132 raise
133 except Exception, exc:
133 except Exception, exc:
134 if isinstance(exc, util.Abort):
134 if isinstance(exc, util.Abort):
135 self.ui.warn(_('error: %s hook failed: %s\n') %
135 self.ui.warn(_('error: %s hook failed: %s\n') %
136 (hname, exc.args[0] % exc.args[1:]))
136 (hname, exc.args[0]))
137 else:
137 else:
138 self.ui.warn(_('error: %s hook raised an exception: '
138 self.ui.warn(_('error: %s hook raised an exception: '
139 '%s\n') % (hname, exc))
139 '%s\n') % (hname, exc))
140 if throw:
140 if throw:
141 raise
141 raise
142 self.ui.print_exc()
142 self.ui.print_exc()
143 return True
143 return True
144 if r:
144 if r:
145 if throw:
145 if throw:
146 raise util.Abort(_('%s hook failed') % hname)
146 raise util.Abort(_('%s hook failed') % hname)
147 self.ui.warn(_('warning: %s hook failed\n') % hname)
147 self.ui.warn(_('warning: %s hook failed\n') % hname)
148 return r
148 return r
149
149
150 def runhook(name, cmd):
150 def runhook(name, cmd):
151 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
151 self.ui.note(_("running hook %s: %s\n") % (name, cmd))
152 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
152 env = dict([('HG_' + k.upper(), v) for k, v in args.iteritems()])
153 r = util.system(cmd, environ=env, cwd=self.root)
153 r = util.system(cmd, environ=env, cwd=self.root)
154 if r:
154 if r:
155 desc, r = util.explain_exit(r)
155 desc, r = util.explain_exit(r)
156 if throw:
156 if throw:
157 raise util.Abort(_('%s hook %s') % (name, desc))
157 raise util.Abort(_('%s hook %s') % (name, desc))
158 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
158 self.ui.warn(_('warning: %s hook %s\n') % (name, desc))
159 return r
159 return r
160
160
161 r = False
161 r = False
162 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
162 hooks = [(hname, cmd) for hname, cmd in self.ui.configitems("hooks")
163 if hname.split(".", 1)[0] == name and cmd]
163 if hname.split(".", 1)[0] == name and cmd]
164 hooks.sort()
164 hooks.sort()
165 for hname, cmd in hooks:
165 for hname, cmd in hooks:
166 if cmd.startswith('python:'):
166 if cmd.startswith('python:'):
167 r = callhook(hname, cmd[7:].strip()) or r
167 r = callhook(hname, cmd[7:].strip()) or r
168 else:
168 else:
169 r = runhook(hname, cmd) or r
169 r = runhook(hname, cmd) or r
170 return r
170 return r
171
171
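# hook() above dispatches entries from the [hooks] section of hgrc: values
# prefixed with "python:" are imported and called in-process, anything else
# is run through the shell with HG_* variables in the environment. An
# illustrative configuration (hook names and module are made up):
#
#   [hooks]
#   commit = echo committed
#   pretxncommit.nowhitespace = python:checks.nowhitespace
#
# and a matching in-process hook, following the convention used by callhook()
# above (a true return value marks the hook as failed):
def nowhitespace(ui, repo, hooktype, **kwargs):
    ui.status('running %s hook\n' % hooktype)
    return False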
172 tag_disallowed = ':\r\n'
172 tag_disallowed = ':\r\n'
173
173
174 def tag(self, name, node, message, local, user, date):
174 def tag(self, name, node, message, local, user, date):
175 '''tag a revision with a symbolic name.
175 '''tag a revision with a symbolic name.
176
176
177 if local is True, the tag is stored in a per-repository file.
177 if local is True, the tag is stored in a per-repository file.
178 otherwise, it is stored in the .hgtags file, and a new
178 otherwise, it is stored in the .hgtags file, and a new
179 changeset is committed with the change.
179 changeset is committed with the change.
180
180
181 keyword arguments:
181 keyword arguments:
182
182
183 local: whether to store tag in non-version-controlled file
183 local: whether to store tag in non-version-controlled file
184 (default False)
184 (default False)
185
185
186 message: commit message to use if committing
186 message: commit message to use if committing
187
187
188 user: name of user to use if committing
188 user: name of user to use if committing
189
189
190 date: date tuple to use if committing'''
190 date: date tuple to use if committing'''
191
191
192 for c in self.tag_disallowed:
192 for c in self.tag_disallowed:
193 if c in name:
193 if c in name:
194 raise util.Abort(_('%r cannot be used in a tag name') % c)
194 raise util.Abort(_('%r cannot be used in a tag name') % c)
195
195
196 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
196 self.hook('pretag', throw=True, node=hex(node), tag=name, local=local)
197
197
198 if local:
198 if local:
199 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
199 self.opener('localtags', 'a').write('%s %s\n' % (hex(node), name))
200 self.hook('tag', node=hex(node), tag=name, local=local)
200 self.hook('tag', node=hex(node), tag=name, local=local)
201 return
201 return
202
202
203 for x in self.status()[:5]:
203 for x in self.status()[:5]:
204 if '.hgtags' in x:
204 if '.hgtags' in x:
205 raise util.Abort(_('working copy of .hgtags is changed '
205 raise util.Abort(_('working copy of .hgtags is changed '
206 '(please commit .hgtags manually)'))
206 '(please commit .hgtags manually)'))
207
207
208 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
208 self.wfile('.hgtags', 'ab').write('%s %s\n' % (hex(node), name))
209 if self.dirstate.state('.hgtags') == '?':
209 if self.dirstate.state('.hgtags') == '?':
210 self.add(['.hgtags'])
210 self.add(['.hgtags'])
211
211
212 self.commit(['.hgtags'], message, user, date)
212 self.commit(['.hgtags'], message, user, date)
213 self.hook('tag', node=hex(node), tag=name, local=local)
213 self.hook('tag', node=hex(node), tag=name, local=local)
214
214
215 def tags(self):
215 def tags(self):
216 '''return a mapping of tag to node'''
216 '''return a mapping of tag to node'''
217 if not self.tagscache:
217 if not self.tagscache:
218 self.tagscache = {}
218 self.tagscache = {}
219
219
220 def parsetag(line, context):
220 def parsetag(line, context):
221 if not line:
221 if not line:
222 return
222 return
223 s = line.split(" ", 1)
223 s = line.split(" ", 1)
224 if len(s) != 2:
224 if len(s) != 2:
225 self.ui.warn(_("%s: cannot parse entry\n") % context)
225 self.ui.warn(_("%s: cannot parse entry\n") % context)
226 return
226 return
227 node, key = s
227 node, key = s
228 key = key.strip()
228 key = key.strip()
229 try:
229 try:
230 bin_n = bin(node)
230 bin_n = bin(node)
231 except TypeError:
231 except TypeError:
232 self.ui.warn(_("%s: node '%s' is not well formed\n") %
232 self.ui.warn(_("%s: node '%s' is not well formed\n") %
233 (context, node))
233 (context, node))
234 return
234 return
235 if bin_n not in self.changelog.nodemap:
235 if bin_n not in self.changelog.nodemap:
236 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
236 self.ui.warn(_("%s: tag '%s' refers to unknown node\n") %
237 (context, key))
237 (context, key))
238 return
238 return
239 self.tagscache[key] = bin_n
239 self.tagscache[key] = bin_n
240
240
241 # read the tags file from each head, ending with the tip,
241 # read the tags file from each head, ending with the tip,
242 # and add each tag found to the map, with "newer" ones
242 # and add each tag found to the map, with "newer" ones
243 # taking precedence
243 # taking precedence
244 heads = self.heads()
244 heads = self.heads()
245 heads.reverse()
245 heads.reverse()
246 fl = self.file(".hgtags")
246 fl = self.file(".hgtags")
247 for node in heads:
247 for node in heads:
248 change = self.changelog.read(node)
248 change = self.changelog.read(node)
249 rev = self.changelog.rev(node)
249 rev = self.changelog.rev(node)
250 fn, ff = self.manifest.find(change[0], '.hgtags')
250 fn, ff = self.manifest.find(change[0], '.hgtags')
251 if fn is None: continue
251 if fn is None: continue
252 count = 0
252 count = 0
253 for l in fl.read(fn).splitlines():
253 for l in fl.read(fn).splitlines():
254 count += 1
254 count += 1
255 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
255 parsetag(l, _(".hgtags (rev %d:%s), line %d") %
256 (rev, short(node), count))
256 (rev, short(node), count))
257 try:
257 try:
258 f = self.opener("localtags")
258 f = self.opener("localtags")
259 count = 0
259 count = 0
260 for l in f:
260 for l in f:
261 count += 1
261 count += 1
262 parsetag(l, _("localtags, line %d") % count)
262 parsetag(l, _("localtags, line %d") % count)
263 except IOError:
263 except IOError:
264 pass
264 pass
265
265
266 self.tagscache['tip'] = self.changelog.tip()
266 self.tagscache['tip'] = self.changelog.tip()
267
267
268 return self.tagscache
268 return self.tagscache
269
269
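# Each .hgtags (and localtags) line is "<40-digit hex node> <tag name>",
# which is what parsetag() above expects. A tiny sketch of that parse with a
# made-up node:
line = '1234567890abcdef1234567890abcdef12345678 release-1.0\n'
node, name = line.split(' ', 1)
print node, '->', name.strip()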
270 def tagslist(self):
270 def tagslist(self):
271 '''return a list of tags ordered by revision'''
271 '''return a list of tags ordered by revision'''
272 l = []
272 l = []
273 for t, n in self.tags().items():
273 for t, n in self.tags().items():
274 try:
274 try:
275 r = self.changelog.rev(n)
275 r = self.changelog.rev(n)
276 except:
276 except:
277 r = -2 # sort to the beginning of the list if unknown
277 r = -2 # sort to the beginning of the list if unknown
278 l.append((r, t, n))
278 l.append((r, t, n))
279 l.sort()
279 l.sort()
280 return [(t, n) for r, t, n in l]
280 return [(t, n) for r, t, n in l]
281
281
282 def nodetags(self, node):
282 def nodetags(self, node):
283 '''return the tags associated with a node'''
283 '''return the tags associated with a node'''
284 if not self.nodetagscache:
284 if not self.nodetagscache:
285 self.nodetagscache = {}
285 self.nodetagscache = {}
286 for t, n in self.tags().items():
286 for t, n in self.tags().items():
287 self.nodetagscache.setdefault(n, []).append(t)
287 self.nodetagscache.setdefault(n, []).append(t)
288 return self.nodetagscache.get(node, [])
288 return self.nodetagscache.get(node, [])
289
289
290 def lookup(self, key):
290 def lookup(self, key):
291 try:
291 try:
292 return self.tags()[key]
292 return self.tags()[key]
293 except KeyError:
293 except KeyError:
294 if key == '.':
294 if key == '.':
295 key = self.dirstate.parents()[0]
295 key = self.dirstate.parents()[0]
296 if key == nullid:
296 if key == nullid:
297 raise repo.RepoError(_("no revision checked out"))
297 raise repo.RepoError(_("no revision checked out"))
298 try:
298 try:
299 return self.changelog.lookup(key)
299 return self.changelog.lookup(key)
300 except:
300 except:
301 raise repo.RepoError(_("unknown revision '%s'") % key)
301 raise repo.RepoError(_("unknown revision '%s'") % key)
302
302
303 def dev(self):
303 def dev(self):
304 return os.lstat(self.path).st_dev
304 return os.lstat(self.path).st_dev
305
305
306 def local(self):
306 def local(self):
307 return True
307 return True
308
308
309 def join(self, f):
309 def join(self, f):
310 return os.path.join(self.path, f)
310 return os.path.join(self.path, f)
311
311
312 def wjoin(self, f):
312 def wjoin(self, f):
313 return os.path.join(self.root, f)
313 return os.path.join(self.root, f)
314
314
315 def file(self, f):
315 def file(self, f):
316 if f[0] == '/':
316 if f[0] == '/':
317 f = f[1:]
317 f = f[1:]
318 return filelog.filelog(self.opener, f, self.revlogversion)
318 return filelog.filelog(self.opener, f, self.revlogversion)
319
319
320 def changectx(self, changeid):
320 def changectx(self, changeid):
321 return context.changectx(self, changeid)
321 return context.changectx(self, changeid)
322
322
323 def filectx(self, path, changeid=None, fileid=None):
323 def filectx(self, path, changeid=None, fileid=None):
324 """changeid can be a changeset revision, node, or tag.
324 """changeid can be a changeset revision, node, or tag.
325 fileid can be a file revision or node."""
325 fileid can be a file revision or node."""
326 return context.filectx(self, path, changeid, fileid)
326 return context.filectx(self, path, changeid, fileid)
327
327
328 def getcwd(self):
328 def getcwd(self):
329 return self.dirstate.getcwd()
329 return self.dirstate.getcwd()
330
330
331 def wfile(self, f, mode='r'):
331 def wfile(self, f, mode='r'):
332 return self.wopener(f, mode)
332 return self.wopener(f, mode)
333
333
334 def wread(self, filename):
334 def wread(self, filename):
335 if self.encodepats == None:
335 if self.encodepats == None:
336 l = []
336 l = []
337 for pat, cmd in self.ui.configitems("encode"):
337 for pat, cmd in self.ui.configitems("encode"):
338 mf = util.matcher(self.root, "", [pat], [], [])[1]
338 mf = util.matcher(self.root, "", [pat], [], [])[1]
339 l.append((mf, cmd))
339 l.append((mf, cmd))
340 self.encodepats = l
340 self.encodepats = l
341
341
342 data = self.wopener(filename, 'r').read()
342 data = self.wopener(filename, 'r').read()
343
343
344 for mf, cmd in self.encodepats:
344 for mf, cmd in self.encodepats:
345 if mf(filename):
345 if mf(filename):
346 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
346 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
347 data = util.filter(data, cmd)
347 data = util.filter(data, cmd)
348 break
348 break
349
349
350 return data
350 return data
351
351
352 def wwrite(self, filename, data, fd=None):
352 def wwrite(self, filename, data, fd=None):
353 if self.decodepats == None:
353 if self.decodepats == None:
354 l = []
354 l = []
355 for pat, cmd in self.ui.configitems("decode"):
355 for pat, cmd in self.ui.configitems("decode"):
356 mf = util.matcher(self.root, "", [pat], [], [])[1]
356 mf = util.matcher(self.root, "", [pat], [], [])[1]
357 l.append((mf, cmd))
357 l.append((mf, cmd))
358 self.decodepats = l
358 self.decodepats = l
359
359
360 for mf, cmd in self.decodepats:
360 for mf, cmd in self.decodepats:
361 if mf(filename):
361 if mf(filename):
362 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
362 self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
363 data = util.filter(data, cmd)
363 data = util.filter(data, cmd)
364 break
364 break
365
365
366 if fd:
366 if fd:
367 return fd.write(data)
367 return fd.write(data)
368 return self.wopener(filename, 'w').write(data)
368 return self.wopener(filename, 'w').write(data)
369
369
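# wread/wwrite above pipe working-directory data through shell filters from
# the [encode] and [decode] sections of hgrc: patterns are compiled with
# util.matcher and the data is run through util.filter. An illustrative
# configuration (the filter commands are only examples):
#
#   [encode]
#   *.gz = gunzip
#
#   [decode]
#   *.gz = gzip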
370 def transaction(self):
370 def transaction(self):
371 tr = self.transhandle
371 tr = self.transhandle
372 if tr != None and tr.running():
372 if tr != None and tr.running():
373 return tr.nest()
373 return tr.nest()
374
374
375 # save dirstate for rollback
375 # save dirstate for rollback
376 try:
376 try:
377 ds = self.opener("dirstate").read()
377 ds = self.opener("dirstate").read()
378 except IOError:
378 except IOError:
379 ds = ""
379 ds = ""
380 self.opener("journal.dirstate", "w").write(ds)
380 self.opener("journal.dirstate", "w").write(ds)
381
381
382 tr = transaction.transaction(self.ui.warn, self.opener,
382 tr = transaction.transaction(self.ui.warn, self.opener,
383 self.join("journal"),
383 self.join("journal"),
384 aftertrans(self.path))
384 aftertrans(self.path))
385 self.transhandle = tr
385 self.transhandle = tr
386 return tr
386 return tr
387
387
388 def recover(self):
388 def recover(self):
389 l = self.lock()
389 l = self.lock()
390 if os.path.exists(self.join("journal")):
390 if os.path.exists(self.join("journal")):
391 self.ui.status(_("rolling back interrupted transaction\n"))
391 self.ui.status(_("rolling back interrupted transaction\n"))
392 transaction.rollback(self.opener, self.join("journal"))
392 transaction.rollback(self.opener, self.join("journal"))
393 self.reload()
393 self.reload()
394 return True
394 return True
395 else:
395 else:
396 self.ui.warn(_("no interrupted transaction available\n"))
396 self.ui.warn(_("no interrupted transaction available\n"))
397 return False
397 return False
398
398
399 def rollback(self, wlock=None):
399 def rollback(self, wlock=None):
400 if not wlock:
400 if not wlock:
401 wlock = self.wlock()
401 wlock = self.wlock()
402 l = self.lock()
402 l = self.lock()
403 if os.path.exists(self.join("undo")):
403 if os.path.exists(self.join("undo")):
404 self.ui.status(_("rolling back last transaction\n"))
404 self.ui.status(_("rolling back last transaction\n"))
405 transaction.rollback(self.opener, self.join("undo"))
405 transaction.rollback(self.opener, self.join("undo"))
406 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
406 util.rename(self.join("undo.dirstate"), self.join("dirstate"))
407 self.reload()
407 self.reload()
408 self.wreload()
408 self.wreload()
409 else:
409 else:
410 self.ui.warn(_("no rollback information available\n"))
410 self.ui.warn(_("no rollback information available\n"))
411
411
412 def wreload(self):
412 def wreload(self):
413 self.dirstate.read()
413 self.dirstate.read()
414
414
415 def reload(self):
415 def reload(self):
416 self.changelog.load()
416 self.changelog.load()
417 self.manifest.load()
417 self.manifest.load()
418 self.tagscache = None
418 self.tagscache = None
419 self.nodetagscache = None
419 self.nodetagscache = None
420
420
421 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
421 def do_lock(self, lockname, wait, releasefn=None, acquirefn=None,
422 desc=None):
422 desc=None):
423 try:
423 try:
424 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
424 l = lock.lock(self.join(lockname), 0, releasefn, desc=desc)
425 except lock.LockHeld, inst:
425 except lock.LockHeld, inst:
426 if not wait:
426 if not wait:
427 raise
427 raise
428 self.ui.warn(_("waiting for lock on %s held by %s\n") %
428 self.ui.warn(_("waiting for lock on %s held by %s\n") %
429 (desc, inst.args[0]))
429 (desc, inst.args[0]))
430 # default to 600 seconds timeout
430 # default to 600 seconds timeout
431 l = lock.lock(self.join(lockname),
431 l = lock.lock(self.join(lockname),
432 int(self.ui.config("ui", "timeout") or 600),
432 int(self.ui.config("ui", "timeout") or 600),
433 releasefn, desc=desc)
433 releasefn, desc=desc)
434 if acquirefn:
434 if acquirefn:
435 acquirefn()
435 acquirefn()
436 return l
436 return l
437
437
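# do_lock above first attempts a non-blocking acquire and, if the lock is
# held, warns and retries with the configured ui.timeout (600 seconds by
# default). A simplified, self-contained sketch of the same try-then-wait
# pattern using an exclusively-created lock file (not Mercurial's lock class):
import errno, os, time

class LockHeld(Exception):
    pass

def trylock(path):
    try:
        os.close(os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDWR))
    except OSError, err:
        if err.errno == errno.EEXIST:
            raise LockHeld(path)
        raise

def acquire(path, wait=True, timeout=600):
    try:
        return trylock(path)               # non-blocking first attempt
    except LockHeld:
        if not wait:
            raise
        deadline = time.time() + timeout
        while time.time() < deadline:      # bounded wait, retry once a second
            time.sleep(1)
            try:
                return trylock(path)
            except LockHeld:
                pass
        raise LockHeld(path)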
438 def lock(self, wait=1):
438 def lock(self, wait=1):
439 return self.do_lock("lock", wait, acquirefn=self.reload,
439 return self.do_lock("lock", wait, acquirefn=self.reload,
440 desc=_('repository %s') % self.origroot)
440 desc=_('repository %s') % self.origroot)
441
441
442 def wlock(self, wait=1):
442 def wlock(self, wait=1):
443 return self.do_lock("wlock", wait, self.dirstate.write,
443 return self.do_lock("wlock", wait, self.dirstate.write,
444 self.wreload,
444 self.wreload,
445 desc=_('working directory of %s') % self.origroot)
445 desc=_('working directory of %s') % self.origroot)
446
446
447 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
447 def checkfilemerge(self, filename, text, filelog, manifest1, manifest2):
448 "determine whether a new filenode is needed"
448 "determine whether a new filenode is needed"
449 fp1 = manifest1.get(filename, nullid)
449 fp1 = manifest1.get(filename, nullid)
450 fp2 = manifest2.get(filename, nullid)
450 fp2 = manifest2.get(filename, nullid)
451
451
452 if fp2 != nullid:
452 if fp2 != nullid:
453 # is one parent an ancestor of the other?
453 # is one parent an ancestor of the other?
454 fpa = filelog.ancestor(fp1, fp2)
454 fpa = filelog.ancestor(fp1, fp2)
455 if fpa == fp1:
455 if fpa == fp1:
456 fp1, fp2 = fp2, nullid
456 fp1, fp2 = fp2, nullid
457 elif fpa == fp2:
457 elif fpa == fp2:
458 fp2 = nullid
458 fp2 = nullid
459
459
460 # is the file unmodified from the parent? report existing entry
460 # is the file unmodified from the parent? report existing entry
461 if fp2 == nullid and text == filelog.read(fp1):
461 if fp2 == nullid and text == filelog.read(fp1):
462 return (fp1, None, None)
462 return (fp1, None, None)
463
463
464 return (None, fp1, fp2)
464 return (None, fp1, fp2)
465
465
466 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
466 def rawcommit(self, files, text, user, date, p1=None, p2=None, wlock=None):
467 orig_parent = self.dirstate.parents()[0] or nullid
467 orig_parent = self.dirstate.parents()[0] or nullid
468 p1 = p1 or self.dirstate.parents()[0] or nullid
468 p1 = p1 or self.dirstate.parents()[0] or nullid
469 p2 = p2 or self.dirstate.parents()[1] or nullid
469 p2 = p2 or self.dirstate.parents()[1] or nullid
470 c1 = self.changelog.read(p1)
470 c1 = self.changelog.read(p1)
471 c2 = self.changelog.read(p2)
471 c2 = self.changelog.read(p2)
472 m1 = self.manifest.read(c1[0]).copy()
472 m1 = self.manifest.read(c1[0]).copy()
473 m2 = self.manifest.read(c2[0])
473 m2 = self.manifest.read(c2[0])
474 changed = []
474 changed = []
475
475
476 if orig_parent == p1:
476 if orig_parent == p1:
477 update_dirstate = 1
477 update_dirstate = 1
478 else:
478 else:
479 update_dirstate = 0
479 update_dirstate = 0
480
480
481 if not wlock:
481 if not wlock:
482 wlock = self.wlock()
482 wlock = self.wlock()
483 l = self.lock()
483 l = self.lock()
484 tr = self.transaction()
484 tr = self.transaction()
485 linkrev = self.changelog.count()
485 linkrev = self.changelog.count()
486 for f in files:
486 for f in files:
487 try:
487 try:
488 t = self.wread(f)
488 t = self.wread(f)
489 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
489 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
490 r = self.file(f)
490 r = self.file(f)
491
491
492 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
492 (entry, fp1, fp2) = self.checkfilemerge(f, t, r, m1, m2)
493 if entry:
493 if entry:
494 m1[f] = entry
494 m1[f] = entry
495 continue
495 continue
496
496
497 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
497 m1[f] = r.add(t, {}, tr, linkrev, fp1, fp2)
498 changed.append(f)
498 changed.append(f)
499 if update_dirstate:
499 if update_dirstate:
500 self.dirstate.update([f], "n")
500 self.dirstate.update([f], "n")
501 except IOError:
501 except IOError:
502 try:
502 try:
503 del m1[f]
503 del m1[f]
504 if update_dirstate:
504 if update_dirstate:
505 self.dirstate.forget([f])
505 self.dirstate.forget([f])
506 except:
506 except:
507 # deleted from p2?
507 # deleted from p2?
508 pass
508 pass
509
509
510 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
510 mnode = self.manifest.add(m1, tr, linkrev, c1[0], c2[0])
511 user = user or self.ui.username()
511 user = user or self.ui.username()
512 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
512 n = self.changelog.add(mnode, changed, text, tr, p1, p2, user, date)
513 tr.close()
513 tr.close()
514 if update_dirstate:
514 if update_dirstate:
515 self.dirstate.setparents(n, nullid)
515 self.dirstate.setparents(n, nullid)
516
516
517 def commit(self, files=None, text="", user=None, date=None,
517 def commit(self, files=None, text="", user=None, date=None,
518 match=util.always, force=False, lock=None, wlock=None,
518 match=util.always, force=False, lock=None, wlock=None,
519 force_editor=False):
519 force_editor=False):
520 commit = []
520 commit = []
521 remove = []
521 remove = []
522 changed = []
522 changed = []
523
523
524 if files:
524 if files:
525 for f in files:
525 for f in files:
526 s = self.dirstate.state(f)
526 s = self.dirstate.state(f)
527 if s in 'nmai':
527 if s in 'nmai':
528 commit.append(f)
528 commit.append(f)
529 elif s == 'r':
529 elif s == 'r':
530 remove.append(f)
530 remove.append(f)
531 else:
531 else:
532 self.ui.warn(_("%s not tracked!\n") % f)
532 self.ui.warn(_("%s not tracked!\n") % f)
533 else:
533 else:
534 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
534 modified, added, removed, deleted, unknown = self.status(match=match)[:5]
535 commit = modified + added
535 commit = modified + added
536 remove = removed
536 remove = removed
537
537
538 p1, p2 = self.dirstate.parents()
538 p1, p2 = self.dirstate.parents()
539 c1 = self.changelog.read(p1)
539 c1 = self.changelog.read(p1)
540 c2 = self.changelog.read(p2)
540 c2 = self.changelog.read(p2)
541 m1 = self.manifest.read(c1[0]).copy()
541 m1 = self.manifest.read(c1[0]).copy()
542 m2 = self.manifest.read(c2[0])
542 m2 = self.manifest.read(c2[0])
543
543
544 if not commit and not remove and not force and p2 == nullid:
544 if not commit and not remove and not force and p2 == nullid:
545 self.ui.status(_("nothing changed\n"))
545 self.ui.status(_("nothing changed\n"))
546 return None
546 return None
547
547
548 xp1 = hex(p1)
548 xp1 = hex(p1)
549 if p2 == nullid: xp2 = ''
549 if p2 == nullid: xp2 = ''
550 else: xp2 = hex(p2)
550 else: xp2 = hex(p2)
551
551
552 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
552 self.hook("precommit", throw=True, parent1=xp1, parent2=xp2)
553
553
554 if not wlock:
554 if not wlock:
555 wlock = self.wlock()
555 wlock = self.wlock()
556 if not lock:
556 if not lock:
557 lock = self.lock()
557 lock = self.lock()
558 tr = self.transaction()
558 tr = self.transaction()
559
559
560 # check in files
560 # check in files
561 new = {}
561 new = {}
562 linkrev = self.changelog.count()
562 linkrev = self.changelog.count()
563 commit.sort()
563 commit.sort()
564 for f in commit:
564 for f in commit:
565 self.ui.note(f + "\n")
565 self.ui.note(f + "\n")
566 try:
566 try:
567 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
567 m1.set(f, util.is_exec(self.wjoin(f), m1.execf(f)))
568 t = self.wread(f)
568 t = self.wread(f)
569 except IOError:
569 except IOError:
570 self.ui.warn(_("trouble committing %s!\n") % f)
570 self.ui.warn(_("trouble committing %s!\n") % f)
571 raise
571 raise
572
572
573 r = self.file(f)
573 r = self.file(f)
574
574
575 meta = {}
575 meta = {}
576 cp = self.dirstate.copied(f)
576 cp = self.dirstate.copied(f)
577 if cp:
577 if cp:
578 meta["copy"] = cp
578 meta["copy"] = cp
579 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
579 meta["copyrev"] = hex(m1.get(cp, m2.get(cp, nullid)))
580 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
580 self.ui.debug(_(" %s: copy %s:%s\n") % (f, cp, meta["copyrev"]))
581 fp1, fp2 = nullid, nullid
581 fp1, fp2 = nullid, nullid
582 else:
582 else:
583 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
583 entry, fp1, fp2 = self.checkfilemerge(f, t, r, m1, m2)
584 if entry:
584 if entry:
585 new[f] = entry
585 new[f] = entry
586 continue
586 continue
587
587
588 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
588 new[f] = r.add(t, meta, tr, linkrev, fp1, fp2)
589 # remember what we've added so that we can later calculate
589 # remember what we've added so that we can later calculate
590 # the files to pull from a set of changesets
590 # the files to pull from a set of changesets
591 changed.append(f)
591 changed.append(f)
592
592
593 # update manifest
593 # update manifest
594 m1.update(new)
594 m1.update(new)
595 for f in remove:
595 for f in remove:
596 if f in m1:
596 if f in m1:
597 del m1[f]
597 del m1[f]
598 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
598 mn = self.manifest.add(m1, tr, linkrev, c1[0], c2[0],
599 (new, remove))
599 (new, remove))
600
600
601 # add changeset
601 # add changeset
602 new = new.keys()
602 new = new.keys()
603 new.sort()
603 new.sort()
604
604
605 user = user or self.ui.username()
605 user = user or self.ui.username()
606 if not text or force_editor:
606 if not text or force_editor:
607 edittext = []
607 edittext = []
608 if text:
608 if text:
609 edittext.append(text)
609 edittext.append(text)
610 edittext.append("")
610 edittext.append("")
611 if p2 != nullid:
611 if p2 != nullid:
612 edittext.append("HG: branch merge")
612 edittext.append("HG: branch merge")
613 edittext.extend(["HG: changed %s" % f for f in changed])
613 edittext.extend(["HG: changed %s" % f for f in changed])
614 edittext.extend(["HG: removed %s" % f for f in remove])
614 edittext.extend(["HG: removed %s" % f for f in remove])
615 if not changed and not remove:
615 if not changed and not remove:
616 edittext.append("HG: no files changed")
616 edittext.append("HG: no files changed")
617 edittext.append("")
617 edittext.append("")
618 # run editor in the repository root
618 # run editor in the repository root
619 olddir = os.getcwd()
619 olddir = os.getcwd()
620 os.chdir(self.root)
620 os.chdir(self.root)
621 text = self.ui.edit("\n".join(edittext), user)
621 text = self.ui.edit("\n".join(edittext), user)
622 os.chdir(olddir)
622 os.chdir(olddir)
623
623
624 lines = [line.rstrip() for line in text.rstrip().splitlines()]
624 lines = [line.rstrip() for line in text.rstrip().splitlines()]
625 while lines and not lines[0]:
625 while lines and not lines[0]:
626 del lines[0]
626 del lines[0]
627 if not lines:
627 if not lines:
628 return None
628 return None
629 text = '\n'.join(lines)
629 text = '\n'.join(lines)
630 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
630 n = self.changelog.add(mn, changed + remove, text, tr, p1, p2, user, date)
631 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
631 self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
632 parent2=xp2)
632 parent2=xp2)
633 tr.close()
633 tr.close()
634
634
635 self.dirstate.setparents(n)
635 self.dirstate.setparents(n)
636 self.dirstate.update(new, "n")
636 self.dirstate.update(new, "n")
637 self.dirstate.forget(remove)
637 self.dirstate.forget(remove)
638
638
639 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
639 self.hook("commit", node=hex(n), parent1=xp1, parent2=xp2)
640 return n
640 return n
641
641
642 def walk(self, node=None, files=[], match=util.always, badmatch=None):
642 def walk(self, node=None, files=[], match=util.always, badmatch=None):
643 if node:
643 if node:
644 fdict = dict.fromkeys(files)
644 fdict = dict.fromkeys(files)
645 for fn in self.manifest.read(self.changelog.read(node)[0]):
645 for fn in self.manifest.read(self.changelog.read(node)[0]):
646 for ffn in fdict:
646 for ffn in fdict:
647 # match if the file is the exact name or a directory
647 # match if the file is the exact name or a directory
648 if ffn == fn or fn.startswith("%s/" % ffn):
648 if ffn == fn or fn.startswith("%s/" % ffn):
649 del fdict[ffn]
649 del fdict[ffn]
650 break
650 break
651 if match(fn):
651 if match(fn):
652 yield 'm', fn
652 yield 'm', fn
653 for fn in fdict:
653 for fn in fdict:
654 if badmatch and badmatch(fn):
654 if badmatch and badmatch(fn):
655 if match(fn):
655 if match(fn):
656 yield 'b', fn
656 yield 'b', fn
657 else:
657 else:
658 self.ui.warn(_('%s: No such file in rev %s\n') % (
658 self.ui.warn(_('%s: No such file in rev %s\n') % (
659 util.pathto(self.getcwd(), fn), short(node)))
659 util.pathto(self.getcwd(), fn), short(node)))
660 else:
660 else:
661 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
661 for src, fn in self.dirstate.walk(files, match, badmatch=badmatch):
662 yield src, fn
662 yield src, fn
663
663
664 def status(self, node1=None, node2=None, files=[], match=util.always,
664 def status(self, node1=None, node2=None, files=[], match=util.always,
665 wlock=None, list_ignored=False, list_clean=False):
665 wlock=None, list_ignored=False, list_clean=False):
666 """return status of files between two nodes or node and working directory
666 """return status of files between two nodes or node and working directory
667
667
668 If node1 is None, use the first dirstate parent instead.
668 If node1 is None, use the first dirstate parent instead.
669 If node2 is None, compare node1 with working directory.
669 If node2 is None, compare node1 with working directory.
670 """
670 """
671
671
672 def fcmp(fn, mf):
672 def fcmp(fn, mf):
673 t1 = self.wread(fn)
673 t1 = self.wread(fn)
674 return self.file(fn).cmp(mf.get(fn, nullid), t1)
674 return self.file(fn).cmp(mf.get(fn, nullid), t1)
675
675
676 def mfmatches(node):
676 def mfmatches(node):
677 change = self.changelog.read(node)
677 change = self.changelog.read(node)
678 mf = dict(self.manifest.read(change[0]))
678 mf = dict(self.manifest.read(change[0]))
679 for fn in mf.keys():
679 for fn in mf.keys():
680 if not match(fn):
680 if not match(fn):
681 del mf[fn]
681 del mf[fn]
682 return mf
682 return mf
683
683
684 modified, added, removed, deleted, unknown = [], [], [], [], []
684 modified, added, removed, deleted, unknown = [], [], [], [], []
685 ignored, clean = [], []
685 ignored, clean = [], []
686
686
687 compareworking = False
687 compareworking = False
688 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
688 if not node1 or (not node2 and node1 == self.dirstate.parents()[0]):
689 compareworking = True
689 compareworking = True
690
690
691 if not compareworking:
691 if not compareworking:
692 # read the manifest from node1 before the manifest from node2,
692 # read the manifest from node1 before the manifest from node2,
693 # so that we'll hit the manifest cache if we're going through
693 # so that we'll hit the manifest cache if we're going through
694 # all the revisions in parent->child order.
694 # all the revisions in parent->child order.
695 mf1 = mfmatches(node1)
695 mf1 = mfmatches(node1)
696
696
697 # are we comparing the working directory?
697 # are we comparing the working directory?
698 if not node2:
698 if not node2:
699 if not wlock:
699 if not wlock:
700 try:
700 try:
701 wlock = self.wlock(wait=0)
701 wlock = self.wlock(wait=0)
702 except lock.LockException:
702 except lock.LockException:
703 wlock = None
703 wlock = None
704 (lookup, modified, added, removed, deleted, unknown,
704 (lookup, modified, added, removed, deleted, unknown,
705 ignored, clean) = self.dirstate.status(files, match,
705 ignored, clean) = self.dirstate.status(files, match,
706 list_ignored, list_clean)
706 list_ignored, list_clean)
707
707
708 # are we comparing working dir against its parent?
708 # are we comparing working dir against its parent?
709 if compareworking:
709 if compareworking:
710 if lookup:
710 if lookup:
711 # do a full compare of any files that might have changed
711 # do a full compare of any files that might have changed
712 mf2 = mfmatches(self.dirstate.parents()[0])
712 mf2 = mfmatches(self.dirstate.parents()[0])
713 for f in lookup:
713 for f in lookup:
714 if fcmp(f, mf2):
714 if fcmp(f, mf2):
715 modified.append(f)
715 modified.append(f)
716 else:
716 else:
717 clean.append(f)
717 clean.append(f)
718 if wlock is not None:
718 if wlock is not None:
719 self.dirstate.update([f], "n")
719 self.dirstate.update([f], "n")
720 else:
720 else:
721 # we are comparing working dir against non-parent
721 # we are comparing working dir against non-parent
722 # generate a pseudo-manifest for the working dir
722 # generate a pseudo-manifest for the working dir
723 mf2 = mfmatches(self.dirstate.parents()[0])
723 mf2 = mfmatches(self.dirstate.parents()[0])
724 for f in lookup + modified + added:
724 for f in lookup + modified + added:
725 mf2[f] = ""
725 mf2[f] = ""
726 for f in removed:
726 for f in removed:
727 if f in mf2:
727 if f in mf2:
728 del mf2[f]
728 del mf2[f]
729 else:
729 else:
730 # we are comparing two revisions
730 # we are comparing two revisions
731 mf2 = mfmatches(node2)
731 mf2 = mfmatches(node2)
732
732
733 if not compareworking:
733 if not compareworking:
734 # flush lists from dirstate before comparing manifests
734 # flush lists from dirstate before comparing manifests
735 modified, added, clean = [], [], []
735 modified, added, clean = [], [], []
736
736
737 # make sure to sort the files so we talk to the disk in a
737 # make sure to sort the files so we talk to the disk in a
738 # reasonable order
738 # reasonable order
739 mf2keys = mf2.keys()
739 mf2keys = mf2.keys()
740 mf2keys.sort()
740 mf2keys.sort()
741 for fn in mf2keys:
741 for fn in mf2keys:
742 if mf1.has_key(fn):
742 if mf1.has_key(fn):
743 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
743 if mf1[fn] != mf2[fn] and (mf2[fn] != "" or fcmp(fn, mf1)):
744 modified.append(fn)
744 modified.append(fn)
745 elif list_clean:
745 elif list_clean:
746 clean.append(fn)
746 clean.append(fn)
747 del mf1[fn]
747 del mf1[fn]
748 else:
748 else:
749 added.append(fn)
749 added.append(fn)
750
750
751 removed = mf1.keys()
751 removed = mf1.keys()
752
752
753 # sort and return results:
753 # sort and return results:
754 for l in modified, added, removed, deleted, unknown, ignored, clean:
754 for l in modified, added, removed, deleted, unknown, ignored, clean:
755 l.sort()
755 l.sort()
756 return (modified, added, removed, deleted, unknown, ignored, clean)
756 return (modified, added, removed, deleted, unknown, ignored, clean)
757
757
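# A minimal usage sketch of the status() API above, assuming `repo` is an
# existing localrepository (for example obtained via hg.repository(ui, path)):
modified, added, removed, deleted, unknown, ignored, clean = \
    repo.status(list_ignored=True, list_clean=True)
for f in modified:
    print 'M', f
for f in added:
    print 'A', f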
758 def add(self, list, wlock=None):
758 def add(self, list, wlock=None):
759 if not wlock:
759 if not wlock:
760 wlock = self.wlock()
760 wlock = self.wlock()
761 for f in list:
761 for f in list:
762 p = self.wjoin(f)
762 p = self.wjoin(f)
763 if not os.path.exists(p):
763 if not os.path.exists(p):
764 self.ui.warn(_("%s does not exist!\n") % f)
764 self.ui.warn(_("%s does not exist!\n") % f)
765 elif not os.path.isfile(p):
765 elif not os.path.isfile(p):
766 self.ui.warn(_("%s not added: only files supported currently\n")
766 self.ui.warn(_("%s not added: only files supported currently\n")
767 % f)
767 % f)
768 elif self.dirstate.state(f) in 'an':
768 elif self.dirstate.state(f) in 'an':
769 self.ui.warn(_("%s already tracked!\n") % f)
769 self.ui.warn(_("%s already tracked!\n") % f)
770 else:
770 else:
771 self.dirstate.update([f], "a")
771 self.dirstate.update([f], "a")
772
772
773 def forget(self, list, wlock=None):
773 def forget(self, list, wlock=None):
774 if not wlock:
774 if not wlock:
775 wlock = self.wlock()
775 wlock = self.wlock()
776 for f in list:
776 for f in list:
777 if self.dirstate.state(f) not in 'ai':
777 if self.dirstate.state(f) not in 'ai':
778 self.ui.warn(_("%s not added!\n") % f)
778 self.ui.warn(_("%s not added!\n") % f)
779 else:
779 else:
780 self.dirstate.forget([f])
780 self.dirstate.forget([f])
781
781
782 def remove(self, list, unlink=False, wlock=None):
782 def remove(self, list, unlink=False, wlock=None):
783 if unlink:
783 if unlink:
784 for f in list:
784 for f in list:
785 try:
785 try:
786 util.unlink(self.wjoin(f))
786 util.unlink(self.wjoin(f))
787 except OSError, inst:
787 except OSError, inst:
788 if inst.errno != errno.ENOENT:
788 if inst.errno != errno.ENOENT:
789 raise
789 raise
790 if not wlock:
790 if not wlock:
791 wlock = self.wlock()
791 wlock = self.wlock()
792 for f in list:
792 for f in list:
793 p = self.wjoin(f)
793 p = self.wjoin(f)
794 if os.path.exists(p):
794 if os.path.exists(p):
795 self.ui.warn(_("%s still exists!\n") % f)
795 self.ui.warn(_("%s still exists!\n") % f)
796 elif self.dirstate.state(f) == 'a':
796 elif self.dirstate.state(f) == 'a':
797 self.dirstate.forget([f])
797 self.dirstate.forget([f])
798 elif f not in self.dirstate:
798 elif f not in self.dirstate:
799 self.ui.warn(_("%s not tracked!\n") % f)
799 self.ui.warn(_("%s not tracked!\n") % f)
800 else:
800 else:
801 self.dirstate.update([f], "r")
801 self.dirstate.update([f], "r")
802
802
803 def undelete(self, list, wlock=None):
803 def undelete(self, list, wlock=None):
804 p = self.dirstate.parents()[0]
804 p = self.dirstate.parents()[0]
805 mn = self.changelog.read(p)[0]
805 mn = self.changelog.read(p)[0]
806 m = self.manifest.read(mn)
806 m = self.manifest.read(mn)
807 if not wlock:
807 if not wlock:
808 wlock = self.wlock()
808 wlock = self.wlock()
809 for f in list:
809 for f in list:
810 if self.dirstate.state(f) not in "r":
810 if self.dirstate.state(f) not in "r":
811 self.ui.warn("%s not removed!\n" % f)
811 self.ui.warn("%s not removed!\n" % f)
812 else:
812 else:
813 t = self.file(f).read(m[f])
813 t = self.file(f).read(m[f])
814 self.wwrite(f, t)
814 self.wwrite(f, t)
815 util.set_exec(self.wjoin(f), m.execf(f))
815 util.set_exec(self.wjoin(f), m.execf(f))
816 self.dirstate.update([f], "n")
816 self.dirstate.update([f], "n")
817
817
818 def copy(self, source, dest, wlock=None):
818 def copy(self, source, dest, wlock=None):
819 p = self.wjoin(dest)
819 p = self.wjoin(dest)
820 if not os.path.exists(p):
820 if not os.path.exists(p):
821 self.ui.warn(_("%s does not exist!\n") % dest)
821 self.ui.warn(_("%s does not exist!\n") % dest)
822 elif not os.path.isfile(p):
822 elif not os.path.isfile(p):
823 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
823 self.ui.warn(_("copy failed: %s is not a file\n") % dest)
824 else:
824 else:
825 if not wlock:
825 if not wlock:
826 wlock = self.wlock()
826 wlock = self.wlock()
827 if self.dirstate.state(dest) == '?':
827 if self.dirstate.state(dest) == '?':
828 self.dirstate.update([dest], "a")
828 self.dirstate.update([dest], "a")
829 self.dirstate.copy(source, dest)
829 self.dirstate.copy(source, dest)
830
830
831 def heads(self, start=None):
831 def heads(self, start=None):
832 heads = self.changelog.heads(start)
832 heads = self.changelog.heads(start)
833 # sort the output in rev descending order
833 # sort the output in rev descending order
834 heads = [(-self.changelog.rev(h), h) for h in heads]
834 heads = [(-self.changelog.rev(h), h) for h in heads]
835 heads.sort()
835 heads.sort()
836 return [n for (r, n) in heads]
836 return [n for (r, n) in heads]
837
837
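# Small sketch of the decorate-sort-undecorate idiom heads() uses: pairing
# each node with its negated revision number makes a plain sort() return the
# newest head first, with no custom comparator. 'rev' stands in for
# self.changelog.rev here.
def newest_first(nodes, rev):
    decorated = [(-rev(n), n) for n in nodes]
    decorated.sort()
    return [n for (negrev, n) in decorated]

# newest_first(['a', 'b', 'c'], {'a': 0, 'b': 5, 'c': 2}.get) -> ['b', 'c', 'a']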
838 # branchlookup returns a dict giving a list of branches for
838 # branchlookup returns a dict giving a list of branches for
839 # each head. A branch is defined as the tag of a node or
839 # each head. A branch is defined as the tag of a node or
840 # the branch of the node's parents. If a node has multiple
840 # the branch of the node's parents. If a node has multiple
841 # branch tags, tags are eliminated if they are visible from other
841 # branch tags, tags are eliminated if they are visible from other
842 # branch tags.
842 # branch tags.
843 #
843 #
844 # So, for this graph: a->b->c->d->e
844 # So, for this graph: a->b->c->d->e
845 # \ /
845 # \ /
846 # aa -----/
846 # aa -----/
847 # a has tag 2.6.12
847 # a has tag 2.6.12
848 # d has tag 2.6.13
848 # d has tag 2.6.13
849 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
849 # e would have branch tags for 2.6.12 and 2.6.13. Because the node
850 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
850 # for 2.6.12 can be reached from the node 2.6.13, that is eliminated
851 # from the list.
851 # from the list.
852 #
852 #
853 # It is possible that more than one head will have the same branch tag.
853 # It is possible that more than one head will have the same branch tag.
854 # Callers need to check the result for multiple heads under the same
854 # Callers need to check the result for multiple heads under the same
855 # branch tag if that is a problem for them (i.e. checkout of a specific
855 # branch tag if that is a problem for them (i.e. checkout of a specific
856 # branch).
856 # branch).
857 #
857 #
858 # passing in a specific branch will limit the depth of the search
858 # passing in a specific branch will limit the depth of the search
859 # through the parents. It won't limit the branches returned in the
859 # through the parents. It won't limit the branches returned in the
860 # result though.
860 # result though.
861 def branchlookup(self, heads=None, branch=None):
861 def branchlookup(self, heads=None, branch=None):
862 if not heads:
862 if not heads:
863 heads = self.heads()
863 heads = self.heads()
864 headt = [ h for h in heads ]
864 headt = [ h for h in heads ]
865 chlog = self.changelog
865 chlog = self.changelog
866 branches = {}
866 branches = {}
867 merges = []
867 merges = []
868 seenmerge = {}
868 seenmerge = {}
869
869
870 # traverse the tree once for each head, recording in the branches
870 # traverse the tree once for each head, recording in the branches
871 # dict which tags are visible from this head. The branches
871 # dict which tags are visible from this head. The branches
872 # dict also records which tags are visible from each tag
872 # dict also records which tags are visible from each tag
873 # while we traverse.
873 # while we traverse.
874 while headt or merges:
874 while headt or merges:
875 if merges:
875 if merges:
876 n, found = merges.pop()
876 n, found = merges.pop()
877 visit = [n]
877 visit = [n]
878 else:
878 else:
879 h = headt.pop()
879 h = headt.pop()
880 visit = [h]
880 visit = [h]
881 found = [h]
881 found = [h]
882 seen = {}
882 seen = {}
883 while visit:
883 while visit:
884 n = visit.pop()
884 n = visit.pop()
885 if n in seen:
885 if n in seen:
886 continue
886 continue
887 pp = chlog.parents(n)
887 pp = chlog.parents(n)
888 tags = self.nodetags(n)
888 tags = self.nodetags(n)
889 if tags:
889 if tags:
890 for x in tags:
890 for x in tags:
891 if x == 'tip':
891 if x == 'tip':
892 continue
892 continue
893 for f in found:
893 for f in found:
894 branches.setdefault(f, {})[n] = 1
894 branches.setdefault(f, {})[n] = 1
895 branches.setdefault(n, {})[n] = 1
895 branches.setdefault(n, {})[n] = 1
896 break
896 break
897 if n not in found:
897 if n not in found:
898 found.append(n)
898 found.append(n)
899 if branch in tags:
899 if branch in tags:
900 continue
900 continue
901 seen[n] = 1
901 seen[n] = 1
902 if pp[1] != nullid and n not in seenmerge:
902 if pp[1] != nullid and n not in seenmerge:
903 merges.append((pp[1], [x for x in found]))
903 merges.append((pp[1], [x for x in found]))
904 seenmerge[n] = 1
904 seenmerge[n] = 1
905 if pp[0] != nullid:
905 if pp[0] != nullid:
906 visit.append(pp[0])
906 visit.append(pp[0])
907 # traverse the branches dict, eliminating branch tags from each
907 # traverse the branches dict, eliminating branch tags from each
908 # head that are visible from another branch tag for that head.
908 # head that are visible from another branch tag for that head.
909 out = {}
909 out = {}
910 viscache = {}
910 viscache = {}
911 for h in heads:
911 for h in heads:
912 def visible(node):
912 def visible(node):
913 if node in viscache:
913 if node in viscache:
914 return viscache[node]
914 return viscache[node]
915 ret = {}
915 ret = {}
916 visit = [node]
916 visit = [node]
917 while visit:
917 while visit:
918 x = visit.pop()
918 x = visit.pop()
919 if x in viscache:
919 if x in viscache:
920 ret.update(viscache[x])
920 ret.update(viscache[x])
921 elif x not in ret:
921 elif x not in ret:
922 ret[x] = 1
922 ret[x] = 1
923 if x in branches:
923 if x in branches:
924 visit[len(visit):] = branches[x].keys()
924 visit[len(visit):] = branches[x].keys()
925 viscache[node] = ret
925 viscache[node] = ret
926 return ret
926 return ret
927 if h not in branches:
927 if h not in branches:
928 continue
928 continue
929 # O(n^2), but somewhat limited. This only searches the
929 # O(n^2), but somewhat limited. This only searches the
930 # tags visible from a specific head, not all the tags in the
930 # tags visible from a specific head, not all the tags in the
931 # whole repo.
931 # whole repo.
932 for b in branches[h]:
932 for b in branches[h]:
933 vis = False
933 vis = False
934 for bb in branches[h].keys():
934 for bb in branches[h].keys():
935 if b != bb:
935 if b != bb:
936 if b in visible(bb):
936 if b in visible(bb):
937 vis = True
937 vis = True
938 break
938 break
939 if not vis:
939 if not vis:
940 l = out.setdefault(h, [])
940 l = out.setdefault(h, [])
941 l[len(l):] = self.nodetags(b)
941 l[len(l):] = self.nodetags(b)
942 return out
942 return out
943
943
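# Rough sketch of the elimination step above, assuming 'reachable' maps each
# tagged node to the set of nodes visible from it (a stand-in for the
# visible() closure): a branch tag is dropped for a head when it can be
# reached from one of the head's other branch tags.
def eliminate_visible(tags, reachable):
    kept = []
    for t in tags:
        if not any(t in reachable[other] for other in tags if other != t):
            kept.append(t)
    return kept

# With a -> ... -> d and tags on a (2.6.12) and d (2.6.13), 'a' is visible
# from 'd', so only 'd' survives:
# eliminate_visible(['a', 'd'], {'a': {'a'}, 'd': {'a', 'd'}}) -> ['d']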
944 def branches(self, nodes):
944 def branches(self, nodes):
945 if not nodes:
945 if not nodes:
946 nodes = [self.changelog.tip()]
946 nodes = [self.changelog.tip()]
947 b = []
947 b = []
948 for n in nodes:
948 for n in nodes:
949 t = n
949 t = n
950 while 1:
950 while 1:
951 p = self.changelog.parents(n)
951 p = self.changelog.parents(n)
952 if p[1] != nullid or p[0] == nullid:
952 if p[1] != nullid or p[0] == nullid:
953 b.append((t, n, p[0], p[1]))
953 b.append((t, n, p[0], p[1]))
954 break
954 break
955 n = p[0]
955 n = p[0]
956 return b
956 return b
957
957
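# Sketch of the walk done per node in branches(): follow first parents until
# a merge or the root is hit, and report the linear segment as
# (start, end, parent1, parent2). 'parents' maps node -> (p1, p2), with None
# standing in for the null parent.
def linear_segment(start, parents):
    t = n = start
    while True:
        p1, p2 = parents[n]
        if p2 is not None or p1 is None:
            return (t, n, p1, p2)
        n = p1

# linear_segment('c', {'c': ('b', None), 'b': ('a', None), 'a': (None, None)})
# -> ('c', 'a', None, None)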
958 def between(self, pairs):
958 def between(self, pairs):
959 r = []
959 r = []
960
960
961 for top, bottom in pairs:
961 for top, bottom in pairs:
962 n, l, i = top, [], 0
962 n, l, i = top, [], 0
963 f = 1
963 f = 1
964
964
965 while n != bottom:
965 while n != bottom:
966 p = self.changelog.parents(n)[0]
966 p = self.changelog.parents(n)[0]
967 if i == f:
967 if i == f:
968 l.append(n)
968 l.append(n)
969 f = f * 2
969 f = f * 2
970 n = p
970 n = p
971 i += 1
971 i += 1
972
972
973 r.append(l)
973 r.append(l)
974
974
975 return r
975 return r
976
976
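# Sketch of the exponential sampling done in between(): walking from 'top'
# toward 'bottom' along first parents, the nodes at steps 1, 2, 4, 8, ...
# are recorded, so a long range can later be bisected without transferring
# every node in it.
def sample_positions(length):
    positions, i, f = [], 0, 1
    while i < length:
        if i == f:
            positions.append(i)
            f *= 2
        i += 1
    return positions

# sample_positions(20) -> [1, 2, 4, 8, 16]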
977 def findincoming(self, remote, base=None, heads=None, force=False):
977 def findincoming(self, remote, base=None, heads=None, force=False):
978 """Return list of roots of the subsets of missing nodes from remote
978 """Return list of roots of the subsets of missing nodes from remote
979
979
980 If base dict is specified, assume that these nodes and their parents
980 If base dict is specified, assume that these nodes and their parents
981 exist on the remote side and that no child of a node of base exists
981 exist on the remote side and that no child of a node of base exists
982 in both remote and self.
982 in both remote and self.
983 Furthermore base will be updated to include the nodes that exist
983 Furthermore base will be updated to include the nodes that exist
984 in self and remote but none of whose children exist in both self and remote.
984 in self and remote but none of whose children exist in both self and remote.
985 If a list of heads is specified, return only nodes which are heads
985 If a list of heads is specified, return only nodes which are heads
986 or ancestors of these heads.
986 or ancestors of these heads.
987
987
988 All the ancestors of base are in self and in remote.
988 All the ancestors of base are in self and in remote.
989 All the descendants of the list returned are missing in self.
989 All the descendants of the list returned are missing in self.
990 (and so we know that the rest of the nodes are missing in remote, see
990 (and so we know that the rest of the nodes are missing in remote, see
991 outgoing)
991 outgoing)
992 """
992 """
993 m = self.changelog.nodemap
993 m = self.changelog.nodemap
994 search = []
994 search = []
995 fetch = {}
995 fetch = {}
996 seen = {}
996 seen = {}
997 seenbranch = {}
997 seenbranch = {}
998 if base == None:
998 if base == None:
999 base = {}
999 base = {}
1000
1000
1001 if not heads:
1001 if not heads:
1002 heads = remote.heads()
1002 heads = remote.heads()
1003
1003
1004 if self.changelog.tip() == nullid:
1004 if self.changelog.tip() == nullid:
1005 base[nullid] = 1
1005 base[nullid] = 1
1006 if heads != [nullid]:
1006 if heads != [nullid]:
1007 return [nullid]
1007 return [nullid]
1008 return []
1008 return []
1009
1009
1010 # assume we're closer to the tip than the root
1010 # assume we're closer to the tip than the root
1011 # and start by examining the heads
1011 # and start by examining the heads
1012 self.ui.status(_("searching for changes\n"))
1012 self.ui.status(_("searching for changes\n"))
1013
1013
1014 unknown = []
1014 unknown = []
1015 for h in heads:
1015 for h in heads:
1016 if h not in m:
1016 if h not in m:
1017 unknown.append(h)
1017 unknown.append(h)
1018 else:
1018 else:
1019 base[h] = 1
1019 base[h] = 1
1020
1020
1021 if not unknown:
1021 if not unknown:
1022 return []
1022 return []
1023
1023
1024 req = dict.fromkeys(unknown)
1024 req = dict.fromkeys(unknown)
1025 reqcnt = 0
1025 reqcnt = 0
1026
1026
1027 # search through remote branches
1027 # search through remote branches
1028 # a 'branch' here is a linear segment of history, with four parts:
1028 # a 'branch' here is a linear segment of history, with four parts:
1029 # head, root, first parent, second parent
1029 # head, root, first parent, second parent
1030 # (a branch always has two parents (or none) by definition)
1030 # (a branch always has two parents (or none) by definition)
1031 unknown = remote.branches(unknown)
1031 unknown = remote.branches(unknown)
1032 while unknown:
1032 while unknown:
1033 r = []
1033 r = []
1034 while unknown:
1034 while unknown:
1035 n = unknown.pop(0)
1035 n = unknown.pop(0)
1036 if n[0] in seen:
1036 if n[0] in seen:
1037 continue
1037 continue
1038
1038
1039 self.ui.debug(_("examining %s:%s\n")
1039 self.ui.debug(_("examining %s:%s\n")
1040 % (short(n[0]), short(n[1])))
1040 % (short(n[0]), short(n[1])))
1041 if n[0] == nullid: # found the end of the branch
1041 if n[0] == nullid: # found the end of the branch
1042 pass
1042 pass
1043 elif n in seenbranch:
1043 elif n in seenbranch:
1044 self.ui.debug(_("branch already found\n"))
1044 self.ui.debug(_("branch already found\n"))
1045 continue
1045 continue
1046 elif n[1] and n[1] in m: # do we know the base?
1046 elif n[1] and n[1] in m: # do we know the base?
1047 self.ui.debug(_("found incomplete branch %s:%s\n")
1047 self.ui.debug(_("found incomplete branch %s:%s\n")
1048 % (short(n[0]), short(n[1])))
1048 % (short(n[0]), short(n[1])))
1049 search.append(n) # schedule branch range for scanning
1049 search.append(n) # schedule branch range for scanning
1050 seenbranch[n] = 1
1050 seenbranch[n] = 1
1051 else:
1051 else:
1052 if n[1] not in seen and n[1] not in fetch:
1052 if n[1] not in seen and n[1] not in fetch:
1053 if n[2] in m and n[3] in m:
1053 if n[2] in m and n[3] in m:
1054 self.ui.debug(_("found new changeset %s\n") %
1054 self.ui.debug(_("found new changeset %s\n") %
1055 short(n[1]))
1055 short(n[1]))
1056 fetch[n[1]] = 1 # earliest unknown
1056 fetch[n[1]] = 1 # earliest unknown
1057 for p in n[2:4]:
1057 for p in n[2:4]:
1058 if p in m:
1058 if p in m:
1059 base[p] = 1 # latest known
1059 base[p] = 1 # latest known
1060
1060
1061 for p in n[2:4]:
1061 for p in n[2:4]:
1062 if p not in req and p not in m:
1062 if p not in req and p not in m:
1063 r.append(p)
1063 r.append(p)
1064 req[p] = 1
1064 req[p] = 1
1065 seen[n[0]] = 1
1065 seen[n[0]] = 1
1066
1066
1067 if r:
1067 if r:
1068 reqcnt += 1
1068 reqcnt += 1
1069 self.ui.debug(_("request %d: %s\n") %
1069 self.ui.debug(_("request %d: %s\n") %
1070 (reqcnt, " ".join(map(short, r))))
1070 (reqcnt, " ".join(map(short, r))))
1071 for p in range(0, len(r), 10):
1071 for p in range(0, len(r), 10):
1072 for b in remote.branches(r[p:p+10]):
1072 for b in remote.branches(r[p:p+10]):
1073 self.ui.debug(_("received %s:%s\n") %
1073 self.ui.debug(_("received %s:%s\n") %
1074 (short(b[0]), short(b[1])))
1074 (short(b[0]), short(b[1])))
1075 unknown.append(b)
1075 unknown.append(b)
1076
1076
1077 # do binary search on the branches we found
1077 # do binary search on the branches we found
1078 while search:
1078 while search:
1079 n = search.pop(0)
1079 n = search.pop(0)
1080 reqcnt += 1
1080 reqcnt += 1
1081 l = remote.between([(n[0], n[1])])[0]
1081 l = remote.between([(n[0], n[1])])[0]
1082 l.append(n[1])
1082 l.append(n[1])
1083 p = n[0]
1083 p = n[0]
1084 f = 1
1084 f = 1
1085 for i in l:
1085 for i in l:
1086 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1086 self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
1087 if i in m:
1087 if i in m:
1088 if f <= 2:
1088 if f <= 2:
1089 self.ui.debug(_("found new branch changeset %s\n") %
1089 self.ui.debug(_("found new branch changeset %s\n") %
1090 short(p))
1090 short(p))
1091 fetch[p] = 1
1091 fetch[p] = 1
1092 base[i] = 1
1092 base[i] = 1
1093 else:
1093 else:
1094 self.ui.debug(_("narrowed branch search to %s:%s\n")
1094 self.ui.debug(_("narrowed branch search to %s:%s\n")
1095 % (short(p), short(i)))
1095 % (short(p), short(i)))
1096 search.append((p, i))
1096 search.append((p, i))
1097 break
1097 break
1098 p, f = i, f * 2
1098 p, f = i, f * 2
1099
1099
1100 # sanity check our fetch list
1100 # sanity check our fetch list
1101 for f in fetch.keys():
1101 for f in fetch.keys():
1102 if f in m:
1102 if f in m:
1103 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1103 raise repo.RepoError(_("already have changeset ") + short(f[:4]))
1104
1104
1105 if base.keys() == [nullid]:
1105 if base.keys() == [nullid]:
1106 if force:
1106 if force:
1107 self.ui.warn(_("warning: repository is unrelated\n"))
1107 self.ui.warn(_("warning: repository is unrelated\n"))
1108 else:
1108 else:
1109 raise util.Abort(_("repository is unrelated"))
1109 raise util.Abort(_("repository is unrelated"))
1110
1110
1111 self.ui.debug(_("found new changesets starting at ") +
1111 self.ui.debug(_("found new changesets starting at ") +
1112 " ".join([short(f) for f in fetch]) + "\n")
1112 " ".join([short(f) for f in fetch]) + "\n")
1113
1113
1114 self.ui.debug(_("%d total queries\n") % reqcnt)
1114 self.ui.debug(_("%d total queries\n") % reqcnt)
1115
1115
1116 return fetch.keys()
1116 return fetch.keys()
1117
1117
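# Condensed sketch of the narrowing step in the binary search above. For one
# branch segment, 'samples' is the list remote.between() returned (nodes at
# exponentially growing distances from the unknown head, oldest last),
# 'known' is the set of nodes we already have, and 'head' is the unknown end
# of the segment. The first known sample either pins down the earliest
# unknown changeset (when it is only one or two steps away) or yields a
# tighter (unknown, known) pair to search again.
def narrow(head, samples, known):
    p, f = head, 1
    for node in samples:
        if node in known:
            if f <= 2:
                return ('fetch', p)
            return ('search', (p, node))
        p, f = node, f * 2
    return ('fetch', p)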
1118 def findoutgoing(self, remote, base=None, heads=None, force=False):
1118 def findoutgoing(self, remote, base=None, heads=None, force=False):
1119 """Return list of nodes that are roots of subsets not in remote
1119 """Return list of nodes that are roots of subsets not in remote
1120
1120
1121 If base dict is specified, assume that these nodes and their parents
1121 If base dict is specified, assume that these nodes and their parents
1122 exist on the remote side.
1122 exist on the remote side.
1123 If a list of heads is specified, return only nodes which are heads
1123 If a list of heads is specified, return only nodes which are heads
1124 or ancestors of these heads, and return a second element which
1124 or ancestors of these heads, and return a second element which
1125 contains all remote heads which get new children.
1125 contains all remote heads which get new children.
1126 """
1126 """
1127 if base == None:
1127 if base == None:
1128 base = {}
1128 base = {}
1129 self.findincoming(remote, base, heads, force=force)
1129 self.findincoming(remote, base, heads, force=force)
1130
1130
1131 self.ui.debug(_("common changesets up to ")
1131 self.ui.debug(_("common changesets up to ")
1132 + " ".join(map(short, base.keys())) + "\n")
1132 + " ".join(map(short, base.keys())) + "\n")
1133
1133
1134 remain = dict.fromkeys(self.changelog.nodemap)
1134 remain = dict.fromkeys(self.changelog.nodemap)
1135
1135
1136 # prune everything remote has from the tree
1136 # prune everything remote has from the tree
1137 del remain[nullid]
1137 del remain[nullid]
1138 remove = base.keys()
1138 remove = base.keys()
1139 while remove:
1139 while remove:
1140 n = remove.pop(0)
1140 n = remove.pop(0)
1141 if n in remain:
1141 if n in remain:
1142 del remain[n]
1142 del remain[n]
1143 for p in self.changelog.parents(n):
1143 for p in self.changelog.parents(n):
1144 remove.append(p)
1144 remove.append(p)
1145
1145
1146 # find every node whose parents have been pruned
1146 # find every node whose parents have been pruned
1147 subset = []
1147 subset = []
1148 # find every remote head that will get new children
1148 # find every remote head that will get new children
1149 updated_heads = {}
1149 updated_heads = {}
1150 for n in remain:
1150 for n in remain:
1151 p1, p2 = self.changelog.parents(n)
1151 p1, p2 = self.changelog.parents(n)
1152 if p1 not in remain and p2 not in remain:
1152 if p1 not in remain and p2 not in remain:
1153 subset.append(n)
1153 subset.append(n)
1154 if heads:
1154 if heads:
1155 if p1 in heads:
1155 if p1 in heads:
1156 updated_heads[p1] = True
1156 updated_heads[p1] = True
1157 if p2 in heads:
1157 if p2 in heads:
1158 updated_heads[p2] = True
1158 updated_heads[p2] = True
1159
1159
1160 # this is the set of all roots we have to push
1160 # this is the set of all roots we have to push
1161 if heads:
1161 if heads:
1162 return subset, updated_heads.keys()
1162 return subset, updated_heads.keys()
1163 else:
1163 else:
1164 return subset
1164 return subset
1165
1165
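# Sketch of the pruning in findoutgoing(), with sets instead of dicts: drop
# every node the remote already has (the common bases and all of their
# ancestors); the outgoing roots are then the surviving nodes none of whose
# parents survived. 'parents' maps node -> tuple of parents (empty for a
# root) and stands in for the changelog here.
def outgoing_roots(all_nodes, common_bases, parents):
    remain = set(all_nodes)
    queue = list(common_bases)
    while queue:
        n = queue.pop()
        if n in remain:
            remain.discard(n)
            queue.extend(parents[n])
    return [n for n in remain
            if not any(p in remain for p in parents[n])]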
1166 def pull(self, remote, heads=None, force=False, lock=None):
1166 def pull(self, remote, heads=None, force=False, lock=None):
1167 mylock = False
1167 mylock = False
1168 if not lock:
1168 if not lock:
1169 lock = self.lock()
1169 lock = self.lock()
1170 mylock = True
1170 mylock = True
1171
1171
1172 try:
1172 try:
1173 fetch = self.findincoming(remote, force=force)
1173 fetch = self.findincoming(remote, force=force)
1174 if fetch == [nullid]:
1174 if fetch == [nullid]:
1175 self.ui.status(_("requesting all changes\n"))
1175 self.ui.status(_("requesting all changes\n"))
1176
1176
1177 if not fetch:
1177 if not fetch:
1178 self.ui.status(_("no changes found\n"))
1178 self.ui.status(_("no changes found\n"))
1179 return 0
1179 return 0
1180
1180
1181 if heads is None:
1181 if heads is None:
1182 cg = remote.changegroup(fetch, 'pull')
1182 cg = remote.changegroup(fetch, 'pull')
1183 else:
1183 else:
1184 cg = remote.changegroupsubset(fetch, heads, 'pull')
1184 cg = remote.changegroupsubset(fetch, heads, 'pull')
1185 return self.addchangegroup(cg, 'pull', remote.url())
1185 return self.addchangegroup(cg, 'pull', remote.url())
1186 finally:
1186 finally:
1187 if mylock:
1187 if mylock:
1188 lock.release()
1188 lock.release()
1189
1189
1190 def push(self, remote, force=False, revs=None):
1190 def push(self, remote, force=False, revs=None):
1191 # there are two ways to push to remote repo:
1191 # there are two ways to push to remote repo:
1192 #
1192 #
1193 # addchangegroup assumes local user can lock remote
1193 # addchangegroup assumes local user can lock remote
1194 # repo (local filesystem, old ssh servers).
1194 # repo (local filesystem, old ssh servers).
1195 #
1195 #
1196 # unbundle assumes local user cannot lock remote repo (new ssh
1196 # unbundle assumes local user cannot lock remote repo (new ssh
1197 # servers, http servers).
1197 # servers, http servers).
1198
1198
1199 if remote.capable('unbundle'):
1199 if remote.capable('unbundle'):
1200 return self.push_unbundle(remote, force, revs)
1200 return self.push_unbundle(remote, force, revs)
1201 return self.push_addchangegroup(remote, force, revs)
1201 return self.push_addchangegroup(remote, force, revs)
1202
1202
1203 def prepush(self, remote, force, revs):
1203 def prepush(self, remote, force, revs):
1204 base = {}
1204 base = {}
1205 remote_heads = remote.heads()
1205 remote_heads = remote.heads()
1206 inc = self.findincoming(remote, base, remote_heads, force=force)
1206 inc = self.findincoming(remote, base, remote_heads, force=force)
1207 if not force and inc:
1207 if not force and inc:
1208 self.ui.warn(_("abort: unsynced remote changes!\n"))
1208 self.ui.warn(_("abort: unsynced remote changes!\n"))
1209 self.ui.status(_("(did you forget to sync?"
1209 self.ui.status(_("(did you forget to sync?"
1210 " use push -f to force)\n"))
1210 " use push -f to force)\n"))
1211 return None, 1
1211 return None, 1
1212
1212
1213 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1213 update, updated_heads = self.findoutgoing(remote, base, remote_heads)
1214 if revs is not None:
1214 if revs is not None:
1215 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1215 msng_cl, bases, heads = self.changelog.nodesbetween(update, revs)
1216 else:
1216 else:
1217 bases, heads = update, self.changelog.heads()
1217 bases, heads = update, self.changelog.heads()
1218
1218
1219 if not bases:
1219 if not bases:
1220 self.ui.status(_("no changes found\n"))
1220 self.ui.status(_("no changes found\n"))
1221 return None, 1
1221 return None, 1
1222 elif not force:
1222 elif not force:
1223 # FIXME we don't properly detect creation of new heads
1223 # FIXME we don't properly detect creation of new heads
1224 # in the push -r case, assume the user knows what he's doing
1224 # in the push -r case, assume the user knows what he's doing
1225 if not revs and len(remote_heads) < len(heads) \
1225 if not revs and len(remote_heads) < len(heads) \
1226 and remote_heads != [nullid]:
1226 and remote_heads != [nullid]:
1227 self.ui.warn(_("abort: push creates new remote branches!\n"))
1227 self.ui.warn(_("abort: push creates new remote branches!\n"))
1228 self.ui.status(_("(did you forget to merge?"
1228 self.ui.status(_("(did you forget to merge?"
1229 " use push -f to force)\n"))
1229 " use push -f to force)\n"))
1230 return None, 1
1230 return None, 1
1231
1231
1232 if revs is None:
1232 if revs is None:
1233 cg = self.changegroup(update, 'push')
1233 cg = self.changegroup(update, 'push')
1234 else:
1234 else:
1235 cg = self.changegroupsubset(update, revs, 'push')
1235 cg = self.changegroupsubset(update, revs, 'push')
1236 return cg, remote_heads
1236 return cg, remote_heads
1237
1237
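# Sketch of the two safety checks prepush() applies when --force is not
# given: refuse if the remote has changes we have not pulled, and refuse a
# full push that would leave the remote with more heads than it already has
# ('null_head' stands in for the nullid sentinel of an empty repository).
def push_allowed(incoming, pushing_specific_revs, local_heads, remote_heads,
                 null_head=None):
    if incoming:
        return False        # unsynced remote changes
    if (not pushing_specific_revs and remote_heads != [null_head]
            and len(remote_heads) < len(local_heads)):
        return False        # push would create new remote heads
    return True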
1238 def push_addchangegroup(self, remote, force, revs):
1238 def push_addchangegroup(self, remote, force, revs):
1239 lock = remote.lock()
1239 lock = remote.lock()
1240
1240
1241 ret = self.prepush(remote, force, revs)
1241 ret = self.prepush(remote, force, revs)
1242 if ret[0] is not None:
1242 if ret[0] is not None:
1243 cg, remote_heads = ret
1243 cg, remote_heads = ret
1244 return remote.addchangegroup(cg, 'push', self.url())
1244 return remote.addchangegroup(cg, 'push', self.url())
1245 return ret[1]
1245 return ret[1]
1246
1246
1247 def push_unbundle(self, remote, force, revs):
1247 def push_unbundle(self, remote, force, revs):
1248 # local repo finds heads on server, finds out what revs it
1248 # local repo finds heads on server, finds out what revs it
1249 # must push. once revs transferred, if server finds it has
1249 # must push. once revs transferred, if server finds it has
1250 # different heads (someone else won commit/push race), server
1250 # different heads (someone else won commit/push race), server
1251 # aborts.
1251 # aborts.
1252
1252
1253 ret = self.prepush(remote, force, revs)
1253 ret = self.prepush(remote, force, revs)
1254 if ret[0] is not None:
1254 if ret[0] is not None:
1255 cg, remote_heads = ret
1255 cg, remote_heads = ret
1256 if force: remote_heads = ['force']
1256 if force: remote_heads = ['force']
1257 return remote.unbundle(cg, remote_heads, 'push')
1257 return remote.unbundle(cg, remote_heads, 'push')
1258 return ret[1]
1258 return ret[1]
1259
1259
1260 def changegroupsubset(self, bases, heads, source):
1260 def changegroupsubset(self, bases, heads, source):
1261 """This function generates a changegroup consisting of all the nodes
1261 """This function generates a changegroup consisting of all the nodes
1262 that are descendants of any of the bases, and ancestors of any of
1262 that are descendants of any of the bases, and ancestors of any of
1263 the heads.
1263 the heads.
1264
1264
1265 It is fairly complex as determining which filenodes and which
1265 It is fairly complex as determining which filenodes and which
1266 manifest nodes need to be included for the changeset to be complete
1266 manifest nodes need to be included for the changeset to be complete
1267 is non-trivial.
1267 is non-trivial.
1268
1268
1269 Another wrinkle is doing the reverse, figuring out which changeset in
1269 Another wrinkle is doing the reverse, figuring out which changeset in
1270 the changegroup a particular filenode or manifestnode belongs to."""
1270 the changegroup a particular filenode or manifestnode belongs to."""
1271
1271
1272 self.hook('preoutgoing', throw=True, source=source)
1272 self.hook('preoutgoing', throw=True, source=source)
1273
1273
1274 # Set up some initial variables
1274 # Set up some initial variables
1275 # Make it easy to refer to self.changelog
1275 # Make it easy to refer to self.changelog
1276 cl = self.changelog
1276 cl = self.changelog
1277 # msng is short for missing - compute the list of changesets in this
1277 # msng is short for missing - compute the list of changesets in this
1278 # changegroup.
1278 # changegroup.
1279 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1279 msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads)
1280 # Some bases may turn out to be superfluous, and some heads may be
1280 # Some bases may turn out to be superfluous, and some heads may be
1281 # too. nodesbetween will return the minimal set of bases and heads
1281 # too. nodesbetween will return the minimal set of bases and heads
1282 # necessary to re-create the changegroup.
1282 # necessary to re-create the changegroup.
1283
1283
1284 # Known heads are the list of heads that it is assumed the recipient
1284 # Known heads are the list of heads that it is assumed the recipient
1285 # of this changegroup will know about.
1285 # of this changegroup will know about.
1286 knownheads = {}
1286 knownheads = {}
1287 # We assume that all parents of bases are known heads.
1287 # We assume that all parents of bases are known heads.
1288 for n in bases:
1288 for n in bases:
1289 for p in cl.parents(n):
1289 for p in cl.parents(n):
1290 if p != nullid:
1290 if p != nullid:
1291 knownheads[p] = 1
1291 knownheads[p] = 1
1292 knownheads = knownheads.keys()
1292 knownheads = knownheads.keys()
1293 if knownheads:
1293 if knownheads:
1294 # Now that we know what heads are known, we can compute which
1294 # Now that we know what heads are known, we can compute which
1295 # changesets are known. The recipient must know about all
1295 # changesets are known. The recipient must know about all
1296 # changesets required to reach the known heads from the null
1296 # changesets required to reach the known heads from the null
1297 # changeset.
1297 # changeset.
1298 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1298 has_cl_set, junk, junk = cl.nodesbetween(None, knownheads)
1299 junk = None
1299 junk = None
1300 # Transform the list into an ersatz set.
1300 # Transform the list into an ersatz set.
1301 has_cl_set = dict.fromkeys(has_cl_set)
1301 has_cl_set = dict.fromkeys(has_cl_set)
1302 else:
1302 else:
1303 # If there were no known heads, the recipient cannot be assumed to
1303 # If there were no known heads, the recipient cannot be assumed to
1304 # know about any changesets.
1304 # know about any changesets.
1305 has_cl_set = {}
1305 has_cl_set = {}
1306
1306
1307 # Make it easy to refer to self.manifest
1307 # Make it easy to refer to self.manifest
1308 mnfst = self.manifest
1308 mnfst = self.manifest
1309 # We don't know which manifests are missing yet
1309 # We don't know which manifests are missing yet
1310 msng_mnfst_set = {}
1310 msng_mnfst_set = {}
1311 # Nor do we know which filenodes are missing.
1311 # Nor do we know which filenodes are missing.
1312 msng_filenode_set = {}
1312 msng_filenode_set = {}
1313
1313
1314 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1314 junk = mnfst.index[mnfst.count() - 1] # Get around a bug in lazyindex
1315 junk = None
1315 junk = None
1316
1316
1317 # A changeset always belongs to itself, so the changenode lookup
1317 # A changeset always belongs to itself, so the changenode lookup
1318 # function for a changenode is identity.
1318 # function for a changenode is identity.
1319 def identity(x):
1319 def identity(x):
1320 return x
1320 return x
1321
1321
1322 # A function generating function. Sets up an environment for the
1322 # A function generating function. Sets up an environment for the
1323 # inner function.
1323 # inner function.
1324 def cmp_by_rev_func(revlog):
1324 def cmp_by_rev_func(revlog):
1325 # Compare two nodes by their revision number in the environment's
1325 # Compare two nodes by their revision number in the environment's
1326 # revision history. Since the revision number both represents the
1326 # revision history. Since the revision number both represents the
1327 # most efficient order to read the nodes in, and represents a
1327 # most efficient order to read the nodes in, and represents a
1328 # topological sorting of the nodes, this function is often useful.
1328 # topological sorting of the nodes, this function is often useful.
1329 def cmp_by_rev(a, b):
1329 def cmp_by_rev(a, b):
1330 return cmp(revlog.rev(a), revlog.rev(b))
1330 return cmp(revlog.rev(a), revlog.rev(b))
1331 return cmp_by_rev
1331 return cmp_by_rev
1332
1332
1333 # If we determine that a particular file or manifest node must be a
1333 # If we determine that a particular file or manifest node must be a
1334 # node that the recipient of the changegroup will already have, we can
1334 # node that the recipient of the changegroup will already have, we can
1335 # also assume the recipient will have all the parents. This function
1335 # also assume the recipient will have all the parents. This function
1336 # prunes them from the set of missing nodes.
1336 # prunes them from the set of missing nodes.
1337 def prune_parents(revlog, hasset, msngset):
1337 def prune_parents(revlog, hasset, msngset):
1338 haslst = hasset.keys()
1338 haslst = hasset.keys()
1339 haslst.sort(cmp_by_rev_func(revlog))
1339 haslst.sort(cmp_by_rev_func(revlog))
1340 for node in haslst:
1340 for node in haslst:
1341 parentlst = [p for p in revlog.parents(node) if p != nullid]
1341 parentlst = [p for p in revlog.parents(node) if p != nullid]
1342 while parentlst:
1342 while parentlst:
1343 n = parentlst.pop()
1343 n = parentlst.pop()
1344 if n not in hasset:
1344 if n not in hasset:
1345 hasset[n] = 1
1345 hasset[n] = 1
1346 p = [p for p in revlog.parents(n) if p != nullid]
1346 p = [p for p in revlog.parents(n) if p != nullid]
1347 parentlst.extend(p)
1347 parentlst.extend(p)
1348 for n in hasset:
1348 for n in hasset:
1349 msngset.pop(n, None)
1349 msngset.pop(n, None)
1350
1350
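# Sketch of prune_parents() with plain sets: once a node is known to exist on
# the recipient side, all of its ancestors must exist there too, so they are
# added to 'has' and dropped from 'missing'. 'parents' maps node -> tuple of
# parents (empty for a root) and stands in for revlog.parents().
def prune_known(has, missing, parents):
    stack = list(has)
    while stack:
        n = stack.pop()
        for p in parents[n]:
            if p not in has:
                has.add(p)
                stack.append(p)
    missing.difference_update(has)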
1351 # This is a function generating function used to set up an environment
1351 # This is a function generating function used to set up an environment
1352 # for the inner function to execute in.
1352 # for the inner function to execute in.
1353 def manifest_and_file_collector(changedfileset):
1353 def manifest_and_file_collector(changedfileset):
1354 # This is an information gathering function that gathers
1354 # This is an information gathering function that gathers
1355 # information from each changeset node that goes out as part of
1355 # information from each changeset node that goes out as part of
1356 # the changegroup. The information gathered is a list of which
1356 # the changegroup. The information gathered is a list of which
1357 # manifest nodes are potentially required (the recipient may
1357 # manifest nodes are potentially required (the recipient may
1358 # already have them) and the total list of all files which were
1358 # already have them) and the total list of all files which were
1359 # changed in any changeset in the changegroup.
1359 # changed in any changeset in the changegroup.
1360 #
1360 #
1361 # We also remember the first changenode we saw each manifest
1361 # We also remember the first changenode we saw each manifest
1362 # referenced by, so we can later determine which changenode 'owns'
1362 # referenced by, so we can later determine which changenode 'owns'
1363 # the manifest.
1363 # the manifest.
1364 def collect_manifests_and_files(clnode):
1364 def collect_manifests_and_files(clnode):
1365 c = cl.read(clnode)
1365 c = cl.read(clnode)
1366 for f in c[3]:
1366 for f in c[3]:
1367 # This is to make sure we only have one instance of each
1367 # This is to make sure we only have one instance of each
1368 # filename string for each filename.
1368 # filename string for each filename.
1369 changedfileset.setdefault(f, f)
1369 changedfileset.setdefault(f, f)
1370 msng_mnfst_set.setdefault(c[0], clnode)
1370 msng_mnfst_set.setdefault(c[0], clnode)
1371 return collect_manifests_and_files
1371 return collect_manifests_and_files
1372
1372
1373 # Figure out which manifest nodes (of the ones we think might be part
1373 # Figure out which manifest nodes (of the ones we think might be part
1374 # of the changegroup) the recipient must know about and remove them
1374 # of the changegroup) the recipient must know about and remove them
1375 # from the changegroup.
1375 # from the changegroup.
1376 def prune_manifests():
1376 def prune_manifests():
1377 has_mnfst_set = {}
1377 has_mnfst_set = {}
1378 for n in msng_mnfst_set:
1378 for n in msng_mnfst_set:
1379 # If a 'missing' manifest thinks it belongs to a changenode
1379 # If a 'missing' manifest thinks it belongs to a changenode
1380 # the recipient is assumed to have, obviously the recipient
1380 # the recipient is assumed to have, obviously the recipient
1381 # must have that manifest.
1381 # must have that manifest.
1382 linknode = cl.node(mnfst.linkrev(n))
1382 linknode = cl.node(mnfst.linkrev(n))
1383 if linknode in has_cl_set:
1383 if linknode in has_cl_set:
1384 has_mnfst_set[n] = 1
1384 has_mnfst_set[n] = 1
1385 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1385 prune_parents(mnfst, has_mnfst_set, msng_mnfst_set)
1386
1386
1387 # Use the information collected in collect_manifests_and_files to say
1387 # Use the information collected in collect_manifests_and_files to say
1388 # which changenode any manifestnode belongs to.
1388 # which changenode any manifestnode belongs to.
1389 def lookup_manifest_link(mnfstnode):
1389 def lookup_manifest_link(mnfstnode):
1390 return msng_mnfst_set[mnfstnode]
1390 return msng_mnfst_set[mnfstnode]
1391
1391
1392 # A function generating function that sets up the initial environment
1392 # A function generating function that sets up the initial environment
1393 # for the inner function.
1393 # for the inner function.
1394 def filenode_collector(changedfiles):
1394 def filenode_collector(changedfiles):
1395 next_rev = [0]
1395 next_rev = [0]
1396 # This gathers information from each manifestnode included in the
1396 # This gathers information from each manifestnode included in the
1397 # changegroup about which filenodes the manifest node references
1397 # changegroup about which filenodes the manifest node references
1398 # so we can include those in the changegroup too.
1398 # so we can include those in the changegroup too.
1399 #
1399 #
1400 # It also remembers which changenode each filenode belongs to. It
1400 # It also remembers which changenode each filenode belongs to. It
1401 # does this by assuming that a filenode belongs to the changenode that
1401 # does this by assuming that a filenode belongs to the changenode that
1402 # the first manifest referencing it belongs to.
1402 # the first manifest referencing it belongs to.
1403 def collect_msng_filenodes(mnfstnode):
1403 def collect_msng_filenodes(mnfstnode):
1404 r = mnfst.rev(mnfstnode)
1404 r = mnfst.rev(mnfstnode)
1405 if r == next_rev[0]:
1405 if r == next_rev[0]:
1406 # If the last rev we looked at was the one just previous,
1406 # If the last rev we looked at was the one just previous,
1407 # we only need to see a diff.
1407 # we only need to see a diff.
1408 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1408 delta = mdiff.patchtext(mnfst.delta(mnfstnode))
1409 # For each line in the delta
1409 # For each line in the delta
1410 for dline in delta.splitlines():
1410 for dline in delta.splitlines():
1411 # get the filename and filenode for that line
1411 # get the filename and filenode for that line
1412 f, fnode = dline.split('\0')
1412 f, fnode = dline.split('\0')
1413 fnode = bin(fnode[:40])
1413 fnode = bin(fnode[:40])
1414 f = changedfiles.get(f, None)
1414 f = changedfiles.get(f, None)
1415 # And if the file is in the list of files we care
1415 # And if the file is in the list of files we care
1416 # about.
1416 # about.
1417 if f is not None:
1417 if f is not None:
1418 # Get the changenode this manifest belongs to
1418 # Get the changenode this manifest belongs to
1419 clnode = msng_mnfst_set[mnfstnode]
1419 clnode = msng_mnfst_set[mnfstnode]
1420 # Create the set of filenodes for the file if
1420 # Create the set of filenodes for the file if
1421 # there isn't one already.
1421 # there isn't one already.
1422 ndset = msng_filenode_set.setdefault(f, {})
1422 ndset = msng_filenode_set.setdefault(f, {})
1423 # And set the filenode's changelog node to the
1423 # And set the filenode's changelog node to the
1424 # manifest's if it hasn't been set already.
1424 # manifest's if it hasn't been set already.
1425 ndset.setdefault(fnode, clnode)
1425 ndset.setdefault(fnode, clnode)
1426 else:
1426 else:
1427 # Otherwise we need a full manifest.
1427 # Otherwise we need a full manifest.
1428 m = mnfst.read(mnfstnode)
1428 m = mnfst.read(mnfstnode)
1429 # For every file we care about.
1429 # For every file we care about.
1430 for f in changedfiles:
1430 for f in changedfiles:
1431 fnode = m.get(f, None)
1431 fnode = m.get(f, None)
1432 # If it's in the manifest
1432 # If it's in the manifest
1433 if fnode is not None:
1433 if fnode is not None:
1434 # See comments above.
1434 # See comments above.
1435 clnode = msng_mnfst_set[mnfstnode]
1435 clnode = msng_mnfst_set[mnfstnode]
1436 ndset = msng_filenode_set.setdefault(f, {})
1436 ndset = msng_filenode_set.setdefault(f, {})
1437 ndset.setdefault(fnode, clnode)
1437 ndset.setdefault(fnode, clnode)
1438 # Remember the revision we hope to see next.
1438 # Remember the revision we hope to see next.
1439 next_rev[0] = r + 1
1439 next_rev[0] = r + 1
1440 return collect_msng_filenodes
1440 return collect_msng_filenodes
1441
1441
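# Sketch of the manifest line format being parsed above: each manifest (and
# manifest delta) line looks like "<filename>\0<40 hex chars of filenode>",
# optionally followed by a flag character, so the filenode is recovered by
# splitting on NUL and un-hexing the first forty characters.
import binascii

def parse_manifest_line(line):
    fname, rest = line.split('\0', 1)
    return fname, binascii.unhexlify(rest[:40])

# parse_manifest_line('foo/bar.c\0' + 'ab' * 20)[0] -> 'foo/bar.c'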
1442 # We have a list of filenodes we think we need for a file, let's remove
1442 # We have a list of filenodes we think we need for a file, let's remove
1443 # all those we know the recipient must have.
1443 # all those we know the recipient must have.
1444 def prune_filenodes(f, filerevlog):
1444 def prune_filenodes(f, filerevlog):
1445 msngset = msng_filenode_set[f]
1445 msngset = msng_filenode_set[f]
1446 hasset = {}
1446 hasset = {}
1447 # If a 'missing' filenode thinks it belongs to a changenode we
1447 # If a 'missing' filenode thinks it belongs to a changenode we
1448 # assume the recipient must have, then the recipient must have
1448 # assume the recipient must have, then the recipient must have
1449 # that filenode.
1449 # that filenode.
1450 for n in msngset:
1450 for n in msngset:
1451 clnode = cl.node(filerevlog.linkrev(n))
1451 clnode = cl.node(filerevlog.linkrev(n))
1452 if clnode in has_cl_set:
1452 if clnode in has_cl_set:
1453 hasset[n] = 1
1453 hasset[n] = 1
1454 prune_parents(filerevlog, hasset, msngset)
1454 prune_parents(filerevlog, hasset, msngset)
1455
1455
1456 # A function generating function that sets up a context for the
1456 # A function generating function that sets up a context for the
1457 # inner function.
1457 # inner function.
1458 def lookup_filenode_link_func(fname):
1458 def lookup_filenode_link_func(fname):
1459 msngset = msng_filenode_set[fname]
1459 msngset = msng_filenode_set[fname]
1460 # Lookup the changenode the filenode belongs to.
1460 # Lookup the changenode the filenode belongs to.
1461 def lookup_filenode_link(fnode):
1461 def lookup_filenode_link(fnode):
1462 return msngset[fnode]
1462 return msngset[fnode]
1463 return lookup_filenode_link
1463 return lookup_filenode_link
1464
1464
1465 # Now that we have all these utility functions to help out and
1465 # Now that we have all these utility functions to help out and
1466 # logically divide up the task, generate the group.
1466 # logically divide up the task, generate the group.
1467 def gengroup():
1467 def gengroup():
1468 # The set of changed files starts empty.
1468 # The set of changed files starts empty.
1469 changedfiles = {}
1469 changedfiles = {}
1470 # Create a changenode group generator that will call our functions
1470 # Create a changenode group generator that will call our functions
1471 # back to lookup the owning changenode and collect information.
1471 # back to lookup the owning changenode and collect information.
1472 group = cl.group(msng_cl_lst, identity,
1472 group = cl.group(msng_cl_lst, identity,
1473 manifest_and_file_collector(changedfiles))
1473 manifest_and_file_collector(changedfiles))
1474 for chnk in group:
1474 for chnk in group:
1475 yield chnk
1475 yield chnk
1476
1476
1477 # The list of manifests has been collected by the generator
1477 # The list of manifests has been collected by the generator
1478 # calling our functions back.
1478 # calling our functions back.
1479 prune_manifests()
1479 prune_manifests()
1480 msng_mnfst_lst = msng_mnfst_set.keys()
1480 msng_mnfst_lst = msng_mnfst_set.keys()
1481 # Sort the manifestnodes by revision number.
1481 # Sort the manifestnodes by revision number.
1482 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1482 msng_mnfst_lst.sort(cmp_by_rev_func(mnfst))
1483 # Create a generator for the manifestnodes that calls our lookup
1483 # Create a generator for the manifestnodes that calls our lookup
1484 # and data collection functions back.
1484 # and data collection functions back.
1485 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1485 group = mnfst.group(msng_mnfst_lst, lookup_manifest_link,
1486 filenode_collector(changedfiles))
1486 filenode_collector(changedfiles))
1487 for chnk in group:
1487 for chnk in group:
1488 yield chnk
1488 yield chnk
1489
1489
1490 # These are no longer needed, dereference and toss the memory for
1490 # These are no longer needed, dereference and toss the memory for
1491 # them.
1491 # them.
1492 msng_mnfst_lst = None
1492 msng_mnfst_lst = None
1493 msng_mnfst_set.clear()
1493 msng_mnfst_set.clear()
1494
1494
1495 changedfiles = changedfiles.keys()
1495 changedfiles = changedfiles.keys()
1496 changedfiles.sort()
1496 changedfiles.sort()
1497 # Go through all our files in order sorted by name.
1497 # Go through all our files in order sorted by name.
1498 for fname in changedfiles:
1498 for fname in changedfiles:
1499 filerevlog = self.file(fname)
1499 filerevlog = self.file(fname)
1500 # Toss out the filenodes that the recipient isn't really
1500 # Toss out the filenodes that the recipient isn't really
1501 # missing.
1501 # missing.
1502 if msng_filenode_set.has_key(fname):
1502 if msng_filenode_set.has_key(fname):
1503 prune_filenodes(fname, filerevlog)
1503 prune_filenodes(fname, filerevlog)
1504 msng_filenode_lst = msng_filenode_set[fname].keys()
1504 msng_filenode_lst = msng_filenode_set[fname].keys()
1505 else:
1505 else:
1506 msng_filenode_lst = []
1506 msng_filenode_lst = []
1507 # If any filenodes are left, generate the group for them,
1507 # If any filenodes are left, generate the group for them,
1508 # otherwise don't bother.
1508 # otherwise don't bother.
1509 if len(msng_filenode_lst) > 0:
1509 if len(msng_filenode_lst) > 0:
1510 yield changegroup.genchunk(fname)
1510 yield changegroup.genchunk(fname)
1511 # Sort the filenodes by their revision #
1511 # Sort the filenodes by their revision #
1512 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1512 msng_filenode_lst.sort(cmp_by_rev_func(filerevlog))
1513 # Create a group generator and only pass in a changenode
1513 # Create a group generator and only pass in a changenode
1514 # lookup function as we need to collect no information
1514 # lookup function as we need to collect no information
1515 # from filenodes.
1515 # from filenodes.
1516 group = filerevlog.group(msng_filenode_lst,
1516 group = filerevlog.group(msng_filenode_lst,
1517 lookup_filenode_link_func(fname))
1517 lookup_filenode_link_func(fname))
1518 for chnk in group:
1518 for chnk in group:
1519 yield chnk
1519 yield chnk
1520 if msng_filenode_set.has_key(fname):
1520 if msng_filenode_set.has_key(fname):
1521 # Don't need this anymore, toss it to free memory.
1521 # Don't need this anymore, toss it to free memory.
1522 del msng_filenode_set[fname]
1522 del msng_filenode_set[fname]
1523 # Signal that no more groups are left.
1523 # Signal that no more groups are left.
1524 yield changegroup.closechunk()
1524 yield changegroup.closechunk()
1525
1525
1526 if msng_cl_lst:
1526 if msng_cl_lst:
1527 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1527 self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source)
1528
1528
1529 return util.chunkbuffer(gengroup())
1529 return util.chunkbuffer(gengroup())
1530
1530
1531 def changegroup(self, basenodes, source):
1531 def changegroup(self, basenodes, source):
1532 """Generate a changegroup of all nodes that we have that a recipient
1532 """Generate a changegroup of all nodes that we have that a recipient
1533 doesn't.
1533 doesn't.
1534
1534
1535 This is much easier than the previous function as we can assume that
1535 This is much easier than the previous function as we can assume that
1536 the recipient has any changenode we aren't sending them."""
1536 the recipient has any changenode we aren't sending them."""
1537
1537
1538 self.hook('preoutgoing', throw=True, source=source)
1538 self.hook('preoutgoing', throw=True, source=source)
1539
1539
1540 cl = self.changelog
1540 cl = self.changelog
1541 nodes = cl.nodesbetween(basenodes, None)[0]
1541 nodes = cl.nodesbetween(basenodes, None)[0]
1542 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1542 revset = dict.fromkeys([cl.rev(n) for n in nodes])
1543
1543
1544 def identity(x):
1544 def identity(x):
1545 return x
1545 return x
1546
1546
1547 def gennodelst(revlog):
1547 def gennodelst(revlog):
1548 for r in xrange(0, revlog.count()):
1548 for r in xrange(0, revlog.count()):
1549 n = revlog.node(r)
1549 n = revlog.node(r)
1550 if revlog.linkrev(n) in revset:
1550 if revlog.linkrev(n) in revset:
1551 yield n
1551 yield n
1552
1552
1553 def changed_file_collector(changedfileset):
1553 def changed_file_collector(changedfileset):
1554 def collect_changed_files(clnode):
1554 def collect_changed_files(clnode):
1555 c = cl.read(clnode)
1555 c = cl.read(clnode)
1556 for fname in c[3]:
1556 for fname in c[3]:
1557 changedfileset[fname] = 1
1557 changedfileset[fname] = 1
1558 return collect_changed_files
1558 return collect_changed_files
1559
1559
1560 def lookuprevlink_func(revlog):
1560 def lookuprevlink_func(revlog):
1561 def lookuprevlink(n):
1561 def lookuprevlink(n):
1562 return cl.node(revlog.linkrev(n))
1562 return cl.node(revlog.linkrev(n))
1563 return lookuprevlink
1563 return lookuprevlink
1564
1564
1565 def gengroup():
1565 def gengroup():
1566 # construct a list of all changed files
1566 # construct a list of all changed files
1567 changedfiles = {}
1567 changedfiles = {}
1568
1568
1569 for chnk in cl.group(nodes, identity,
1569 for chnk in cl.group(nodes, identity,
1570 changed_file_collector(changedfiles)):
1570 changed_file_collector(changedfiles)):
1571 yield chnk
1571 yield chnk
1572 changedfiles = changedfiles.keys()
1572 changedfiles = changedfiles.keys()
1573 changedfiles.sort()
1573 changedfiles.sort()
1574
1574
1575 mnfst = self.manifest
1575 mnfst = self.manifest
1576 nodeiter = gennodelst(mnfst)
1576 nodeiter = gennodelst(mnfst)
1577 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1577 for chnk in mnfst.group(nodeiter, lookuprevlink_func(mnfst)):
1578 yield chnk
1578 yield chnk
1579
1579
1580 for fname in changedfiles:
1580 for fname in changedfiles:
1581 filerevlog = self.file(fname)
1581 filerevlog = self.file(fname)
1582 nodeiter = gennodelst(filerevlog)
1582 nodeiter = gennodelst(filerevlog)
1583 nodeiter = list(nodeiter)
1583 nodeiter = list(nodeiter)
1584 if nodeiter:
1584 if nodeiter:
1585 yield changegroup.genchunk(fname)
1585 yield changegroup.genchunk(fname)
1586 lookup = lookuprevlink_func(filerevlog)
1586 lookup = lookuprevlink_func(filerevlog)
1587 for chnk in filerevlog.group(nodeiter, lookup):
1587 for chnk in filerevlog.group(nodeiter, lookup):
1588 yield chnk
1588 yield chnk
1589
1589
1590 yield changegroup.closechunk()
1590 yield changegroup.closechunk()
1591
1591
1592 if nodes:
1592 if nodes:
1593 self.hook('outgoing', node=hex(nodes[0]), source=source)
1593 self.hook('outgoing', node=hex(nodes[0]), source=source)
1594
1594
1595 return util.chunkbuffer(gengroup())
1595 return util.chunkbuffer(gengroup())
1596
1596
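# Rough sketch of the stream layout both changegroup generators above emit:
# one changelog group, one manifest group, then one named group per changed
# file, with an empty chunk closing each group and the overall file list.
def changegroup_layout(changed_files):
    yield 'changelog group'
    yield 'empty chunk (end of group)'
    yield 'manifest group'
    yield 'empty chunk (end of group)'
    for name in changed_files:
        yield 'filename chunk: %s' % name
        yield 'file group for %s' % name
        yield 'empty chunk (end of group)'
    yield 'empty chunk (end of stream)'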
1597 def addchangegroup(self, source, srctype, url):
1597 def addchangegroup(self, source, srctype, url):
1598 """add changegroup to repo.
1598 """add changegroup to repo.
1599 returns number of heads modified or added + 1."""
1599 returns number of heads modified or added + 1."""

        def csmap(x):
            self.ui.debug(_("add changeset %s\n") % short(x))
            return cl.count()

        def revmap(x):
            return cl.rev(x)

        if not source:
            return 0

        self.hook('prechangegroup', throw=True, source=srctype, url=url)

        changesets = files = revisions = 0

        tr = self.transaction()

        # write changelog data to temp files so concurrent readers will not see
        # inconsistent view
        cl = None
        try:
            cl = appendfile.appendchangelog(self.opener, self.changelog.version)

            oldheads = len(cl.heads())

            # pull off the changeset group
            self.ui.status(_("adding changesets\n"))
            cor = cl.count() - 1
            chunkiter = changegroup.chunkiter(source)
            if cl.addgroup(chunkiter, csmap, tr, 1) is None:
                raise util.Abort(_("received changelog group is empty"))
            cnr = cl.count() - 1
            changesets = cnr - cor

            # pull off the manifest group
            self.ui.status(_("adding manifests\n"))
            chunkiter = changegroup.chunkiter(source)
            # no need to check for empty manifest group here:
            # if the result of the merge of 1 and 2 is the same in 3 and 4,
            # no new manifest will be created and the manifest group will
            # be empty during the pull
            self.manifest.addgroup(chunkiter, revmap, tr)

            # process the files
            self.ui.status(_("adding file changes\n"))
            while 1:
                f = changegroup.getchunk(source)
                if not f:
                    break
                self.ui.debug(_("adding %s revisions\n") % f)
                fl = self.file(f)
                o = fl.count()
                chunkiter = changegroup.chunkiter(source)
                if fl.addgroup(chunkiter, revmap, tr) is None:
                    raise util.Abort(_("received file revlog group is empty"))
                revisions += fl.count() - o
                files += 1

            cl.writedata()
        finally:
            if cl:
                cl.cleanup()

        # make changelog see real files again
        self.changelog = changelog.changelog(self.opener, self.changelog.version)
        self.changelog.checkinlinesize(tr)

        newheads = len(self.changelog.heads())
        heads = ""
        if oldheads and newheads != oldheads:
            heads = _(" (%+d heads)") % (newheads - oldheads)

        self.ui.status(_("added %d changesets"
                         " with %d changes to %d files%s\n")
                       % (changesets, revisions, files, heads))

        if changesets > 0:
            self.hook('pretxnchangegroup', throw=True,
                      node=hex(self.changelog.node(cor+1)), source=srctype,
                      url=url)

        tr.close()

        if changesets > 0:
            self.hook("changegroup", node=hex(self.changelog.node(cor+1)),
                      source=srctype, url=url)

            for i in range(cor + 1, cnr + 1):
                self.hook("incoming", node=hex(self.changelog.node(i)),
                          source=srctype, url=url)

        return newheads - oldheads + 1
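
The hook sequence above is the extension point for vetting incoming changesets: 'prechangegroup' fires before any data is read, 'pretxnchangegroup' fires after the group is written but before tr.close(), so a failing hook rolls the whole group back, and 'changegroup' plus one 'incoming' per changeset fire only once the transaction has been committed. As a minimal sketch (not part of this file, and assuming Mercurial's in-process Python hook convention where a hook receives ui, repo, hooktype and the keyword arguments passed to self.hook(), with a true return value meaning failure), a pre-transaction guard could look like this:

# hypothetical module myhooks.py, enabled in hgrc with
#   [hooks]
#   pretxnchangegroup.limitheads = python:myhooks.limitheads
def limitheads(ui, repo, hooktype, node=None, source=None, url=None, **kwargs):
    # node is the first incoming changeset (cor+1 above); the group is
    # already in the repository here, but the transaction is still open.
    if len(repo.heads()) > 4:
        ui.warn("refusing %s from %s: too many heads\n" % (source, url))
        return True    # truthy return fails the hook and the pull is rolled back
    return False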


    def stream_in(self, remote):
        fp = remote.stream_out()
        resp = int(fp.readline())
        if resp != 0:
            raise util.Abort(_('operation forbidden by server'))
        self.ui.status(_('streaming all changes\n'))
        total_files, total_bytes = map(int, fp.readline().split(' ', 1))
        self.ui.status(_('%d files to transfer, %s of data\n') %
                       (total_files, util.bytecount(total_bytes)))
        start = time.time()
        for i in xrange(total_files):
            name, size = fp.readline().split('\0', 1)
            size = int(size)
            self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
            ofp = self.opener(name, 'w')
            for chunk in util.filechunkiter(fp, limit=size):
                ofp.write(chunk)
            ofp.close()
        elapsed = time.time() - start
        self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
                       (util.bytecount(total_bytes), elapsed,
                        util.bytecount(total_bytes / elapsed)))
        self.reload()
        return len(self.heads()) + 1
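
stream_in() defines, by what it parses, a very small wire format: a decimal status line ('0' means the operation is allowed), a '<total files> <total bytes>' summary line, and then for each file a '<name>\0<size>' header line followed by exactly size raw bytes of revlog data. Purely as an illustration (this is not the real server-side stream_out() implementation), a producer for that layout would have to look roughly like:

def frame_stream(entries):
    """Yield a byte stream in the layout stream_in() consumes.

    entries is assumed to be a list of (name, data) pairs.
    """
    yield '0\n'                                    # status: operation allowed
    total = sum([len(data) for name, data in entries])
    yield '%d %d\n' % (len(entries), total)        # summary line
    for name, data in entries:
        yield '%s\0%d\n' % (name, len(data))       # per-file header
        yield data                                 # exactly len(data) bytes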

    def clone(self, remote, heads=[], stream=False):
        '''clone remote repository.

        keyword arguments:
        heads: list of revs to clone (forces use of pull)
        stream: use streaming clone if possible'''

        # now, all clients that can request uncompressed clones can
        # read repo formats supported by all servers that can serve
        # them.

        # if revlog format changes, client will have to check version
        # and format flags on "stream" capability, and use
        # uncompressed only if compatible.

        if stream and not heads and remote.capable('stream'):
            return self.stream_in(remote)
        return self.pull(remote, heads)
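
A caller only states a preference with stream=True; the method silently falls back to a normal pull when explicit heads are requested or the server does not advertise the 'stream' capability. A rough usage sketch (the ui object u, the paths and the remote URL are made up for illustration):

dest = localrepository(u, '/tmp/clone', create=1)     # fresh local target
other = hg.repository(u, 'http://example.org/repo')   # any repo with capable()/stream_out()
count = dest.clone(other, heads=[], stream=True)      # streams if possible, else pulls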

# used to avoid circular references so destructors work
def aftertrans(base):
    p = base
    def a():
        util.rename(os.path.join(p, "journal"), os.path.join(p, "undo"))
        util.rename(os.path.join(p, "journal.dirstate"),
                    os.path.join(p, "undo.dirstate"))
    return a
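
Handing the transaction a callback that captures only the plain path string is deliberate: the repository holds the open transaction, so a callback that pointed back at the repository would close a reference cycle and, as the comment notes, could keep destructors (such as the transaction's rollback-on-abort) from running. For contrast, an illustrative version that reintroduces the problem would be:

def aftertrans_keeps_repo_alive(repo):
    # anti-pattern sketch: the closure now references the repository, so a
    # transaction holding this callback also keeps the whole repo alive
    def a():
        util.rename(os.path.join(repo.path, "journal"),
                    os.path.join(repo.path, "undo"))
        util.rename(os.path.join(repo.path, "journal.dirstate"),
                    os.path.join(repo.path, "undo.dirstate"))
    return a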

def instance(ui, path, create):
    return localrepository(ui, util.drop_scheme('file', path), create)

def islocal(path):
    return True
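
instance() and islocal() are presumably the module-level entry points that path-to-repository dispatch in mercurial.hg goes through, and util.drop_scheme() strips an optional 'file:' or 'file://' prefix so a plain filesystem path and a file URL open the same repository. For example (u stands for a mercurial ui object; the paths are made up):

repo_a = instance(u, '/home/user/repo', False)
repo_b = instance(u, 'file:///home/user/repo', False)   # prefix dropped by drop_scheme
# both refer to the same on-disk repository; islocal() is always True
# because this module only handles local repositories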