redo merge with mpm....
Vadim Gelfer
r2921:addb58e3 merge default
@@ -0,0 +1,68 @@
1 # mail.py - mail sending bits for mercurial
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from i18n import gettext as _
9 from demandload import *
10 demandload(globals(), "os re smtplib templater util")
11
12 def _smtp(ui):
13 '''send mail using smtp.'''
14
15 local_hostname = ui.config('smtp', 'local_hostname')
16 s = smtplib.SMTP(local_hostname=local_hostname)
17 mailhost = ui.config('smtp', 'host')
18 if not mailhost:
19 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
20 mailport = int(ui.config('smtp', 'port', 25))
21 ui.note(_('sending mail: smtp host %s, port %s\n') %
22 (mailhost, mailport))
23 s.connect(host=mailhost, port=mailport)
24 if ui.configbool('smtp', 'tls'):
25 ui.note(_('(using tls)\n'))
26 s.ehlo()
27 s.starttls()
28 s.ehlo()
29 username = ui.config('smtp', 'username')
30 password = ui.config('smtp', 'password')
31 if username and password:
32 ui.note(_('(authenticating to mail server as %s)\n') %
33 (username))
34 s.login(username, password)
35 return s
36
37 class _sendmail(object):
38 '''send mail using sendmail.'''
39
40 def __init__(self, ui, program):
41 self.ui = ui
42 self.program = program
43
44 def sendmail(self, sender, recipients, msg):
45 cmdline = '%s -f %s %s' % (
46 self.program, templater.email(sender),
47 ' '.join(map(templater.email, recipients)))
48 self.ui.note(_('sending mail: %s\n') % cmdline)
49 fp = os.popen(cmdline, 'w')
50 fp.write(msg)
51 ret = fp.close()
52 if ret:
53 raise util.Abort('%s %s' % (
54 os.path.basename(self.program.split(None, 1)[0]),
55 util.explain_exit(ret)[0]))
56
57 def connect(ui):
58 '''make a mail connection. object returned has one method, sendmail.
59 call as sendmail(sender, list-of-recipients, msg).'''
60
61 method = ui.config('email', 'method', 'smtp')
62 if method == 'smtp':
63 return _smtp(ui)
64
65 return _sendmail(ui, method)
66
67 def sendmail(ui, sender, recipients, msg):
68 return connect(ui).sendmail(sender, recipients, msg)
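The configuration this module reads lives in the user's hgrc: [email] method selects the transport ('smtp' by default, anything else is run as a sendmail-style command), and the smtp path uses the [smtp] keys host, port, tls, username, password and local_hostname. A minimal usage sketch, assuming the module is importable as mercurial.mail and with placeholder addresses:

    # illustrative only: send a message through whichever transport hgrc selects
    from mercurial import ui as _ui
    from mercurial import mail

    u = _ui.ui()    # loads hgrc, including the [email] and [smtp] sections
    msg = "Subject: hello\n\nbody text\n"
    mail.sendmail(u, 'sender@example.com', ['rcpt@example.com'], msg)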
@@ -1,1980 +1,1980 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
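To make the task table above concrete, a typical session looks roughly like this (the patch name is made up; see "hg help command" for each command's options):

    hg qinit                  # prepare the repository (creates .hg/patches)
    hg qnew fix-typo.patch    # start a new patch on top of the applied stack
    # ... edit some files ...
    hg qrefresh               # fold working-directory changes into the top patch
    hg qseries                # list all known patches
    hg qpop                   # unapply the top patch
    hg qpush                  # apply it again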
32 from mercurial.demandload import *
32 from mercurial.demandload import *
33 from mercurial.i18n import gettext as _
33 from mercurial.i18n import gettext as _
34 demandload(globals(), "os sys re struct traceback errno bz2")
34 demandload(globals(), "os sys re struct traceback errno bz2")
35 demandload(globals(), "mercurial:cmdutil,commands,hg,patch,revlog,ui,util")
35 demandload(globals(), "mercurial:cmdutil,commands,hg,patch,revlog,ui,util")
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 class statusentry:
39 class statusentry:
40 def __init__(self, rev, name=None):
40 def __init__(self, rev, name=None):
41 if not name:
41 if not name:
42 fields = rev.split(':')
42 fields = rev.split(':')
43 if len(fields) == 2:
43 if len(fields) == 2:
44 self.rev, self.name = fields
44 self.rev, self.name = fields
45 else:
45 else:
46 self.rev, self.name = None, None
46 self.rev, self.name = None, None
47 else:
47 else:
48 self.rev, self.name = rev, name
48 self.rev, self.name = rev, name
49
49
50 def __str__(self):
50 def __str__(self):
51 return self.rev + ':' + self.name
51 return self.rev + ':' + self.name
52
52
53 class queue:
53 class queue:
54 def __init__(self, ui, path, patchdir=None):
54 def __init__(self, ui, path, patchdir=None):
55 self.basepath = path
55 self.basepath = path
56 self.path = patchdir or os.path.join(path, "patches")
56 self.path = patchdir or os.path.join(path, "patches")
57 self.opener = util.opener(self.path)
57 self.opener = util.opener(self.path)
58 self.ui = ui
58 self.ui = ui
59 self.applied = []
59 self.applied = []
60 self.full_series = []
60 self.full_series = []
61 self.applied_dirty = 0
61 self.applied_dirty = 0
62 self.series_dirty = 0
62 self.series_dirty = 0
63 self.series_path = "series"
63 self.series_path = "series"
64 self.status_path = "status"
64 self.status_path = "status"
65 self.guards_path = "guards"
65 self.guards_path = "guards"
66 self.active_guards = None
66 self.active_guards = None
67 self.guards_dirty = False
67 self.guards_dirty = False
68 self._diffopts = None
68 self._diffopts = None
69
69
70 if os.path.exists(self.join(self.series_path)):
70 if os.path.exists(self.join(self.series_path)):
71 self.full_series = self.opener(self.series_path).read().splitlines()
71 self.full_series = self.opener(self.series_path).read().splitlines()
72 self.parse_series()
72 self.parse_series()
73
73
74 if os.path.exists(self.join(self.status_path)):
74 if os.path.exists(self.join(self.status_path)):
75 lines = self.opener(self.status_path).read().splitlines()
75 lines = self.opener(self.status_path).read().splitlines()
76 self.applied = [statusentry(l) for l in lines]
76 self.applied = [statusentry(l) for l in lines]
77
77
78 def diffopts(self):
78 def diffopts(self):
79 if self._diffopts is None:
79 if self._diffopts is None:
80 self._diffopts = self.ui.diffopts()
80 self._diffopts = patch.diffopts(self.ui)
81 return self._diffopts
81 return self._diffopts
82
82
83 def join(self, *p):
83 def join(self, *p):
84 return os.path.join(self.path, *p)
84 return os.path.join(self.path, *p)
85
85
86 def find_series(self, patch):
86 def find_series(self, patch):
87 pre = re.compile("(\s*)([^#]+)")
87 pre = re.compile("(\s*)([^#]+)")
88 index = 0
88 index = 0
89 for l in self.full_series:
89 for l in self.full_series:
90 m = pre.match(l)
90 m = pre.match(l)
91 if m:
91 if m:
92 s = m.group(2)
92 s = m.group(2)
93 s = s.rstrip()
93 s = s.rstrip()
94 if s == patch:
94 if s == patch:
95 return index
95 return index
96 index += 1
96 index += 1
97 return None
97 return None
98
98
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100
100
101 def parse_series(self):
101 def parse_series(self):
102 self.series = []
102 self.series = []
103 self.series_guards = []
103 self.series_guards = []
104 for l in self.full_series:
104 for l in self.full_series:
105 h = l.find('#')
105 h = l.find('#')
106 if h == -1:
106 if h == -1:
107 patch = l
107 patch = l
108 comment = ''
108 comment = ''
109 elif h == 0:
109 elif h == 0:
110 continue
110 continue
111 else:
111 else:
112 patch = l[:h]
112 patch = l[:h]
113 comment = l[h:]
113 comment = l[h:]
114 patch = patch.strip()
114 patch = patch.strip()
115 if patch:
115 if patch:
116 self.series.append(patch)
116 self.series.append(patch)
117 self.series_guards.append(self.guard_re.findall(comment))
117 self.series_guards.append(self.guard_re.findall(comment))
118
118
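For reference, a series file that parse_series() accepts looks like the following (patch and guard names are invented): a line whose first character is '#' is skipped entirely, text after a later '#' is a comment, and '#+name' / '#-name' markers in that comment become positive and negative guards via guard_re.

    fix-build.patch
    new-feature.patch #+experimental
    old-workaround.patch #-stable
    # disabled.patch (this whole line is ignored)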
119 def check_guard(self, guard):
119 def check_guard(self, guard):
120 bad_chars = '# \t\r\n\f'
120 bad_chars = '# \t\r\n\f'
121 first = guard[0]
121 first = guard[0]
122 for c in '-+':
122 for c in '-+':
123 if first == c:
123 if first == c:
124 return (_('guard %r starts with invalid character: %r') %
124 return (_('guard %r starts with invalid character: %r') %
125 (guard, c))
125 (guard, c))
126 for c in bad_chars:
126 for c in bad_chars:
127 if c in guard:
127 if c in guard:
128 return _('invalid character in guard %r: %r') % (guard, c)
128 return _('invalid character in guard %r: %r') % (guard, c)
129
129
130 def set_active(self, guards):
130 def set_active(self, guards):
131 for guard in guards:
131 for guard in guards:
132 bad = self.check_guard(guard)
132 bad = self.check_guard(guard)
133 if bad:
133 if bad:
134 raise util.Abort(bad)
134 raise util.Abort(bad)
135 guards = dict.fromkeys(guards).keys()
135 guards = dict.fromkeys(guards).keys()
136 guards.sort()
136 guards.sort()
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
138 self.active_guards = guards
138 self.active_guards = guards
139 self.guards_dirty = True
139 self.guards_dirty = True
140
140
141 def active(self):
141 def active(self):
142 if self.active_guards is None:
142 if self.active_guards is None:
143 self.active_guards = []
143 self.active_guards = []
144 try:
144 try:
145 guards = self.opener(self.guards_path).read().split()
145 guards = self.opener(self.guards_path).read().split()
146 except IOError, err:
146 except IOError, err:
147 if err.errno != errno.ENOENT: raise
147 if err.errno != errno.ENOENT: raise
148 guards = []
148 guards = []
149 for i, guard in enumerate(guards):
149 for i, guard in enumerate(guards):
150 bad = self.check_guard(guard)
150 bad = self.check_guard(guard)
151 if bad:
151 if bad:
152 self.ui.warn('%s:%d: %s\n' %
152 self.ui.warn('%s:%d: %s\n' %
153 (self.join(self.guards_path), i + 1, bad))
153 (self.join(self.guards_path), i + 1, bad))
154 else:
154 else:
155 self.active_guards.append(guard)
155 self.active_guards.append(guard)
156 return self.active_guards
156 return self.active_guards
157
157
158 def set_guards(self, idx, guards):
158 def set_guards(self, idx, guards):
159 for g in guards:
159 for g in guards:
160 if len(g) < 2:
160 if len(g) < 2:
161 raise util.Abort(_('guard %r too short') % g)
161 raise util.Abort(_('guard %r too short') % g)
162 if g[0] not in '-+':
162 if g[0] not in '-+':
163 raise util.Abort(_('guard %r starts with invalid char') % g)
163 raise util.Abort(_('guard %r starts with invalid char') % g)
164 bad = self.check_guard(g[1:])
164 bad = self.check_guard(g[1:])
165 if bad:
165 if bad:
166 raise util.Abort(bad)
166 raise util.Abort(bad)
167 drop = self.guard_re.sub('', self.full_series[idx])
167 drop = self.guard_re.sub('', self.full_series[idx])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
169 self.parse_series()
169 self.parse_series()
170 self.series_dirty = True
170 self.series_dirty = True
171
171
172 def pushable(self, idx):
172 def pushable(self, idx):
173 if isinstance(idx, str):
173 if isinstance(idx, str):
174 idx = self.series.index(idx)
174 idx = self.series.index(idx)
175 patchguards = self.series_guards[idx]
175 patchguards = self.series_guards[idx]
176 if not patchguards:
176 if not patchguards:
177 return True, None
177 return True, None
178 default = False
178 default = False
179 guards = self.active()
179 guards = self.active()
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
181 if exactneg:
181 if exactneg:
182 return False, exactneg[0]
182 return False, exactneg[0]
183 pos = [g for g in patchguards if g[0] == '+']
183 pos = [g for g in patchguards if g[0] == '+']
184 exactpos = [g for g in pos if g[1:] in guards]
184 exactpos = [g for g in pos if g[1:] in guards]
185 if pos:
185 if pos:
186 if exactpos:
186 if exactpos:
187 return True, exactpos[0]
187 return True, exactpos[0]
188 return False, pos
188 return False, pos
189 return True, ''
189 return True, ''
190
190
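Concretely (guard names are illustrative): with the active guards set to ['stable'], a patch guarded '#-stable' is blocked by the exact negative match, one guarded '#+stable' is pushable, one guarded only '#+experimental' is skipped because none of its positive guards are active, and a patch with no guards at all is always pushable.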
191 def explain_pushable(self, idx, all_patches=False):
191 def explain_pushable(self, idx, all_patches=False):
192 write = all_patches and self.ui.write or self.ui.warn
192 write = all_patches and self.ui.write or self.ui.warn
193 if all_patches or self.ui.verbose:
193 if all_patches or self.ui.verbose:
194 if isinstance(idx, str):
194 if isinstance(idx, str):
195 idx = self.series.index(idx)
195 idx = self.series.index(idx)
196 pushable, why = self.pushable(idx)
196 pushable, why = self.pushable(idx)
197 if all_patches and pushable:
197 if all_patches and pushable:
198 if why is None:
198 if why is None:
199 write(_('allowing %s - no guards in effect\n') %
199 write(_('allowing %s - no guards in effect\n') %
200 self.series[idx])
200 self.series[idx])
201 else:
201 else:
202 if not why:
202 if not why:
203 write(_('allowing %s - no matching negative guards\n') %
203 write(_('allowing %s - no matching negative guards\n') %
204 self.series[idx])
204 self.series[idx])
205 else:
205 else:
206 write(_('allowing %s - guarded by %r\n') %
206 write(_('allowing %s - guarded by %r\n') %
207 (self.series[idx], why))
207 (self.series[idx], why))
208 if not pushable:
208 if not pushable:
209 if why:
209 if why:
210 write(_('skipping %s - guarded by %r\n') %
210 write(_('skipping %s - guarded by %r\n') %
211 (self.series[idx], ' '.join(why)))
211 (self.series[idx], ' '.join(why)))
212 else:
212 else:
213 write(_('skipping %s - no matching guards\n') %
213 write(_('skipping %s - no matching guards\n') %
214 self.series[idx])
214 self.series[idx])
215
215
216 def save_dirty(self):
216 def save_dirty(self):
217 def write_list(items, path):
217 def write_list(items, path):
218 fp = self.opener(path, 'w')
218 fp = self.opener(path, 'w')
219 for i in items:
219 for i in items:
220 print >> fp, i
220 print >> fp, i
221 fp.close()
221 fp.close()
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
225
225
226 def readheaders(self, patch):
226 def readheaders(self, patch):
227 def eatdiff(lines):
227 def eatdiff(lines):
228 while lines:
228 while lines:
229 l = lines[-1]
229 l = lines[-1]
230 if (l.startswith("diff -") or
230 if (l.startswith("diff -") or
231 l.startswith("Index:") or
231 l.startswith("Index:") or
232 l.startswith("===========")):
232 l.startswith("===========")):
233 del lines[-1]
233 del lines[-1]
234 else:
234 else:
235 break
235 break
236 def eatempty(lines):
236 def eatempty(lines):
237 while lines:
237 while lines:
238 l = lines[-1]
238 l = lines[-1]
239 if re.match('\s*$', l):
239 if re.match('\s*$', l):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243
243
244 pf = self.join(patch)
244 pf = self.join(patch)
245 message = []
245 message = []
246 comments = []
246 comments = []
247 user = None
247 user = None
248 date = None
248 date = None
249 format = None
249 format = None
250 subject = None
250 subject = None
251 diffstart = 0
251 diffstart = 0
252
252
253 for line in file(pf):
253 for line in file(pf):
254 line = line.rstrip()
254 line = line.rstrip()
255 if diffstart:
255 if diffstart:
256 if line.startswith('+++ '):
256 if line.startswith('+++ '):
257 diffstart = 2
257 diffstart = 2
258 break
258 break
259 if line.startswith("--- "):
259 if line.startswith("--- "):
260 diffstart = 1
260 diffstart = 1
261 continue
261 continue
262 elif format == "hgpatch":
262 elif format == "hgpatch":
263 # parse values when importing the result of an hg export
263 # parse values when importing the result of an hg export
264 if line.startswith("# User "):
264 if line.startswith("# User "):
265 user = line[7:]
265 user = line[7:]
266 elif line.startswith("# Date "):
266 elif line.startswith("# Date "):
267 date = line[7:]
267 date = line[7:]
268 elif not line.startswith("# ") and line:
268 elif not line.startswith("# ") and line:
269 message.append(line)
269 message.append(line)
270 format = None
270 format = None
271 elif line == '# HG changeset patch':
271 elif line == '# HG changeset patch':
272 format = "hgpatch"
272 format = "hgpatch"
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
274 line.startswith("subject: "))):
274 line.startswith("subject: "))):
275 subject = line[9:]
275 subject = line[9:]
276 format = "tag"
276 format = "tag"
277 elif (format != "tagdone" and (line.startswith("From: ") or
277 elif (format != "tagdone" and (line.startswith("From: ") or
278 line.startswith("from: "))):
278 line.startswith("from: "))):
279 user = line[6:]
279 user = line[6:]
280 format = "tag"
280 format = "tag"
281 elif format == "tag" and line == "":
281 elif format == "tag" and line == "":
282 # when looking for tags (subject: from: etc) they
282 # when looking for tags (subject: from: etc) they
283 # end once you find a blank line in the source
283 # end once you find a blank line in the source
284 format = "tagdone"
284 format = "tagdone"
285 elif message or line:
285 elif message or line:
286 message.append(line)
286 message.append(line)
287 comments.append(line)
287 comments.append(line)
288
288
289 eatdiff(message)
289 eatdiff(message)
290 eatdiff(comments)
290 eatdiff(comments)
291 eatempty(message)
291 eatempty(message)
292 eatempty(comments)
292 eatempty(comments)
293
293
294 # make sure message isn't empty
294 # make sure message isn't empty
295 if format and format.startswith("tag") and subject:
295 if format and format.startswith("tag") and subject:
296 message.insert(0, "")
296 message.insert(0, "")
297 message.insert(0, subject)
297 message.insert(0, subject)
298 return (message, comments, user, date, diffstart > 1)
298 return (message, comments, user, date, diffstart > 1)
299
299
300 def printdiff(self, repo, node1, node2=None, files=None,
300 def printdiff(self, repo, node1, node2=None, files=None,
301 fp=None, changes=None, opts=None):
301 fp=None, changes=None, opts=None):
302 patch.diff(repo, node1, node2, files,
302 patch.diff(repo, node1, node2, files,
303 fp=fp, changes=changes, opts=self.diffopts())
303 fp=fp, changes=changes, opts=self.diffopts())
304
304
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
306 # first try just applying the patch
306 # first try just applying the patch
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
308 strict=True, merge=rev, wlock=wlock)
308 strict=True, merge=rev, wlock=wlock)
309
309
310 if err == 0:
310 if err == 0:
311 return (err, n)
311 return (err, n)
312
312
313 if n is None:
313 if n is None:
314 raise util.Abort(_("apply failed for patch %s") % patch)
314 raise util.Abort(_("apply failed for patch %s") % patch)
315
315
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
317
317
318 # apply failed, strip away that rev and merge.
318 # apply failed, strip away that rev and merge.
319 hg.clean(repo, head, wlock=wlock)
319 hg.clean(repo, head, wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
321
321
322 c = repo.changelog.read(rev)
322 c = repo.changelog.read(rev)
323 ret = hg.merge(repo, rev, wlock=wlock)
323 ret = hg.merge(repo, rev, wlock=wlock)
324 if ret:
324 if ret:
325 raise util.Abort(_("update returned %d") % ret)
325 raise util.Abort(_("update returned %d") % ret)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
327 if n == None:
327 if n == None:
328 raise util.Abort(_("repo commit failed"))
328 raise util.Abort(_("repo commit failed"))
329 try:
329 try:
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
331 except:
331 except:
332 raise util.Abort(_("unable to read %s") % patch)
332 raise util.Abort(_("unable to read %s") % patch)
333
333
334 patchf = self.opener(patch, "w")
334 patchf = self.opener(patch, "w")
335 if comments:
335 if comments:
336 comments = "\n".join(comments) + '\n\n'
336 comments = "\n".join(comments) + '\n\n'
337 patchf.write(comments)
337 patchf.write(comments)
338 self.printdiff(repo, head, n, fp=patchf)
338 self.printdiff(repo, head, n, fp=patchf)
339 patchf.close()
339 patchf.close()
340 return (0, n)
340 return (0, n)
341
341
342 def qparents(self, repo, rev=None):
342 def qparents(self, repo, rev=None):
343 if rev is None:
343 if rev is None:
344 (p1, p2) = repo.dirstate.parents()
344 (p1, p2) = repo.dirstate.parents()
345 if p2 == revlog.nullid:
345 if p2 == revlog.nullid:
346 return p1
346 return p1
347 if len(self.applied) == 0:
347 if len(self.applied) == 0:
348 return None
348 return None
349 return revlog.bin(self.applied[-1].rev)
349 return revlog.bin(self.applied[-1].rev)
350 pp = repo.changelog.parents(rev)
350 pp = repo.changelog.parents(rev)
351 if pp[1] != revlog.nullid:
351 if pp[1] != revlog.nullid:
352 arevs = [ x.rev for x in self.applied ]
352 arevs = [ x.rev for x in self.applied ]
353 p0 = revlog.hex(pp[0])
353 p0 = revlog.hex(pp[0])
354 p1 = revlog.hex(pp[1])
354 p1 = revlog.hex(pp[1])
355 if p0 in arevs:
355 if p0 in arevs:
356 return pp[0]
356 return pp[0]
357 if p1 in arevs:
357 if p1 in arevs:
358 return pp[1]
358 return pp[1]
359 return pp[0]
359 return pp[0]
360
360
361 def mergepatch(self, repo, mergeq, series, wlock):
361 def mergepatch(self, repo, mergeq, series, wlock):
362 if len(self.applied) == 0:
362 if len(self.applied) == 0:
363 # each of the patches merged in will have two parents. This
363 # each of the patches merged in will have two parents. This
364 # can confuse the qrefresh, qdiff, and strip code because it
364 # can confuse the qrefresh, qdiff, and strip code because it
365 # needs to know which parent is actually in the patch queue.
365 # needs to know which parent is actually in the patch queue.
366 # so, we insert a merge marker with only one parent. This way
366 # so, we insert a merge marker with only one parent. This way
367 # the first patch in the queue is never a merge patch
367 # the first patch in the queue is never a merge patch
368 #
368 #
369 pname = ".hg.patches.merge.marker"
369 pname = ".hg.patches.merge.marker"
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
371 wlock=wlock)
371 wlock=wlock)
372 self.applied.append(statusentry(revlog.hex(n), pname))
372 self.applied.append(statusentry(revlog.hex(n), pname))
373 self.applied_dirty = 1
373 self.applied_dirty = 1
374
374
375 head = self.qparents(repo)
375 head = self.qparents(repo)
376
376
377 for patch in series:
377 for patch in series:
378 patch = mergeq.lookup(patch, strict=True)
378 patch = mergeq.lookup(patch, strict=True)
379 if not patch:
379 if not patch:
380 self.ui.warn("patch %s does not exist\n" % patch)
380 self.ui.warn("patch %s does not exist\n" % patch)
381 return (1, None)
381 return (1, None)
382 pushable, reason = self.pushable(patch)
382 pushable, reason = self.pushable(patch)
383 if not pushable:
383 if not pushable:
384 self.explain_pushable(patch, all_patches=True)
384 self.explain_pushable(patch, all_patches=True)
385 continue
385 continue
386 info = mergeq.isapplied(patch)
386 info = mergeq.isapplied(patch)
387 if not info:
387 if not info:
388 self.ui.warn("patch %s is not applied\n" % patch)
388 self.ui.warn("patch %s is not applied\n" % patch)
389 return (1, None)
389 return (1, None)
390 rev = revlog.bin(info[1])
390 rev = revlog.bin(info[1])
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
392 if head:
392 if head:
393 self.applied.append(statusentry(revlog.hex(head), patch))
393 self.applied.append(statusentry(revlog.hex(head), patch))
394 self.applied_dirty = 1
394 self.applied_dirty = 1
395 if err:
395 if err:
396 return (err, head)
396 return (err, head)
397 return (0, head)
397 return (0, head)
398
398
399 def patch(self, repo, patchfile):
399 def patch(self, repo, patchfile):
400 '''Apply patchfile to the working directory.
400 '''Apply patchfile to the working directory.
401 patchfile: file name of patch'''
401 patchfile: file name of patch'''
402 try:
402 try:
403 (files, fuzz) = patch.patch(patchfile, self.ui, strip=1,
403 (files, fuzz) = patch.patch(patchfile, self.ui, strip=1,
404 cwd=repo.root)
404 cwd=repo.root)
405 except Exception, inst:
405 except Exception, inst:
406 self.ui.note(str(inst) + '\n')
406 self.ui.note(str(inst) + '\n')
407 if not self.ui.verbose:
407 if not self.ui.verbose:
408 self.ui.warn("patch failed, unable to continue (try -v)\n")
408 self.ui.warn("patch failed, unable to continue (try -v)\n")
409 return (False, [], False)
409 return (False, [], False)
410
410
411 return (True, files.keys(), fuzz)
411 return (True, files.keys(), fuzz)
412
412
413 def apply(self, repo, series, list=False, update_status=True,
413 def apply(self, repo, series, list=False, update_status=True,
414 strict=False, patchdir=None, merge=None, wlock=None):
414 strict=False, patchdir=None, merge=None, wlock=None):
415 # TODO unify with commands.py
415 # TODO unify with commands.py
416 if not patchdir:
416 if not patchdir:
417 patchdir = self.path
417 patchdir = self.path
418 err = 0
418 err = 0
419 if not wlock:
419 if not wlock:
420 wlock = repo.wlock()
420 wlock = repo.wlock()
421 lock = repo.lock()
421 lock = repo.lock()
422 tr = repo.transaction()
422 tr = repo.transaction()
423 n = None
423 n = None
424 for patch in series:
424 for patch in series:
425 pushable, reason = self.pushable(patch)
425 pushable, reason = self.pushable(patch)
426 if not pushable:
426 if not pushable:
427 self.explain_pushable(patch, all_patches=True)
427 self.explain_pushable(patch, all_patches=True)
428 continue
428 continue
429 self.ui.warn("applying %s\n" % patch)
429 self.ui.warn("applying %s\n" % patch)
430 pf = os.path.join(patchdir, patch)
430 pf = os.path.join(patchdir, patch)
431
431
432 try:
432 try:
433 message, comments, user, date, patchfound = self.readheaders(patch)
433 message, comments, user, date, patchfound = self.readheaders(patch)
434 except:
434 except:
435 self.ui.warn("Unable to read %s\n" % pf)
435 self.ui.warn("Unable to read %s\n" % pf)
436 err = 1
436 err = 1
437 break
437 break
438
438
439 if not message:
439 if not message:
440 message = "imported patch %s\n" % patch
440 message = "imported patch %s\n" % patch
441 else:
441 else:
442 if list:
442 if list:
443 message.append("\nimported patch %s" % patch)
443 message.append("\nimported patch %s" % patch)
444 message = '\n'.join(message)
444 message = '\n'.join(message)
445
445
446 (patcherr, files, fuzz) = self.patch(repo, pf)
446 (patcherr, files, fuzz) = self.patch(repo, pf)
447 patcherr = not patcherr
447 patcherr = not patcherr
448
448
449 if merge and len(files) > 0:
449 if merge and len(files) > 0:
450 # Mark as merged and update dirstate parent info
450 # Mark as merged and update dirstate parent info
451 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
451 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
452 p1, p2 = repo.dirstate.parents()
452 p1, p2 = repo.dirstate.parents()
453 repo.dirstate.setparents(p1, merge)
453 repo.dirstate.setparents(p1, merge)
454 if len(files) > 0:
454 if len(files) > 0:
455 cwd = repo.getcwd()
455 cwd = repo.getcwd()
456 cfiles = files
456 cfiles = files
457 if cwd:
457 if cwd:
458 cfiles = [util.pathto(cwd, f) for f in files]
458 cfiles = [util.pathto(cwd, f) for f in files]
459 cmdutil.addremove(repo, cfiles, wlock=wlock)
459 cmdutil.addremove(repo, cfiles, wlock=wlock)
460 n = repo.commit(files, message, user, date, force=1, lock=lock,
460 n = repo.commit(files, message, user, date, force=1, lock=lock,
461 wlock=wlock)
461 wlock=wlock)
462
462
463 if n == None:
463 if n == None:
464 raise util.Abort(_("repo commit failed"))
464 raise util.Abort(_("repo commit failed"))
465
465
466 if update_status:
466 if update_status:
467 self.applied.append(statusentry(revlog.hex(n), patch))
467 self.applied.append(statusentry(revlog.hex(n), patch))
468
468
469 if patcherr:
469 if patcherr:
470 if not patchfound:
470 if not patchfound:
471 self.ui.warn("patch %s is empty\n" % patch)
471 self.ui.warn("patch %s is empty\n" % patch)
472 err = 0
472 err = 0
473 else:
473 else:
474 self.ui.warn("patch failed, rejects left in working dir\n")
474 self.ui.warn("patch failed, rejects left in working dir\n")
475 err = 1
475 err = 1
476 break
476 break
477
477
478 if fuzz and strict:
478 if fuzz and strict:
479 self.ui.warn("fuzz found when applying patch, stopping\n")
479 self.ui.warn("fuzz found when applying patch, stopping\n")
480 err = 1
480 err = 1
481 break
481 break
482 tr.close()
482 tr.close()
483 return (err, n)
483 return (err, n)
484
484
485 def delete(self, repo, patches, keep=False):
485 def delete(self, repo, patches, keep=False):
486 realpatches = []
486 realpatches = []
487 for patch in patches:
487 for patch in patches:
488 patch = self.lookup(patch, strict=True)
488 patch = self.lookup(patch, strict=True)
489 info = self.isapplied(patch)
489 info = self.isapplied(patch)
490 if info:
490 if info:
491 raise util.Abort(_("cannot delete applied patch %s") % patch)
491 raise util.Abort(_("cannot delete applied patch %s") % patch)
492 if patch not in self.series:
492 if patch not in self.series:
493 raise util.Abort(_("patch %s not in series file") % patch)
493 raise util.Abort(_("patch %s not in series file") % patch)
494 realpatches.append(patch)
494 realpatches.append(patch)
495
495
496 if not keep:
496 if not keep:
497 r = self.qrepo()
497 r = self.qrepo()
498 if r:
498 if r:
499 r.remove(realpatches, True)
499 r.remove(realpatches, True)
500 else:
500 else:
501 os.unlink(self.join(patch))
501 os.unlink(self.join(patch))
502
502
503 indices = [self.find_series(p) for p in realpatches]
503 indices = [self.find_series(p) for p in realpatches]
504 indices.sort()
504 indices.sort()
505 for i in indices[-1::-1]:
505 for i in indices[-1::-1]:
506 del self.full_series[i]
506 del self.full_series[i]
507 self.parse_series()
507 self.parse_series()
508 self.series_dirty = 1
508 self.series_dirty = 1
509
509
510 def check_toppatch(self, repo):
510 def check_toppatch(self, repo):
511 if len(self.applied) > 0:
511 if len(self.applied) > 0:
512 top = revlog.bin(self.applied[-1].rev)
512 top = revlog.bin(self.applied[-1].rev)
513 pp = repo.dirstate.parents()
513 pp = repo.dirstate.parents()
514 if top not in pp:
514 if top not in pp:
515 raise util.Abort(_("queue top not at same revision as working directory"))
515 raise util.Abort(_("queue top not at same revision as working directory"))
516 return top
516 return top
517 return None
517 return None
518 def check_localchanges(self, repo, force=False, refresh=True):
518 def check_localchanges(self, repo, force=False, refresh=True):
519 m, a, r, d = repo.status()[:4]
519 m, a, r, d = repo.status()[:4]
520 if m or a or r or d:
520 if m or a or r or d:
521 if not force:
521 if not force:
522 if refresh:
522 if refresh:
523 raise util.Abort(_("local changes found, refresh first"))
523 raise util.Abort(_("local changes found, refresh first"))
524 else:
524 else:
525 raise util.Abort(_("local changes found"))
525 raise util.Abort(_("local changes found"))
526 return m, a, r, d
526 return m, a, r, d
527 def new(self, repo, patch, msg=None, force=None):
527 def new(self, repo, patch, msg=None, force=None):
528 if os.path.exists(self.join(patch)):
528 if os.path.exists(self.join(patch)):
529 raise util.Abort(_('patch "%s" already exists') % patch)
529 raise util.Abort(_('patch "%s" already exists') % patch)
530 m, a, r, d = self.check_localchanges(repo, force)
530 m, a, r, d = self.check_localchanges(repo, force)
531 commitfiles = m + a + r
531 commitfiles = m + a + r
532 self.check_toppatch(repo)
532 self.check_toppatch(repo)
533 wlock = repo.wlock()
533 wlock = repo.wlock()
534 insert = self.full_series_end()
534 insert = self.full_series_end()
535 if msg:
535 if msg:
536 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
536 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
537 wlock=wlock)
537 wlock=wlock)
538 else:
538 else:
539 n = repo.commit(commitfiles,
539 n = repo.commit(commitfiles,
540 "New patch: %s" % patch, force=True, wlock=wlock)
540 "New patch: %s" % patch, force=True, wlock=wlock)
541 if n == None:
541 if n == None:
542 raise util.Abort(_("repo commit failed"))
542 raise util.Abort(_("repo commit failed"))
543 self.full_series[insert:insert] = [patch]
543 self.full_series[insert:insert] = [patch]
544 self.applied.append(statusentry(revlog.hex(n), patch))
544 self.applied.append(statusentry(revlog.hex(n), patch))
545 self.parse_series()
545 self.parse_series()
546 self.series_dirty = 1
546 self.series_dirty = 1
547 self.applied_dirty = 1
547 self.applied_dirty = 1
548 p = self.opener(patch, "w")
548 p = self.opener(patch, "w")
549 if msg:
549 if msg:
550 msg = msg + "\n"
550 msg = msg + "\n"
551 p.write(msg)
551 p.write(msg)
552 p.close()
552 p.close()
553 wlock = None
553 wlock = None
554 r = self.qrepo()
554 r = self.qrepo()
555 if r: r.add([patch])
555 if r: r.add([patch])
556 if commitfiles:
556 if commitfiles:
557 self.refresh(repo, short=True)
557 self.refresh(repo, short=True)
558
558
559 def strip(self, repo, rev, update=True, backup="all", wlock=None):
559 def strip(self, repo, rev, update=True, backup="all", wlock=None):
560 def limitheads(chlog, stop):
560 def limitheads(chlog, stop):
561 """return the list of all nodes that have no children"""
561 """return the list of all nodes that have no children"""
562 p = {}
562 p = {}
563 h = []
563 h = []
564 stoprev = 0
564 stoprev = 0
565 if stop in chlog.nodemap:
565 if stop in chlog.nodemap:
566 stoprev = chlog.rev(stop)
566 stoprev = chlog.rev(stop)
567
567
568 for r in range(chlog.count() - 1, -1, -1):
568 for r in range(chlog.count() - 1, -1, -1):
569 n = chlog.node(r)
569 n = chlog.node(r)
570 if n not in p:
570 if n not in p:
571 h.append(n)
571 h.append(n)
572 if n == stop:
572 if n == stop:
573 break
573 break
574 if r < stoprev:
574 if r < stoprev:
575 break
575 break
576 for pn in chlog.parents(n):
576 for pn in chlog.parents(n):
577 p[pn] = 1
577 p[pn] = 1
578 return h
578 return h
579
579
580 def bundle(cg):
580 def bundle(cg):
581 backupdir = repo.join("strip-backup")
581 backupdir = repo.join("strip-backup")
582 if not os.path.isdir(backupdir):
582 if not os.path.isdir(backupdir):
583 os.mkdir(backupdir)
583 os.mkdir(backupdir)
584 name = os.path.join(backupdir, "%s" % revlog.short(rev))
584 name = os.path.join(backupdir, "%s" % revlog.short(rev))
585 name = savename(name)
585 name = savename(name)
586 self.ui.warn("saving bundle to %s\n" % name)
586 self.ui.warn("saving bundle to %s\n" % name)
587 # TODO, exclusive open
587 # TODO, exclusive open
588 f = open(name, "wb")
588 f = open(name, "wb")
589 try:
589 try:
590 f.write("HG10")
590 f.write("HG10")
591 z = bz2.BZ2Compressor(9)
591 z = bz2.BZ2Compressor(9)
592 while 1:
592 while 1:
593 chunk = cg.read(4096)
593 chunk = cg.read(4096)
594 if not chunk:
594 if not chunk:
595 break
595 break
596 f.write(z.compress(chunk))
596 f.write(z.compress(chunk))
597 f.write(z.flush())
597 f.write(z.flush())
598 except:
598 except:
599 os.unlink(name)
599 os.unlink(name)
600 raise
600 raise
601 f.close()
601 f.close()
602 return name
602 return name
603
603
604 def stripall(rev, revnum):
604 def stripall(rev, revnum):
605 cl = repo.changelog
605 cl = repo.changelog
606 c = cl.read(rev)
606 c = cl.read(rev)
607 mm = repo.manifest.read(c[0])
607 mm = repo.manifest.read(c[0])
608 seen = {}
608 seen = {}
609
609
610 for x in xrange(revnum, cl.count()):
610 for x in xrange(revnum, cl.count()):
611 c = cl.read(cl.node(x))
611 c = cl.read(cl.node(x))
612 for f in c[3]:
612 for f in c[3]:
613 if f in seen:
613 if f in seen:
614 continue
614 continue
615 seen[f] = 1
615 seen[f] = 1
616 if f in mm:
616 if f in mm:
617 filerev = mm[f]
617 filerev = mm[f]
618 else:
618 else:
619 filerev = 0
619 filerev = 0
620 seen[f] = filerev
620 seen[f] = filerev
621 # we go in two steps here so the strip loop happens in a
621 # we go in two steps here so the strip loop happens in a
622 # sensible order. When stripping many files, this helps keep
622 # sensible order. When stripping many files, this helps keep
623 # our disk access patterns under control.
623 # our disk access patterns under control.
624 seen_list = seen.keys()
624 seen_list = seen.keys()
625 seen_list.sort()
625 seen_list.sort()
626 for f in seen_list:
626 for f in seen_list:
627 ff = repo.file(f)
627 ff = repo.file(f)
628 filerev = seen[f]
628 filerev = seen[f]
629 if filerev != 0:
629 if filerev != 0:
630 if filerev in ff.nodemap:
630 if filerev in ff.nodemap:
631 filerev = ff.rev(filerev)
631 filerev = ff.rev(filerev)
632 else:
632 else:
633 filerev = 0
633 filerev = 0
634 ff.strip(filerev, revnum)
634 ff.strip(filerev, revnum)
635
635
636 if not wlock:
636 if not wlock:
637 wlock = repo.wlock()
637 wlock = repo.wlock()
638 lock = repo.lock()
638 lock = repo.lock()
639 chlog = repo.changelog
639 chlog = repo.changelog
640 # TODO delete the undo files, and handle undo of merge sets
640 # TODO delete the undo files, and handle undo of merge sets
641 pp = chlog.parents(rev)
641 pp = chlog.parents(rev)
642 revnum = chlog.rev(rev)
642 revnum = chlog.rev(rev)
643
643
644 if update:
644 if update:
645 self.check_localchanges(repo, refresh=False)
645 self.check_localchanges(repo, refresh=False)
646 urev = self.qparents(repo, rev)
646 urev = self.qparents(repo, rev)
647 hg.clean(repo, urev, wlock=wlock)
647 hg.clean(repo, urev, wlock=wlock)
648 repo.dirstate.write()
648 repo.dirstate.write()
649
649
650 # save is a list of all the branches we are truncating away
650 # save is a list of all the branches we are truncating away
651 # that we actually want to keep. changegroup will be used
651 # that we actually want to keep. changegroup will be used
652 # to preserve them and add them back after the truncate
652 # to preserve them and add them back after the truncate
653 saveheads = []
653 saveheads = []
654 savebases = {}
654 savebases = {}
655
655
656 heads = limitheads(chlog, rev)
656 heads = limitheads(chlog, rev)
657 seen = {}
657 seen = {}
658
658
659 # search through all the heads, finding those where the revision
659 # search through all the heads, finding those where the revision
660 # we want to strip away is an ancestor. Also look for merges
660 # we want to strip away is an ancestor. Also look for merges
661 # that might be turned into new heads by the strip.
661 # that might be turned into new heads by the strip.
662 while heads:
662 while heads:
663 h = heads.pop()
663 h = heads.pop()
664 n = h
664 n = h
665 while True:
665 while True:
666 seen[n] = 1
666 seen[n] = 1
667 pp = chlog.parents(n)
667 pp = chlog.parents(n)
668 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
668 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
669 if pp[1] not in seen:
669 if pp[1] not in seen:
670 heads.append(pp[1])
670 heads.append(pp[1])
671 if pp[0] == revlog.nullid:
671 if pp[0] == revlog.nullid:
672 break
672 break
673 if chlog.rev(pp[0]) < revnum:
673 if chlog.rev(pp[0]) < revnum:
674 break
674 break
675 n = pp[0]
675 n = pp[0]
676 if n == rev:
676 if n == rev:
677 break
677 break
678 r = chlog.reachable(h, rev)
678 r = chlog.reachable(h, rev)
679 if rev not in r:
679 if rev not in r:
680 saveheads.append(h)
680 saveheads.append(h)
681 for x in r:
681 for x in r:
682 if chlog.rev(x) > revnum:
682 if chlog.rev(x) > revnum:
683 savebases[x] = 1
683 savebases[x] = 1
684
684
685 # create a changegroup for all the branches we need to keep
685 # create a changegroup for all the branches we need to keep
686 if backup == "all":
686 if backup == "all":
687 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
687 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
688 bundle(backupch)
688 bundle(backupch)
689 if saveheads:
689 if saveheads:
690 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
690 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
691 chgrpfile = bundle(backupch)
691 chgrpfile = bundle(backupch)
692
692
693 stripall(rev, revnum)
693 stripall(rev, revnum)
694
694
695 change = chlog.read(rev)
695 change = chlog.read(rev)
696 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
696 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
697 chlog.strip(revnum, revnum)
697 chlog.strip(revnum, revnum)
698 if saveheads:
698 if saveheads:
699 self.ui.status("adding branch\n")
699 self.ui.status("adding branch\n")
700 commands.unbundle(self.ui, repo, chgrpfile, update=False)
700 commands.unbundle(self.ui, repo, chgrpfile, update=False)
701 if backup != "strip":
701 if backup != "strip":
702 os.unlink(chgrpfile)
702 os.unlink(chgrpfile)
703
703
704 def isapplied(self, patch):
704 def isapplied(self, patch):
705 """returns (index, rev, patch)"""
705 """returns (index, rev, patch)"""
706 for i in xrange(len(self.applied)):
706 for i in xrange(len(self.applied)):
707 a = self.applied[i]
707 a = self.applied[i]
708 if a.name == patch:
708 if a.name == patch:
709 return (i, a.rev, a.name)
709 return (i, a.rev, a.name)
710 return None
710 return None
711
711
712 # if the exact patch name does not exist, we try a few
712 # if the exact patch name does not exist, we try a few
713 # variations. If strict is passed, we try only #1
713 # variations. If strict is passed, we try only #1
714 #
714 #
715 # 1) a number to indicate an offset in the series file
715 # 1) a number to indicate an offset in the series file
716 # 2) a unique substring of the patch name was given
716 # 2) a unique substring of the patch name was given
717 # 3) patchname[-+]num to indicate an offset in the series file
717 # 3) patchname[-+]num to indicate an offset in the series file
718 def lookup(self, patch, strict=False):
718 def lookup(self, patch, strict=False):
719 patch = patch and str(patch)
719 patch = patch and str(patch)
720
720
721 def partial_name(s):
721 def partial_name(s):
722 if s in self.series:
722 if s in self.series:
723 return s
723 return s
724 matches = [x for x in self.series if s in x]
724 matches = [x for x in self.series if s in x]
725 if len(matches) > 1:
725 if len(matches) > 1:
726 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
726 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
727 for m in matches:
727 for m in matches:
728 self.ui.warn(' %s\n' % m)
728 self.ui.warn(' %s\n' % m)
729 return None
729 return None
730 if matches:
730 if matches:
731 return matches[0]
731 return matches[0]
732 if len(self.series) > 0 and len(self.applied) > 0:
732 if len(self.series) > 0 and len(self.applied) > 0:
733 if s == 'qtip':
733 if s == 'qtip':
734 return self.series[self.series_end()-1]
734 return self.series[self.series_end()-1]
735 if s == 'qbase':
735 if s == 'qbase':
736 return self.series[0]
736 return self.series[0]
737 return None
737 return None
738 if patch == None:
738 if patch == None:
739 return None
739 return None
740
740
741 # we don't want to return a partial match until we make
741 # we don't want to return a partial match until we make
742 # sure the file name passed in does not exist (checked below)
742 # sure the file name passed in does not exist (checked below)
743 res = partial_name(patch)
743 res = partial_name(patch)
744 if res and res == patch:
744 if res and res == patch:
745 return res
745 return res
746
746
747 if not os.path.isfile(self.join(patch)):
747 if not os.path.isfile(self.join(patch)):
748 try:
748 try:
749 sno = int(patch)
749 sno = int(patch)
750 except(ValueError, OverflowError):
750 except(ValueError, OverflowError):
751 pass
751 pass
752 else:
752 else:
753 if sno < len(self.series):
753 if sno < len(self.series):
754 return self.series[sno]
754 return self.series[sno]
755 if not strict:
755 if not strict:
756 # return any partial match made above
756 # return any partial match made above
757 if res:
757 if res:
758 return res
758 return res
759 minus = patch.rsplit('-', 1)
759 minus = patch.rsplit('-', 1)
760 if len(minus) > 1:
760 if len(minus) > 1:
761 res = partial_name(minus[0])
761 res = partial_name(minus[0])
762 if res:
762 if res:
763 i = self.series.index(res)
763 i = self.series.index(res)
764 try:
764 try:
765 off = int(minus[1] or 1)
765 off = int(minus[1] or 1)
766 except(ValueError, OverflowError):
766 except(ValueError, OverflowError):
767 pass
767 pass
768 else:
768 else:
769 if i - off >= 0:
769 if i - off >= 0:
770 return self.series[i - off]
770 return self.series[i - off]
771 plus = patch.rsplit('+', 1)
771 plus = patch.rsplit('+', 1)
772 if len(plus) > 1:
772 if len(plus) > 1:
773 res = partial_name(plus[0])
773 res = partial_name(plus[0])
774 if res:
774 if res:
775 i = self.series.index(res)
775 i = self.series.index(res)
776 try:
776 try:
777 off = int(plus[1] or 1)
777 off = int(plus[1] or 1)
778 except(ValueError, OverflowError):
778 except(ValueError, OverflowError):
779 pass
779 pass
780 else:
780 else:
781 if i + off < len(self.series):
781 if i + off < len(self.series):
782 return self.series[i + off]
782 return self.series[i + off]
783 raise util.Abort(_("patch %s not in series") % patch)
783 raise util.Abort(_("patch %s not in series") % patch)
784
784
785 def push(self, repo, patch=None, force=False, list=False,
785 def push(self, repo, patch=None, force=False, list=False,
786 mergeq=None, wlock=None):
786 mergeq=None, wlock=None):
787 if not wlock:
787 if not wlock:
788 wlock = repo.wlock()
788 wlock = repo.wlock()
789 patch = self.lookup(patch)
789 patch = self.lookup(patch)
790 if patch and self.isapplied(patch):
790 if patch and self.isapplied(patch):
791 self.ui.warn(_("patch %s is already applied\n") % patch)
791 self.ui.warn(_("patch %s is already applied\n") % patch)
792 sys.exit(1)
792 sys.exit(1)
793 if self.series_end() == len(self.series):
793 if self.series_end() == len(self.series):
794 self.ui.warn(_("patch series fully applied\n"))
794 self.ui.warn(_("patch series fully applied\n"))
795 sys.exit(1)
795 sys.exit(1)
796 if not force:
796 if not force:
797 self.check_localchanges(repo)
797 self.check_localchanges(repo)
798
798
799 self.applied_dirty = 1;
799 self.applied_dirty = 1;
800 start = self.series_end()
800 start = self.series_end()
801 if start > 0:
801 if start > 0:
802 self.check_toppatch(repo)
802 self.check_toppatch(repo)
803 if not patch:
803 if not patch:
804 patch = self.series[start]
804 patch = self.series[start]
805 end = start + 1
805 end = start + 1
806 else:
806 else:
807 end = self.series.index(patch, start) + 1
807 end = self.series.index(patch, start) + 1
808 s = self.series[start:end]
808 s = self.series[start:end]
809 if mergeq:
809 if mergeq:
810 ret = self.mergepatch(repo, mergeq, s, wlock)
810 ret = self.mergepatch(repo, mergeq, s, wlock)
811 else:
811 else:
812 ret = self.apply(repo, s, list, wlock=wlock)
812 ret = self.apply(repo, s, list, wlock=wlock)
813 top = self.applied[-1].name
813 top = self.applied[-1].name
814 if ret[0]:
814 if ret[0]:
815 self.ui.write("Errors during apply, please fix and refresh %s\n" %
815 self.ui.write("Errors during apply, please fix and refresh %s\n" %
816 top)
816 top)
817 else:
817 else:
818 self.ui.write("Now at: %s\n" % top)
818 self.ui.write("Now at: %s\n" % top)
819 return ret[0]
819 return ret[0]
820
820
821 def pop(self, repo, patch=None, force=False, update=True, all=False,
821 def pop(self, repo, patch=None, force=False, update=True, all=False,
822 wlock=None):
822 wlock=None):
823 def getfile(f, rev):
823 def getfile(f, rev):
824 t = repo.file(f).read(rev)
824 t = repo.file(f).read(rev)
825 try:
825 try:
826 repo.wfile(f, "w").write(t)
826 repo.wfile(f, "w").write(t)
827 except IOError:
827 except IOError:
828 try:
828 try:
829 os.makedirs(os.path.dirname(repo.wjoin(f)))
829 os.makedirs(os.path.dirname(repo.wjoin(f)))
830 except OSError, err:
830 except OSError, err:
831 if err.errno != errno.EEXIST: raise
831 if err.errno != errno.EEXIST: raise
832 repo.wfile(f, "w").write(t)
832 repo.wfile(f, "w").write(t)
833
833
834 if not wlock:
834 if not wlock:
835 wlock = repo.wlock()
835 wlock = repo.wlock()
836 if patch:
836 if patch:
837 # index, rev, patch
837 # index, rev, patch
838 info = self.isapplied(patch)
838 info = self.isapplied(patch)
839 if not info:
839 if not info:
840 patch = self.lookup(patch)
840 patch = self.lookup(patch)
841 info = self.isapplied(patch)
841 info = self.isapplied(patch)
842 if not info:
842 if not info:
843 raise util.Abort(_("patch %s is not applied") % patch)
843 raise util.Abort(_("patch %s is not applied") % patch)
844 if len(self.applied) == 0:
844 if len(self.applied) == 0:
845 self.ui.warn(_("no patches applied\n"))
845 self.ui.warn(_("no patches applied\n"))
846 sys.exit(1)
846 sys.exit(1)
847
847
848 if not update:
848 if not update:
849 parents = repo.dirstate.parents()
849 parents = repo.dirstate.parents()
850 rr = [ revlog.bin(x.rev) for x in self.applied ]
850 rr = [ revlog.bin(x.rev) for x in self.applied ]
851 for p in parents:
851 for p in parents:
852 if p in rr:
852 if p in rr:
853 self.ui.warn("qpop: forcing dirstate update\n")
853 self.ui.warn("qpop: forcing dirstate update\n")
854 update = True
854 update = True
855
855
856 if not force and update:
856 if not force and update:
857 self.check_localchanges(repo)
857 self.check_localchanges(repo)
858
858
859 self.applied_dirty = 1;
859 self.applied_dirty = 1;
860 end = len(self.applied)
860 end = len(self.applied)
861 if not patch:
861 if not patch:
862 if all:
862 if all:
863 popi = 0
863 popi = 0
864 else:
864 else:
865 popi = len(self.applied) - 1
865 popi = len(self.applied) - 1
866 else:
866 else:
867 popi = info[0] + 1
867 popi = info[0] + 1
868 if popi >= end:
868 if popi >= end:
869 self.ui.warn("qpop: %s is already at the top\n" % patch)
869 self.ui.warn("qpop: %s is already at the top\n" % patch)
870 return
870 return
871 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
871 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
872
872
873 start = info[0]
873 start = info[0]
874 rev = revlog.bin(info[1])
874 rev = revlog.bin(info[1])
875
875
876 # we know there are no local changes, so we can make a simplified
876 # we know there are no local changes, so we can make a simplified
877 # form of hg.update.
877 # form of hg.update.
878 if update:
878 if update:
879 top = self.check_toppatch(repo)
879 top = self.check_toppatch(repo)
880 qp = self.qparents(repo, rev)
880 qp = self.qparents(repo, rev)
881 changes = repo.changelog.read(qp)
881 changes = repo.changelog.read(qp)
882 mmap = repo.manifest.read(changes[0])
882 mmap = repo.manifest.read(changes[0])
883 m, a, r, d, u = repo.status(qp, top)[:5]
883 m, a, r, d, u = repo.status(qp, top)[:5]
884 if d:
884 if d:
885 raise util.Abort("deletions found between repo revs")
885 raise util.Abort("deletions found between repo revs")
886 for f in m:
886 for f in m:
887 getfile(f, mmap[f])
887 getfile(f, mmap[f])
888 for f in r:
888 for f in r:
889 getfile(f, mmap[f])
889 getfile(f, mmap[f])
890 util.set_exec(repo.wjoin(f), mmap.execf(f))
890 util.set_exec(repo.wjoin(f), mmap.execf(f))
891 repo.dirstate.update(m + r, 'n')
891 repo.dirstate.update(m + r, 'n')
892 for f in a:
892 for f in a:
893 try: os.unlink(repo.wjoin(f))
893 try: os.unlink(repo.wjoin(f))
894 except: raise
894 except: raise
895 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
895 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
896 except: pass
896 except: pass
897 if a:
897 if a:
898 repo.dirstate.forget(a)
898 repo.dirstate.forget(a)
899 repo.dirstate.setparents(qp, revlog.nullid)
899 repo.dirstate.setparents(qp, revlog.nullid)
900 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
900 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
901 del self.applied[start:end]
901 del self.applied[start:end]
902 if len(self.applied):
902 if len(self.applied):
903 self.ui.write("Now at: %s\n" % self.applied[-1].name)
903 self.ui.write("Now at: %s\n" % self.applied[-1].name)
904 else:
904 else:
905 self.ui.write("Patch queue now empty\n")
905 self.ui.write("Patch queue now empty\n")
906
906
907 def diff(self, repo, files):
907 def diff(self, repo, files):
908 top = self.check_toppatch(repo)
908 top = self.check_toppatch(repo)
909 if not top:
909 if not top:
910 self.ui.write("No patches applied\n")
910 self.ui.write("No patches applied\n")
911 return
911 return
912 qp = self.qparents(repo, top)
912 qp = self.qparents(repo, top)
913 self.printdiff(repo, qp, files=files)
913 self.printdiff(repo, qp, files=files)
914
914
915 def refresh(self, repo, msg='', short=False):
915 def refresh(self, repo, msg='', short=False):
916 if len(self.applied) == 0:
916 if len(self.applied) == 0:
917 self.ui.write("No patches applied\n")
917 self.ui.write("No patches applied\n")
918 return
918 return
919 wlock = repo.wlock()
919 wlock = repo.wlock()
920 self.check_toppatch(repo)
920 self.check_toppatch(repo)
921 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
921 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
922 top = revlog.bin(top)
922 top = revlog.bin(top)
923 cparents = repo.changelog.parents(top)
923 cparents = repo.changelog.parents(top)
924 patchparent = self.qparents(repo, top)
924 patchparent = self.qparents(repo, top)
925 message, comments, user, date, patchfound = self.readheaders(patch)
925 message, comments, user, date, patchfound = self.readheaders(patch)
926
926
927 patchf = self.opener(patch, "w")
927 patchf = self.opener(patch, "w")
928 msg = msg.rstrip()
928 msg = msg.rstrip()
929 if msg:
929 if msg:
930 if comments:
930 if comments:
931 # Remove existing message.
931 # Remove existing message.
932 ci = 0
932 ci = 0
933 for mi in range(len(message)):
933 for mi in range(len(message)):
934 while message[mi] != comments[ci]:
934 while message[mi] != comments[ci]:
935 ci += 1
935 ci += 1
936 del comments[ci]
936 del comments[ci]
937 comments.append(msg)
937 comments.append(msg)
938 if comments:
938 if comments:
939 comments = "\n".join(comments) + '\n\n'
939 comments = "\n".join(comments) + '\n\n'
940 patchf.write(comments)
940 patchf.write(comments)
941
941
942 tip = repo.changelog.tip()
942 tip = repo.changelog.tip()
943 if top == tip:
943 if top == tip:
944 # if the top of our patch queue is also the tip, there is an
944 # if the top of our patch queue is also the tip, there is an
945 # optimization here. We update the dirstate in place and strip
945 # optimization here. We update the dirstate in place and strip
946 # off the tip commit. Then just commit the current directory
946 # off the tip commit. Then just commit the current directory
947 # tree. We can also send repo.commit the list of files
947 # tree. We can also send repo.commit the list of files
948 # changed to speed up the diff
948 # changed to speed up the diff
949 #
949 #
950 # in short mode, we only diff the files included in the
950 # in short mode, we only diff the files included in the
951 # patch already
951 # patch already
952 #
952 #
953 # this should really read:
953 # this should really read:
954 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
954 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
955 # but we do it backwards to take advantage of manifest/chlog
955 # but we do it backwards to take advantage of manifest/chlog
956 # caching against the next repo.status call
956 # caching against the next repo.status call
957 #
957 #
958 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
958 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
959 if short:
959 if short:
960 filelist = mm + aa + dd
960 filelist = mm + aa + dd
961 else:
961 else:
962 filelist = None
962 filelist = None
963 m, a, r, d, u = repo.status(files=filelist)[:5]
963 m, a, r, d, u = repo.status(files=filelist)[:5]
964
964
965 # we might end up with files that were added between tip and
965 # we might end up with files that were added between tip and
966 # the dirstate parent, but then changed in the local dirstate.
966 # the dirstate parent, but then changed in the local dirstate.
967 # in this case, we want them to only show up in the added section
967 # in this case, we want them to only show up in the added section
968 for x in m:
968 for x in m:
969 if x not in aa:
969 if x not in aa:
970 mm.append(x)
970 mm.append(x)
971 # we might end up with files added by the local dirstate that
971 # we might end up with files added by the local dirstate that
972 # were deleted by the patch. In this case, they should only
972 # were deleted by the patch. In this case, they should only
973 # show up in the changed section.
973 # show up in the changed section.
974 for x in a:
974 for x in a:
975 if x in dd:
975 if x in dd:
976 del dd[dd.index(x)]
976 del dd[dd.index(x)]
977 mm.append(x)
977 mm.append(x)
978 else:
978 else:
979 aa.append(x)
979 aa.append(x)
980 # make sure any files deleted in the local dirstate
980 # make sure any files deleted in the local dirstate
981 # are not in the add or change column of the patch
981 # are not in the add or change column of the patch
982 forget = []
982 forget = []
983 for x in d + r:
983 for x in d + r:
984 if x in aa:
984 if x in aa:
985 del aa[aa.index(x)]
985 del aa[aa.index(x)]
986 forget.append(x)
986 forget.append(x)
987 continue
987 continue
988 elif x in mm:
988 elif x in mm:
989 del mm[mm.index(x)]
989 del mm[mm.index(x)]
990 dd.append(x)
990 dd.append(x)
991
991
992 m = list(util.unique(mm))
992 m = list(util.unique(mm))
993 r = list(util.unique(dd))
993 r = list(util.unique(dd))
994 a = list(util.unique(aa))
994 a = list(util.unique(aa))
995 filelist = list(util.unique(m + r + a))
995 filelist = list(util.unique(m + r + a))
996 self.printdiff(repo, patchparent, files=filelist,
996 self.printdiff(repo, patchparent, files=filelist,
997 changes=(m, a, r, [], u), fp=patchf)
997 changes=(m, a, r, [], u), fp=patchf)
998 patchf.close()
998 patchf.close()
999
999
1000 changes = repo.changelog.read(tip)
1000 changes = repo.changelog.read(tip)
1001 repo.dirstate.setparents(*cparents)
1001 repo.dirstate.setparents(*cparents)
1002 repo.dirstate.update(a, 'a')
1002 repo.dirstate.update(a, 'a')
1003 repo.dirstate.update(r, 'r')
1003 repo.dirstate.update(r, 'r')
1004 repo.dirstate.update(m, 'n')
1004 repo.dirstate.update(m, 'n')
1005 repo.dirstate.forget(forget)
1005 repo.dirstate.forget(forget)
1006
1006
1007 if not msg:
1007 if not msg:
1008 if not message:
1008 if not message:
1009 message = "patch queue: %s\n" % patch
1009 message = "patch queue: %s\n" % patch
1010 else:
1010 else:
1011 message = "\n".join(message)
1011 message = "\n".join(message)
1012 else:
1012 else:
1013 message = msg
1013 message = msg
1014
1014
1015 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1015 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1016 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1016 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1017 self.applied[-1] = statusentry(revlog.hex(n), patch)
1017 self.applied[-1] = statusentry(revlog.hex(n), patch)
1018 self.applied_dirty = 1
1018 self.applied_dirty = 1
1019 else:
1019 else:
1020 self.printdiff(repo, patchparent, fp=patchf)
1020 self.printdiff(repo, patchparent, fp=patchf)
1021 patchf.close()
1021 patchf.close()
1022 self.pop(repo, force=True, wlock=wlock)
1022 self.pop(repo, force=True, wlock=wlock)
1023 self.push(repo, force=True, wlock=wlock)
1023 self.push(repo, force=True, wlock=wlock)
1024
1024
1025 def init(self, repo, create=False):
1025 def init(self, repo, create=False):
1026 if os.path.isdir(self.path):
1026 if os.path.isdir(self.path):
1027 raise util.Abort(_("patch queue directory already exists"))
1027 raise util.Abort(_("patch queue directory already exists"))
1028 os.mkdir(self.path)
1028 os.mkdir(self.path)
1029 if create:
1029 if create:
1030 return self.qrepo(create=True)
1030 return self.qrepo(create=True)
1031
1031
1032 def unapplied(self, repo, patch=None):
1032 def unapplied(self, repo, patch=None):
1033 if patch and patch not in self.series:
1033 if patch and patch not in self.series:
1034 raise util.Abort(_("patch %s is not in series file") % patch)
1034 raise util.Abort(_("patch %s is not in series file") % patch)
1035 if not patch:
1035 if not patch:
1036 start = self.series_end()
1036 start = self.series_end()
1037 else:
1037 else:
1038 start = self.series.index(patch) + 1
1038 start = self.series.index(patch) + 1
1039 unapplied = []
1039 unapplied = []
1040 for i in xrange(start, len(self.series)):
1040 for i in xrange(start, len(self.series)):
1041 pushable, reason = self.pushable(i)
1041 pushable, reason = self.pushable(i)
1042 if pushable:
1042 if pushable:
1043 unapplied.append((i, self.series[i]))
1043 unapplied.append((i, self.series[i]))
1044 self.explain_pushable(i)
1044 self.explain_pushable(i)
1045 return unapplied
1045 return unapplied
1046
1046
1047 def qseries(self, repo, missing=None, summary=False):
1047 def qseries(self, repo, missing=None, summary=False):
1048 start = self.series_end(all_patches=True)
1048 start = self.series_end(all_patches=True)
1049 if not missing:
1049 if not missing:
1050 for i in range(len(self.series)):
1050 for i in range(len(self.series)):
1051 patch = self.series[i]
1051 patch = self.series[i]
1052 if self.ui.verbose:
1052 if self.ui.verbose:
1053 if i < start:
1053 if i < start:
1054 status = 'A'
1054 status = 'A'
1055 elif self.pushable(i)[0]:
1055 elif self.pushable(i)[0]:
1056 status = 'U'
1056 status = 'U'
1057 else:
1057 else:
1058 status = 'G'
1058 status = 'G'
1059 self.ui.write('%d %s ' % (i, status))
1059 self.ui.write('%d %s ' % (i, status))
1060 if summary:
1060 if summary:
1061 msg = self.readheaders(patch)[0]
1061 msg = self.readheaders(patch)[0]
1062 msg = msg and ': ' + msg[0] or ': '
1062 msg = msg and ': ' + msg[0] or ': '
1063 else:
1063 else:
1064 msg = ''
1064 msg = ''
1065 self.ui.write('%s%s\n' % (patch, msg))
1065 self.ui.write('%s%s\n' % (patch, msg))
1066 else:
1066 else:
1067 msng_list = []
1067 msng_list = []
1068 for root, dirs, files in os.walk(self.path):
1068 for root, dirs, files in os.walk(self.path):
1069 d = root[len(self.path) + 1:]
1069 d = root[len(self.path) + 1:]
1070 for f in files:
1070 for f in files:
1071 fl = os.path.join(d, f)
1071 fl = os.path.join(d, f)
1072 if (fl not in self.series and
1072 if (fl not in self.series and
1073 fl not in (self.status_path, self.series_path)
1073 fl not in (self.status_path, self.series_path)
1074 and not fl.startswith('.')):
1074 and not fl.startswith('.')):
1075 msng_list.append(fl)
1075 msng_list.append(fl)
1076 msng_list.sort()
1076 msng_list.sort()
1077 for x in msng_list:
1077 for x in msng_list:
1078 if self.ui.verbose:
1078 if self.ui.verbose:
1079 self.ui.write("D ")
1079 self.ui.write("D ")
1080 self.ui.write("%s\n" % x)
1080 self.ui.write("%s\n" % x)
1081
1081
1082 def issaveline(self, l):
1082 def issaveline(self, l):
1083 if l.name == '.hg.patches.save.line':
1083 if l.name == '.hg.patches.save.line':
1084 return True
1084 return True
1085
1085
1086 def qrepo(self, create=False):
1086 def qrepo(self, create=False):
1087 if create or os.path.isdir(self.join(".hg")):
1087 if create or os.path.isdir(self.join(".hg")):
1088 return hg.repository(self.ui, path=self.path, create=create)
1088 return hg.repository(self.ui, path=self.path, create=create)
1089
1089
1090 def restore(self, repo, rev, delete=None, qupdate=None):
1090 def restore(self, repo, rev, delete=None, qupdate=None):
1091 c = repo.changelog.read(rev)
1091 c = repo.changelog.read(rev)
1092 desc = c[4].strip()
1092 desc = c[4].strip()
1093 lines = desc.splitlines()
1093 lines = desc.splitlines()
1094 i = 0
1094 i = 0
1095 datastart = None
1095 datastart = None
1096 series = []
1096 series = []
1097 applied = []
1097 applied = []
1098 qpp = None
1098 qpp = None
1099 for i in xrange(0, len(lines)):
1099 for i in xrange(0, len(lines)):
1100 if lines[i] == 'Patch Data:':
1100 if lines[i] == 'Patch Data:':
1101 datastart = i + 1
1101 datastart = i + 1
1102 elif lines[i].startswith('Dirstate:'):
1102 elif lines[i].startswith('Dirstate:'):
1103 l = lines[i].rstrip()
1103 l = lines[i].rstrip()
1104 l = l[10:].split(' ')
1104 l = l[10:].split(' ')
1105 qpp = [ hg.bin(x) for x in l ]
1105 qpp = [ hg.bin(x) for x in l ]
1106 elif datastart != None:
1106 elif datastart != None:
1107 l = lines[i].rstrip()
1107 l = lines[i].rstrip()
1108 se = statusentry(l)
1108 se = statusentry(l)
1109 file_ = se.name
1109 file_ = se.name
1110 if se.rev:
1110 if se.rev:
1111 applied.append(se)
1111 applied.append(se)
1112 series.append(file_)
1112 series.append(file_)
1113 if datastart == None:
1113 if datastart == None:
1114 self.ui.warn("No saved patch data found\n")
1114 self.ui.warn("No saved patch data found\n")
1115 return 1
1115 return 1
1116 self.ui.warn("restoring status: %s\n" % lines[0])
1116 self.ui.warn("restoring status: %s\n" % lines[0])
1117 self.full_series = series
1117 self.full_series = series
1118 self.applied = applied
1118 self.applied = applied
1119 self.parse_series()
1119 self.parse_series()
1120 self.series_dirty = 1
1120 self.series_dirty = 1
1121 self.applied_dirty = 1
1121 self.applied_dirty = 1
1122 heads = repo.changelog.heads()
1122 heads = repo.changelog.heads()
1123 if delete:
1123 if delete:
1124 if rev not in heads:
1124 if rev not in heads:
1125 self.ui.warn("save entry has children, leaving it alone\n")
1125 self.ui.warn("save entry has children, leaving it alone\n")
1126 else:
1126 else:
1127 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1127 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1128 pp = repo.dirstate.parents()
1128 pp = repo.dirstate.parents()
1129 if rev in pp:
1129 if rev in pp:
1130 update = True
1130 update = True
1131 else:
1131 else:
1132 update = False
1132 update = False
1133 self.strip(repo, rev, update=update, backup='strip')
1133 self.strip(repo, rev, update=update, backup='strip')
1134 if qpp:
1134 if qpp:
1135 self.ui.warn("saved queue repository parents: %s %s\n" %
1135 self.ui.warn("saved queue repository parents: %s %s\n" %
1136 (hg.short(qpp[0]), hg.short(qpp[1])))
1136 (hg.short(qpp[0]), hg.short(qpp[1])))
1137 if qupdate:
1137 if qupdate:
1138 print "queue directory updating"
1138 print "queue directory updating"
1139 r = self.qrepo()
1139 r = self.qrepo()
1140 if not r:
1140 if not r:
1141 self.ui.warn("Unable to load queue repository\n")
1141 self.ui.warn("Unable to load queue repository\n")
1142 return 1
1142 return 1
1143 hg.clean(r, qpp[0])
1143 hg.clean(r, qpp[0])
1144
1144
1145 def save(self, repo, msg=None):
1145 def save(self, repo, msg=None):
1146 if len(self.applied) == 0:
1146 if len(self.applied) == 0:
1147 self.ui.warn("save: no patches applied, exiting\n")
1147 self.ui.warn("save: no patches applied, exiting\n")
1148 return 1
1148 return 1
1149 if self.issaveline(self.applied[-1]):
1149 if self.issaveline(self.applied[-1]):
1150 self.ui.warn("status is already saved\n")
1150 self.ui.warn("status is already saved\n")
1151 return 1
1151 return 1
1152
1152
1153 ar = [ ':' + x for x in self.full_series ]
1153 ar = [ ':' + x for x in self.full_series ]
1154 if not msg:
1154 if not msg:
1155 msg = "hg patches saved state"
1155 msg = "hg patches saved state"
1156 else:
1156 else:
1157 msg = "hg patches: " + msg.rstrip('\r\n')
1157 msg = "hg patches: " + msg.rstrip('\r\n')
1158 r = self.qrepo()
1158 r = self.qrepo()
1159 if r:
1159 if r:
1160 pp = r.dirstate.parents()
1160 pp = r.dirstate.parents()
1161 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1161 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1162 msg += "\n\nPatch Data:\n"
1162 msg += "\n\nPatch Data:\n"
1163 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1163 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1164 "\n".join(ar) + '\n' or "")
1164 "\n".join(ar) + '\n' or "")
1165 n = repo.commit(None, text, user=None, force=1)
1165 n = repo.commit(None, text, user=None, force=1)
1166 if not n:
1166 if not n:
1167 self.ui.warn("repo commit failed\n")
1167 self.ui.warn("repo commit failed\n")
1168 return 1
1168 return 1
1169 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1169 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1170 self.applied_dirty = 1
1170 self.applied_dirty = 1
1171
1171
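As a rough illustration of the round trip between save() and restore() above, the text written into the qsave changeset can be sketched as below. This is not mq code; the "node:name" form for applied entries is an assumption based on how the status lines are split elsewhere in this file.

def _qsave_text(msg, dirstate_parents, applied, full_series):
    # applied: list of (hex node, patch name) pairs; full_series: all patch
    # names in the series.  restore() parses this same layout back, treating
    # data lines that carry a node as applied patches.
    text = msg
    if dirstate_parents:
        text += "\nDirstate: %s %s" % dirstate_parents
    text += "\n\nPatch Data:\n"
    text += "\n".join(["%s:%s" % (n, p) for n, p in applied]) + "\n"
    if full_series:
        text += "\n".join([":" + p for p in full_series]) + "\n"
    return text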
1172 def full_series_end(self):
1172 def full_series_end(self):
1173 if len(self.applied) > 0:
1173 if len(self.applied) > 0:
1174 p = self.applied[-1].name
1174 p = self.applied[-1].name
1175 end = self.find_series(p)
1175 end = self.find_series(p)
1176 if end == None:
1176 if end == None:
1177 return len(self.full_series)
1177 return len(self.full_series)
1178 return end + 1
1178 return end + 1
1179 return 0
1179 return 0
1180
1180
1181 def series_end(self, all_patches=False):
1181 def series_end(self, all_patches=False):
1182 end = 0
1182 end = 0
1183 def next(start):
1183 def next(start):
1184 if all_patches:
1184 if all_patches:
1185 return start
1185 return start
1186 i = start
1186 i = start
1187 while i < len(self.series):
1187 while i < len(self.series):
1188 p, reason = self.pushable(i)
1188 p, reason = self.pushable(i)
1189 if p:
1189 if p:
1190 break
1190 break
1191 self.explain_pushable(i)
1191 self.explain_pushable(i)
1192 i += 1
1192 i += 1
1193 return i
1193 return i
1194 if len(self.applied) > 0:
1194 if len(self.applied) > 0:
1195 p = self.applied[-1].name
1195 p = self.applied[-1].name
1196 try:
1196 try:
1197 end = self.series.index(p)
1197 end = self.series.index(p)
1198 except ValueError:
1198 except ValueError:
1199 return 0
1199 return 0
1200 return next(end + 1)
1200 return next(end + 1)
1201 return next(end)
1201 return next(end)
1202
1202
1203 def qapplied(self, repo, patch=None):
1203 def qapplied(self, repo, patch=None):
1204 if patch and patch not in self.series:
1204 if patch and patch not in self.series:
1205 raise util.Abort(_("patch %s is not in series file") % patch)
1205 raise util.Abort(_("patch %s is not in series file") % patch)
1206 if not patch:
1206 if not patch:
1207 end = len(self.applied)
1207 end = len(self.applied)
1208 else:
1208 else:
1209 end = self.series.index(patch) + 1
1209 end = self.series.index(patch) + 1
1210 for x in xrange(end):
1210 for x in xrange(end):
1211 p = self.appliedname(x)
1211 p = self.appliedname(x)
1212 self.ui.write("%s\n" % p)
1212 self.ui.write("%s\n" % p)
1213
1213
1214 def appliedname(self, index):
1214 def appliedname(self, index):
1215 pname = self.applied[index].name
1215 pname = self.applied[index].name
1216 if not self.ui.verbose:
1216 if not self.ui.verbose:
1217 p = pname
1217 p = pname
1218 else:
1218 else:
1219 p = str(self.series.index(pname)) + " " + pname
1219 p = str(self.series.index(pname)) + " " + pname
1220 return p
1220 return p
1221
1221
1222 def top(self, repo):
1222 def top(self, repo):
1223 if len(self.applied):
1223 if len(self.applied):
1224 p = self.appliedname(-1)
1224 p = self.appliedname(-1)
1225 self.ui.write(p + '\n')
1225 self.ui.write(p + '\n')
1226 else:
1226 else:
1227 self.ui.write("No patches applied\n")
1227 self.ui.write("No patches applied\n")
1228
1228
1229 def next(self, repo):
1229 def next(self, repo):
1230 end = self.series_end()
1230 end = self.series_end()
1231 if end == len(self.series):
1231 if end == len(self.series):
1232 self.ui.write("All patches applied\n")
1232 self.ui.write("All patches applied\n")
1233 else:
1233 else:
1234 p = self.series[end]
1234 p = self.series[end]
1235 if self.ui.verbose:
1235 if self.ui.verbose:
1236 self.ui.write("%d " % self.series.index(p))
1236 self.ui.write("%d " % self.series.index(p))
1237 self.ui.write(p + '\n')
1237 self.ui.write(p + '\n')
1238
1238
1239 def prev(self, repo):
1239 def prev(self, repo):
1240 if len(self.applied) > 1:
1240 if len(self.applied) > 1:
1241 p = self.appliedname(-2)
1241 p = self.appliedname(-2)
1242 self.ui.write(p + '\n')
1242 self.ui.write(p + '\n')
1243 elif len(self.applied) == 1:
1243 elif len(self.applied) == 1:
1244 self.ui.write("Only one patch applied\n")
1244 self.ui.write("Only one patch applied\n")
1245 else:
1245 else:
1246 self.ui.write("No patches applied\n")
1246 self.ui.write("No patches applied\n")
1247
1247
1248 def qimport(self, repo, files, patch=None, existing=None, force=None):
1248 def qimport(self, repo, files, patch=None, existing=None, force=None):
1249 if len(files) > 1 and patch:
1249 if len(files) > 1 and patch:
1250 raise util.Abort(_('option "-n" not valid when importing multiple '
1250 raise util.Abort(_('option "-n" not valid when importing multiple '
1251 'files'))
1251 'files'))
1252 i = 0
1252 i = 0
1253 added = []
1253 added = []
1254 for filename in files:
1254 for filename in files:
1255 if existing:
1255 if existing:
1256 if not patch:
1256 if not patch:
1257 patch = filename
1257 patch = filename
1258 if not os.path.isfile(self.join(patch)):
1258 if not os.path.isfile(self.join(patch)):
1259 raise util.Abort(_("patch %s does not exist") % patch)
1259 raise util.Abort(_("patch %s does not exist") % patch)
1260 else:
1260 else:
1261 try:
1261 try:
1262 text = file(filename).read()
1262 text = file(filename).read()
1263 except IOError:
1263 except IOError:
1264 raise util.Abort(_("unable to read %s") % patch)
1264 raise util.Abort(_("unable to read %s") % patch)
1265 if not patch:
1265 if not patch:
1266 patch = os.path.split(filename)[1]
1266 patch = os.path.split(filename)[1]
1267 if not force and os.path.exists(self.join(patch)):
1267 if not force and os.path.exists(self.join(patch)):
1268 raise util.Abort(_('patch "%s" already exists') % patch)
1268 raise util.Abort(_('patch "%s" already exists') % patch)
1269 patchf = self.opener(patch, "w")
1269 patchf = self.opener(patch, "w")
1270 patchf.write(text)
1270 patchf.write(text)
1271 if patch in self.series:
1271 if patch in self.series:
1272 raise util.Abort(_('patch %s is already in the series file')
1272 raise util.Abort(_('patch %s is already in the series file')
1273 % patch)
1273 % patch)
1274 index = self.full_series_end() + i
1274 index = self.full_series_end() + i
1275 self.full_series[index:index] = [patch]
1275 self.full_series[index:index] = [patch]
1276 self.parse_series()
1276 self.parse_series()
1277 self.ui.warn("adding %s to series file\n" % patch)
1277 self.ui.warn("adding %s to series file\n" % patch)
1278 i += 1
1278 i += 1
1279 added.append(patch)
1279 added.append(patch)
1280 patch = None
1280 patch = None
1281 self.series_dirty = 1
1281 self.series_dirty = 1
1282 qrepo = self.qrepo()
1282 qrepo = self.qrepo()
1283 if qrepo:
1283 if qrepo:
1284 qrepo.add(added)
1284 qrepo.add(added)
1285
1285
1286 def delete(ui, repo, patch, *patches, **opts):
1286 def delete(ui, repo, patch, *patches, **opts):
1287 """remove patches from queue
1287 """remove patches from queue
1288
1288
1289 The patches must not be applied.
1289 The patches must not be applied.
1290 With -k, the patch files are preserved in the patch directory."""
1290 With -k, the patch files are preserved in the patch directory."""
1291 q = repo.mq
1291 q = repo.mq
1292 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1292 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1293 q.save_dirty()
1293 q.save_dirty()
1294 return 0
1294 return 0
1295
1295
1296 def applied(ui, repo, patch=None, **opts):
1296 def applied(ui, repo, patch=None, **opts):
1297 """print the patches already applied"""
1297 """print the patches already applied"""
1298 repo.mq.qapplied(repo, patch)
1298 repo.mq.qapplied(repo, patch)
1299 return 0
1299 return 0
1300
1300
1301 def unapplied(ui, repo, patch=None, **opts):
1301 def unapplied(ui, repo, patch=None, **opts):
1302 """print the patches not yet applied"""
1302 """print the patches not yet applied"""
1303 for i, p in repo.mq.unapplied(repo, patch):
1303 for i, p in repo.mq.unapplied(repo, patch):
1304 if ui.verbose:
1304 if ui.verbose:
1305 ui.write("%d " % i)
1305 ui.write("%d " % i)
1306 ui.write("%s\n" % p)
1306 ui.write("%s\n" % p)
1307
1307
1308 def qimport(ui, repo, *filename, **opts):
1308 def qimport(ui, repo, *filename, **opts):
1309 """import a patch"""
1309 """import a patch"""
1310 q = repo.mq
1310 q = repo.mq
1311 q.qimport(repo, filename, patch=opts['name'],
1311 q.qimport(repo, filename, patch=opts['name'],
1312 existing=opts['existing'], force=opts['force'])
1312 existing=opts['existing'], force=opts['force'])
1313 q.save_dirty()
1313 q.save_dirty()
1314 return 0
1314 return 0
1315
1315
1316 def init(ui, repo, **opts):
1316 def init(ui, repo, **opts):
1317 """init a new queue repository
1317 """init a new queue repository
1318
1318
1319 The queue repository is unversioned by default. If -c is
1319 The queue repository is unversioned by default. If -c is
1320 specified, qinit will create a separate nested repository
1320 specified, qinit will create a separate nested repository
1321 for patches. Use qcommit to commit changes to this queue
1321 for patches. Use qcommit to commit changes to this queue
1322 repository."""
1322 repository."""
1323 q = repo.mq
1323 q = repo.mq
1324 r = q.init(repo, create=opts['create_repo'])
1324 r = q.init(repo, create=opts['create_repo'])
1325 q.save_dirty()
1325 q.save_dirty()
1326 if r:
1326 if r:
1327 fp = r.wopener('.hgignore', 'w')
1327 fp = r.wopener('.hgignore', 'w')
1328 print >> fp, 'syntax: glob'
1328 print >> fp, 'syntax: glob'
1329 print >> fp, 'status'
1329 print >> fp, 'status'
1330 fp.close()
1330 fp.close()
1331 r.wopener('series', 'w').close()
1331 r.wopener('series', 'w').close()
1332 r.add(['.hgignore', 'series'])
1332 r.add(['.hgignore', 'series'])
1333 return 0
1333 return 0
1334
1334
1335 def clone(ui, source, dest=None, **opts):
1335 def clone(ui, source, dest=None, **opts):
1336 '''clone main and patch repository at same time
1336 '''clone main and patch repository at same time
1337
1337
1338 If source is local, destination will have no patches applied. If
1338 If source is local, destination will have no patches applied. If
1339 source is remote, this command cannot check whether patches are
1339 source is remote, this command cannot check whether patches are
1340 applied in source, so it cannot guarantee that patches are not
1340 applied in source, so it cannot guarantee that patches are not
1341 applied in destination. If you clone a remote repository, make sure
1341 applied in destination. If you clone a remote repository, make sure
1342 beforehand that it has no patches applied.
1342 beforehand that it has no patches applied.
1343
1343
1344 Source patch repository is looked for in <src>/.hg/patches by
1344 Source patch repository is looked for in <src>/.hg/patches by
1345 default. Use -p <url> to change.
1345 default. Use -p <url> to change.
1346 '''
1346 '''
1347 commands.setremoteconfig(ui, opts)
1347 commands.setremoteconfig(ui, opts)
1348 if dest is None:
1348 if dest is None:
1349 dest = hg.defaultdest(source)
1349 dest = hg.defaultdest(source)
1350 sr = hg.repository(ui, ui.expandpath(source))
1350 sr = hg.repository(ui, ui.expandpath(source))
1351 qbase, destrev = None, None
1351 qbase, destrev = None, None
1352 if sr.local():
1352 if sr.local():
1353 reposetup(ui, sr)
1353 reposetup(ui, sr)
1354 if sr.mq.applied:
1354 if sr.mq.applied:
1355 qbase = revlog.bin(sr.mq.applied[0].rev)
1355 qbase = revlog.bin(sr.mq.applied[0].rev)
1356 if not hg.islocal(dest):
1356 if not hg.islocal(dest):
1357 destrev = sr.parents(qbase)[0]
1357 destrev = sr.parents(qbase)[0]
1358 ui.note(_('cloning main repo\n'))
1358 ui.note(_('cloning main repo\n'))
1359 sr, dr = hg.clone(ui, sr, dest,
1359 sr, dr = hg.clone(ui, sr, dest,
1360 pull=opts['pull'],
1360 pull=opts['pull'],
1361 rev=destrev,
1361 rev=destrev,
1362 update=False,
1362 update=False,
1363 stream=opts['uncompressed'])
1363 stream=opts['uncompressed'])
1364 ui.note(_('cloning patch repo\n'))
1364 ui.note(_('cloning patch repo\n'))
1365 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1365 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1366 dr.url() + '/.hg/patches',
1366 dr.url() + '/.hg/patches',
1367 pull=opts['pull'],
1367 pull=opts['pull'],
1368 update=not opts['noupdate'],
1368 update=not opts['noupdate'],
1369 stream=opts['uncompressed'])
1369 stream=opts['uncompressed'])
1370 if dr.local():
1370 if dr.local():
1371 if qbase:
1371 if qbase:
1372 ui.note(_('stripping applied patches from destination repo\n'))
1372 ui.note(_('stripping applied patches from destination repo\n'))
1373 reposetup(ui, dr)
1373 reposetup(ui, dr)
1374 dr.mq.strip(dr, qbase, update=False, backup=None)
1374 dr.mq.strip(dr, qbase, update=False, backup=None)
1375 if not opts['noupdate']:
1375 if not opts['noupdate']:
1376 ui.note(_('updating destination repo\n'))
1376 ui.note(_('updating destination repo\n'))
1377 hg.update(dr, dr.changelog.tip())
1377 hg.update(dr, dr.changelog.tip())
1378
1378
1379 def commit(ui, repo, *pats, **opts):
1379 def commit(ui, repo, *pats, **opts):
1380 """commit changes in the queue repository"""
1380 """commit changes in the queue repository"""
1381 q = repo.mq
1381 q = repo.mq
1382 r = q.qrepo()
1382 r = q.qrepo()
1383 if not r: raise util.Abort('no queue repository')
1383 if not r: raise util.Abort('no queue repository')
1384 commands.commit(r.ui, r, *pats, **opts)
1384 commands.commit(r.ui, r, *pats, **opts)
1385
1385
1386 def series(ui, repo, **opts):
1386 def series(ui, repo, **opts):
1387 """print the entire series file"""
1387 """print the entire series file"""
1388 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1388 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1389 return 0
1389 return 0
1390
1390
1391 def top(ui, repo, **opts):
1391 def top(ui, repo, **opts):
1392 """print the name of the current patch"""
1392 """print the name of the current patch"""
1393 repo.mq.top(repo)
1393 repo.mq.top(repo)
1394 return 0
1394 return 0
1395
1395
1396 def next(ui, repo, **opts):
1396 def next(ui, repo, **opts):
1397 """print the name of the next patch"""
1397 """print the name of the next patch"""
1398 repo.mq.next(repo)
1398 repo.mq.next(repo)
1399 return 0
1399 return 0
1400
1400
1401 def prev(ui, repo, **opts):
1401 def prev(ui, repo, **opts):
1402 """print the name of the previous patch"""
1402 """print the name of the previous patch"""
1403 repo.mq.prev(repo)
1403 repo.mq.prev(repo)
1404 return 0
1404 return 0
1405
1405
1406 def new(ui, repo, patch, **opts):
1406 def new(ui, repo, patch, **opts):
1407 """create a new patch
1407 """create a new patch
1408
1408
1409 qnew creates a new patch on top of the currently-applied patch
1409 qnew creates a new patch on top of the currently-applied patch
1410 (if any). It will refuse to run if there are any outstanding
1410 (if any). It will refuse to run if there are any outstanding
1411 changes unless -f is specified, in which case the patch will
1411 changes unless -f is specified, in which case the patch will
1412 be initialised with them.
1412 be initialised with them.
1413
1413
1414 -m or -l set the patch header as well as the commit message.
1414 -m or -l set the patch header as well as the commit message.
1415 If neither is specified, the patch header is empty and the
1415 If neither is specified, the patch header is empty and the
1416 commit message is 'New patch: PATCH'"""
1416 commit message is 'New patch: PATCH'"""
1417 q = repo.mq
1417 q = repo.mq
1418 message = commands.logmessage(opts)
1418 message = commands.logmessage(opts)
1419 q.new(repo, patch, msg=message, force=opts['force'])
1419 q.new(repo, patch, msg=message, force=opts['force'])
1420 q.save_dirty()
1420 q.save_dirty()
1421 return 0
1421 return 0
1422
1422
1423 def refresh(ui, repo, **opts):
1423 def refresh(ui, repo, **opts):
1424 """update the current patch"""
1424 """update the current patch"""
1425 q = repo.mq
1425 q = repo.mq
1426 message = commands.logmessage(opts)
1426 message = commands.logmessage(opts)
1427 if opts['edit']:
1427 if opts['edit']:
1428 if message:
1428 if message:
1429 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1429 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1430 patch = q.applied[-1].name
1430 patch = q.applied[-1].name
1431 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1431 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1432 message = ui.edit('\n'.join(message), user or ui.username())
1432 message = ui.edit('\n'.join(message), user or ui.username())
1433 q.refresh(repo, msg=message, short=opts['short'])
1433 q.refresh(repo, msg=message, short=opts['short'])
1434 q.save_dirty()
1434 q.save_dirty()
1435 return 0
1435 return 0
1436
1436
1437 def diff(ui, repo, *files, **opts):
1437 def diff(ui, repo, *files, **opts):
1438 """diff of the current patch"""
1438 """diff of the current patch"""
1439 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1439 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1440 repo.mq.diff(repo, list(files))
1440 repo.mq.diff(repo, list(files))
1441 return 0
1441 return 0
1442
1442
1443 def fold(ui, repo, *files, **opts):
1443 def fold(ui, repo, *files, **opts):
1444 """fold the named patches into the current patch
1444 """fold the named patches into the current patch
1445
1445
1446 Patches must not yet be applied. Each patch will be successively
1446 Patches must not yet be applied. Each patch will be successively
1447 applied to the current patch in the order given. If all the
1447 applied to the current patch in the order given. If all the
1448 patches apply successfully, the current patch will be refreshed
1448 patches apply successfully, the current patch will be refreshed
1449 with the new cumulative patch, and the folded patches will
1449 with the new cumulative patch, and the folded patches will
1450 be deleted. With -k/--keep, the folded patch files will not
1450 be deleted. With -k/--keep, the folded patch files will not
1451 be removed afterwards.
1451 be removed afterwards.
1452
1452
1453 The header for each folded patch will be concatenated with
1453 The header for each folded patch will be concatenated with
1454 the current patch header, separated by a line of '* * *'."""
1454 the current patch header, separated by a line of '* * *'."""
1455
1455
1456 q = repo.mq
1456 q = repo.mq
1457
1457
1458 if not files:
1458 if not files:
1459 raise util.Abort(_('qfold requires at least one patch name'))
1459 raise util.Abort(_('qfold requires at least one patch name'))
1460 if not q.check_toppatch(repo):
1460 if not q.check_toppatch(repo):
1461 raise util.Abort(_('No patches applied\n'))
1461 raise util.Abort(_('No patches applied\n'))
1462
1462
1463 message = commands.logmessage(opts)
1463 message = commands.logmessage(opts)
1464 if opts['edit']:
1464 if opts['edit']:
1465 if message:
1465 if message:
1466 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1466 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1467
1467
1468 parent = q.lookup('qtip')
1468 parent = q.lookup('qtip')
1469 patches = []
1469 patches = []
1470 messages = []
1470 messages = []
1471 for f in files:
1471 for f in files:
1472 patch = q.lookup(f)
1472 patch = q.lookup(f)
1473 if patch in patches or patch == parent:
1473 if patch in patches or patch == parent:
1474 ui.warn(_('Skipping already folded patch %s\n') % patch)
1474 ui.warn(_('Skipping already folded patch %s\n') % patch)
1475 if q.isapplied(patch):
1475 if q.isapplied(patch):
1476 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1476 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1477 patches.append(patch)
1477 patches.append(patch)
1478
1478
1479 for patch in patches:
1479 for patch in patches:
1480 if not message:
1480 if not message:
1481 messages.append(q.readheaders(patch)[0])
1481 messages.append(q.readheaders(patch)[0])
1482 pf = q.join(patch)
1482 pf = q.join(patch)
1483 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1483 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1484 if not patchsuccess:
1484 if not patchsuccess:
1485 raise util.Abort(_('Error folding patch %s') % patch)
1485 raise util.Abort(_('Error folding patch %s') % patch)
1486
1486
1487 if not message:
1487 if not message:
1488 message, comments, user = q.readheaders(parent)[0:3]
1488 message, comments, user = q.readheaders(parent)[0:3]
1489 for msg in messages:
1489 for msg in messages:
1490 message.append('* * *')
1490 message.append('* * *')
1491 message.extend(msg)
1491 message.extend(msg)
1492 message = '\n'.join(message)
1492 message = '\n'.join(message)
1493
1493
1494 if opts['edit']:
1494 if opts['edit']:
1495 message = ui.edit(message, user or ui.username())
1495 message = ui.edit(message, user or ui.username())
1496
1496
1497 q.refresh(repo, msg=message)
1497 q.refresh(repo, msg=message)
1498
1498
1499 for patch in patches:
1499 for patch in patches:
1500 q.delete(repo, patch, keep=opts['keep'])
1500 q.delete(repo, patch, keep=opts['keep'])
1501
1501
1502 q.save_dirty()
1502 q.save_dirty()
1503
1503
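The '* * *' concatenation described in the qfold docstring is just a join over header lines; here is a minimal standalone sketch (not the mq implementation, which reworks the parent patch's header in place):

def _fold_messages(current, folded):
    # current: lines of the current patch header; folded: the headers
    # (each a list of lines) of the patches being folded in.
    lines = list(current)
    for msg in folded:
        lines.append('* * *')
        lines.extend(msg)
    return '\n'.join(lines)

# e.g. _fold_messages(['fix the frobnicator'], [['also fix the baz']])
# gives 'fix the frobnicator\n* * *\nalso fix the baz'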
1504 def guard(ui, repo, *args, **opts):
1504 def guard(ui, repo, *args, **opts):
1505 '''set or print guards for a patch
1505 '''set or print guards for a patch
1506
1506
1507 guards control whether a patch can be pushed. a patch with no
1507 guards control whether a patch can be pushed. a patch with no
1508 guards is always pushed. a patch with a positive guard ("+foo") is
1508 guards is always pushed. a patch with a positive guard ("+foo") is
1509 pushed only if the qselect command enables guard "foo". a patch with
1509 pushed only if the qselect command enables guard "foo". a patch with
1510 a negative guard ("-foo") is never pushed if the qselect command
1510 a negative guard ("-foo") is never pushed if the qselect command
1511 enables guard "foo".
1511 enables guard "foo".
1512
1512
1513 with no arguments, default is to print current active guards.
1513 with no arguments, default is to print current active guards.
1514 with arguments, set active guards for patch.
1514 with arguments, set active guards for patch.
1515
1515
1516 to set a negative guard "-foo" on the topmost patch ("--" is needed so
1516 to set a negative guard "-foo" on the topmost patch ("--" is needed so
1517 hg will not interpret "-foo" as an argument):
1517 hg will not interpret "-foo" as an argument):
1518 hg qguard -- -foo
1518 hg qguard -- -foo
1519
1519
1520 to set guards on another patch:
1520 to set guards on another patch:
1521 hg qguard other.patch +2.6.17 -stable
1521 hg qguard other.patch +2.6.17 -stable
1522 '''
1522 '''
1523 def status(idx):
1523 def status(idx):
1524 guards = q.series_guards[idx] or ['unguarded']
1524 guards = q.series_guards[idx] or ['unguarded']
1525 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1525 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1526 q = repo.mq
1526 q = repo.mq
1527 patch = None
1527 patch = None
1528 args = list(args)
1528 args = list(args)
1529 if opts['list']:
1529 if opts['list']:
1530 if args or opts['none']:
1530 if args or opts['none']:
1531 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1531 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1532 for i in xrange(len(q.series)):
1532 for i in xrange(len(q.series)):
1533 status(i)
1533 status(i)
1534 return
1534 return
1535 if not args or args[0][0:1] in '-+':
1535 if not args or args[0][0:1] in '-+':
1536 if not q.applied:
1536 if not q.applied:
1537 raise util.Abort(_('no patches applied'))
1537 raise util.Abort(_('no patches applied'))
1538 patch = q.applied[-1].name
1538 patch = q.applied[-1].name
1539 if patch is None and args[0][0:1] not in '-+':
1539 if patch is None and args[0][0:1] not in '-+':
1540 patch = args.pop(0)
1540 patch = args.pop(0)
1541 if patch is None:
1541 if patch is None:
1542 raise util.Abort(_('no patch to work with'))
1542 raise util.Abort(_('no patch to work with'))
1543 if args or opts['none']:
1543 if args or opts['none']:
1544 q.set_guards(q.find_series(patch), args)
1544 q.set_guards(q.find_series(patch), args)
1545 q.save_dirty()
1545 q.save_dirty()
1546 else:
1546 else:
1547 status(q.series.index(q.lookup(patch)))
1547 status(q.series.index(q.lookup(patch)))
1548
1548
1549 def header(ui, repo, patch=None):
1549 def header(ui, repo, patch=None):
1550 """Print the header of the topmost or specified patch"""
1550 """Print the header of the topmost or specified patch"""
1551 q = repo.mq
1551 q = repo.mq
1552
1552
1553 if patch:
1553 if patch:
1554 patch = q.lookup(patch)
1554 patch = q.lookup(patch)
1555 else:
1555 else:
1556 if not q.applied:
1556 if not q.applied:
1557 ui.write('No patches applied\n')
1557 ui.write('No patches applied\n')
1558 return
1558 return
1559 patch = q.lookup('qtip')
1559 patch = q.lookup('qtip')
1560 message = repo.mq.readheaders(patch)[0]
1560 message = repo.mq.readheaders(patch)[0]
1561
1561
1562 ui.write('\n'.join(message) + '\n')
1562 ui.write('\n'.join(message) + '\n')
1563
1563
1564 def lastsavename(path):
1564 def lastsavename(path):
1565 (directory, base) = os.path.split(path)
1565 (directory, base) = os.path.split(path)
1566 names = os.listdir(directory)
1566 names = os.listdir(directory)
1567 namere = re.compile("%s.([0-9]+)" % base)
1567 namere = re.compile("%s.([0-9]+)" % base)
1568 maxindex = None
1568 maxindex = None
1569 maxname = None
1569 maxname = None
1570 for f in names:
1570 for f in names:
1571 m = namere.match(f)
1571 m = namere.match(f)
1572 if m:
1572 if m:
1573 index = int(m.group(1))
1573 index = int(m.group(1))
1574 if maxindex == None or index > maxindex:
1574 if maxindex == None or index > maxindex:
1575 maxindex = index
1575 maxindex = index
1576 maxname = f
1576 maxname = f
1577 if maxname:
1577 if maxname:
1578 return (os.path.join(directory, maxname), maxindex)
1578 return (os.path.join(directory, maxname), maxindex)
1579 return (None, None)
1579 return (None, None)
1580
1580
1581 def savename(path):
1581 def savename(path):
1582 (last, index) = lastsavename(path)
1582 (last, index) = lastsavename(path)
1583 if last is None:
1583 if last is None:
1584 index = 0
1584 index = 0
1585 newpath = path + ".%d" % (index + 1)
1585 newpath = path + ".%d" % (index + 1)
1586 return newpath
1586 return newpath
1587
1587
1588 def push(ui, repo, patch=None, **opts):
1588 def push(ui, repo, patch=None, **opts):
1589 """push the next patch onto the stack"""
1589 """push the next patch onto the stack"""
1590 q = repo.mq
1590 q = repo.mq
1591 mergeq = None
1591 mergeq = None
1592
1592
1593 if opts['all']:
1593 if opts['all']:
1594 patch = q.series[-1]
1594 patch = q.series[-1]
1595 if opts['merge']:
1595 if opts['merge']:
1596 if opts['name']:
1596 if opts['name']:
1597 newpath = opts['name']
1597 newpath = opts['name']
1598 else:
1598 else:
1599 newpath, i = lastsavename(q.path)
1599 newpath, i = lastsavename(q.path)
1600 if not newpath:
1600 if not newpath:
1601 ui.warn("no saved queues found, please use -n\n")
1601 ui.warn("no saved queues found, please use -n\n")
1602 return 1
1602 return 1
1603 mergeq = queue(ui, repo.join(""), newpath)
1603 mergeq = queue(ui, repo.join(""), newpath)
1604 ui.warn("merging with queue at: %s\n" % mergeq.path)
1604 ui.warn("merging with queue at: %s\n" % mergeq.path)
1605 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1605 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1606 mergeq=mergeq)
1606 mergeq=mergeq)
1607 q.save_dirty()
1607 q.save_dirty()
1608 return ret
1608 return ret
1609
1609
1610 def pop(ui, repo, patch=None, **opts):
1610 def pop(ui, repo, patch=None, **opts):
1611 """pop the current patch off the stack"""
1611 """pop the current patch off the stack"""
1612 localupdate = True
1612 localupdate = True
1613 if opts['name']:
1613 if opts['name']:
1614 q = queue(ui, repo.join(""), repo.join(opts['name']))
1614 q = queue(ui, repo.join(""), repo.join(opts['name']))
1615 ui.warn('using patch queue: %s\n' % q.path)
1615 ui.warn('using patch queue: %s\n' % q.path)
1616 localupdate = False
1616 localupdate = False
1617 else:
1617 else:
1618 q = repo.mq
1618 q = repo.mq
1619 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1619 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1620 q.save_dirty()
1620 q.save_dirty()
1621 return 0
1621 return 0
1622
1622
1623 def rename(ui, repo, patch, name=None, **opts):
1623 def rename(ui, repo, patch, name=None, **opts):
1624 """rename a patch
1624 """rename a patch
1625
1625
1626 With one argument, renames the current patch to PATCH1.
1626 With one argument, renames the current patch to PATCH1.
1627 With two arguments, renames PATCH1 to PATCH2."""
1627 With two arguments, renames PATCH1 to PATCH2."""
1628
1628
1629 q = repo.mq
1629 q = repo.mq
1630
1630
1631 if not name:
1631 if not name:
1632 name = patch
1632 name = patch
1633 patch = None
1633 patch = None
1634
1634
1635 if name in q.series:
1635 if name in q.series:
1636 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1636 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1637
1637
1638 absdest = q.join(name)
1638 absdest = q.join(name)
1639 if os.path.exists(absdest):
1639 if os.path.exists(absdest):
1640 raise util.Abort(_('%s already exists') % absdest)
1640 raise util.Abort(_('%s already exists') % absdest)
1641
1641
1642 if patch:
1642 if patch:
1643 patch = q.lookup(patch)
1643 patch = q.lookup(patch)
1644 else:
1644 else:
1645 if not q.applied:
1645 if not q.applied:
1646 ui.write(_('No patches applied\n'))
1646 ui.write(_('No patches applied\n'))
1647 return
1647 return
1648 patch = q.lookup('qtip')
1648 patch = q.lookup('qtip')
1649
1649
1650 if ui.verbose:
1650 if ui.verbose:
1651 ui.write('Renaming %s to %s\n' % (patch, name))
1651 ui.write('Renaming %s to %s\n' % (patch, name))
1652 i = q.find_series(patch)
1652 i = q.find_series(patch)
1653 q.full_series[i] = name
1653 q.full_series[i] = name
1654 q.parse_series()
1654 q.parse_series()
1655 q.series_dirty = 1
1655 q.series_dirty = 1
1656
1656
1657 info = q.isapplied(patch)
1657 info = q.isapplied(patch)
1658 if info:
1658 if info:
1659 q.applied[info[0]] = statusentry(info[1], name)
1659 q.applied[info[0]] = statusentry(info[1], name)
1660 q.applied_dirty = 1
1660 q.applied_dirty = 1
1661
1661
1662 util.rename(q.join(patch), absdest)
1662 util.rename(q.join(patch), absdest)
1663 r = q.qrepo()
1663 r = q.qrepo()
1664 if r:
1664 if r:
1665 wlock = r.wlock()
1665 wlock = r.wlock()
1666 if r.dirstate.state(name) == 'r':
1666 if r.dirstate.state(name) == 'r':
1667 r.undelete([name], wlock)
1667 r.undelete([name], wlock)
1668 r.copy(patch, name, wlock)
1668 r.copy(patch, name, wlock)
1669 r.remove([patch], False, wlock)
1669 r.remove([patch], False, wlock)
1670
1670
1671 q.save_dirty()
1671 q.save_dirty()
1672
1672
1673 def restore(ui, repo, rev, **opts):
1673 def restore(ui, repo, rev, **opts):
1674 """restore the queue state saved by a rev"""
1674 """restore the queue state saved by a rev"""
1675 rev = repo.lookup(rev)
1675 rev = repo.lookup(rev)
1676 q = repo.mq
1676 q = repo.mq
1677 q.restore(repo, rev, delete=opts['delete'],
1677 q.restore(repo, rev, delete=opts['delete'],
1678 qupdate=opts['update'])
1678 qupdate=opts['update'])
1679 q.save_dirty()
1679 q.save_dirty()
1680 return 0
1680 return 0
1681
1681
1682 def save(ui, repo, **opts):
1682 def save(ui, repo, **opts):
1683 """save current queue state"""
1683 """save current queue state"""
1684 q = repo.mq
1684 q = repo.mq
1685 message = commands.logmessage(opts)
1685 message = commands.logmessage(opts)
1686 ret = q.save(repo, msg=message)
1686 ret = q.save(repo, msg=message)
1687 if ret:
1687 if ret:
1688 return ret
1688 return ret
1689 q.save_dirty()
1689 q.save_dirty()
1690 if opts['copy']:
1690 if opts['copy']:
1691 path = q.path
1691 path = q.path
1692 if opts['name']:
1692 if opts['name']:
1693 newpath = os.path.join(q.basepath, opts['name'])
1693 newpath = os.path.join(q.basepath, opts['name'])
1694 if os.path.exists(newpath):
1694 if os.path.exists(newpath):
1695 if not os.path.isdir(newpath):
1695 if not os.path.isdir(newpath):
1696 raise util.Abort(_('destination %s exists and is not '
1696 raise util.Abort(_('destination %s exists and is not '
1697 'a directory') % newpath)
1697 'a directory') % newpath)
1698 if not opts['force']:
1698 if not opts['force']:
1699 raise util.Abort(_('destination %s exists, '
1699 raise util.Abort(_('destination %s exists, '
1700 'use -f to force') % newpath)
1700 'use -f to force') % newpath)
1701 else:
1701 else:
1702 newpath = savename(path)
1702 newpath = savename(path)
1703 ui.warn("copy %s to %s\n" % (path, newpath))
1703 ui.warn("copy %s to %s\n" % (path, newpath))
1704 util.copyfiles(path, newpath)
1704 util.copyfiles(path, newpath)
1705 if opts['empty']:
1705 if opts['empty']:
1706 try:
1706 try:
1707 os.unlink(q.join(q.status_path))
1707 os.unlink(q.join(q.status_path))
1708 except:
1708 except:
1709 pass
1709 pass
1710 return 0
1710 return 0
1711
1711
1712 def strip(ui, repo, rev, **opts):
1712 def strip(ui, repo, rev, **opts):
1713 """strip a revision and all later revs on the same branch"""
1713 """strip a revision and all later revs on the same branch"""
1714 rev = repo.lookup(rev)
1714 rev = repo.lookup(rev)
1715 backup = 'all'
1715 backup = 'all'
1716 if opts['backup']:
1716 if opts['backup']:
1717 backup = 'strip'
1717 backup = 'strip'
1718 elif opts['nobackup']:
1718 elif opts['nobackup']:
1719 backup = 'none'
1719 backup = 'none'
1720 repo.mq.strip(repo, rev, backup=backup)
1720 repo.mq.strip(repo, rev, backup=backup)
1721 return 0
1721 return 0
1722
1722
1723 def select(ui, repo, *args, **opts):
1723 def select(ui, repo, *args, **opts):
1724 '''set or print guarded patches to push
1724 '''set or print guarded patches to push
1725
1725
1726 use qguard command to set or print guards on patch. then use
1726 use qguard command to set or print guards on patch. then use
1727 qselect to tell mq which guards to use. example:
1727 qselect to tell mq which guards to use. example:
1728
1728
1729 qguard foo.patch -stable (negative guard)
1729 qguard foo.patch -stable (negative guard)
1730 qguard bar.patch +stable (positive guard)
1730 qguard bar.patch +stable (positive guard)
1731 qselect stable
1731 qselect stable
1732
1732
1733 this sets "stable" guard. mq will skip foo.patch (because it has a
1733 this sets "stable" guard. mq will skip foo.patch (because it has a
1734 negative match) but push bar.patch (because it has a positive
1734 negative match) but push bar.patch (because it has a positive
1735 match). a patch is pushed if any positive guards match and no
1735 match). a patch is pushed if any positive guards match and no
1736 negative guards match (see the sketch below, after this function).
1736 negative guards match (see the sketch below, after this function).
1737
1737
1738 with no arguments, default is to print current active guards.
1738 with no arguments, default is to print current active guards.
1739 with arguments, set active guards as given.
1739 with arguments, set active guards as given.
1740
1740
1741 use -n/--none to deactivate guards (no other arguments needed).
1741 use -n/--none to deactivate guards (no other arguments needed).
1742 when no guards are active, patches with positive guards are skipped and
1742 when no guards are active, patches with positive guards are skipped and
1743 patches with negative guards are pushed.
1743 patches with negative guards are pushed.
1744
1744
1745 qselect can change guards of applied patches. it does not pop
1745 qselect can change guards of applied patches. it does not pop
1746 guarded patches by default. use --pop to pop back to last applied
1746 guarded patches by default. use --pop to pop back to last applied
1747 patch that is not guarded. use --reapply (implies --pop) to push
1747 patch that is not guarded. use --reapply (implies --pop) to push
1748 back to current patch afterwards, but skip guarded patches.
1748 back to current patch afterwards, but skip guarded patches.
1749
1749
1750 use -s/--series to print list of all guards in series file (no
1750 use -s/--series to print list of all guards in series file (no
1751 other arguments needed). use -v for more information.'''
1751 other arguments needed). use -v for more information.'''
1752
1752
1753 q = repo.mq
1753 q = repo.mq
1754 guards = q.active()
1754 guards = q.active()
1755 if args or opts['none']:
1755 if args or opts['none']:
1756 old_unapplied = q.unapplied(repo)
1756 old_unapplied = q.unapplied(repo)
1757 old_guarded = [i for i in xrange(len(q.applied)) if
1757 old_guarded = [i for i in xrange(len(q.applied)) if
1758 not q.pushable(i)[0]]
1758 not q.pushable(i)[0]]
1759 q.set_active(args)
1759 q.set_active(args)
1760 q.save_dirty()
1760 q.save_dirty()
1761 if not args:
1761 if not args:
1762 ui.status(_('guards deactivated\n'))
1762 ui.status(_('guards deactivated\n'))
1763 if not opts['pop'] and not opts['reapply']:
1763 if not opts['pop'] and not opts['reapply']:
1764 unapplied = q.unapplied(repo)
1764 unapplied = q.unapplied(repo)
1765 guarded = [i for i in xrange(len(q.applied))
1765 guarded = [i for i in xrange(len(q.applied))
1766 if not q.pushable(i)[0]]
1766 if not q.pushable(i)[0]]
1767 if len(unapplied) != len(old_unapplied):
1767 if len(unapplied) != len(old_unapplied):
1768 ui.status(_('number of unguarded, unapplied patches has '
1768 ui.status(_('number of unguarded, unapplied patches has '
1769 'changed from %d to %d\n') %
1769 'changed from %d to %d\n') %
1770 (len(old_unapplied), len(unapplied)))
1770 (len(old_unapplied), len(unapplied)))
1771 if len(guarded) != len(old_guarded):
1771 if len(guarded) != len(old_guarded):
1772 ui.status(_('number of guarded, applied patches has changed '
1772 ui.status(_('number of guarded, applied patches has changed '
1773 'from %d to %d\n') %
1773 'from %d to %d\n') %
1774 (len(old_guarded), len(guarded)))
1774 (len(old_guarded), len(guarded)))
1775 elif opts['series']:
1775 elif opts['series']:
1776 guards = {}
1776 guards = {}
1777 noguards = 0
1777 noguards = 0
1778 for gs in q.series_guards:
1778 for gs in q.series_guards:
1779 if not gs:
1779 if not gs:
1780 noguards += 1
1780 noguards += 1
1781 for g in gs:
1781 for g in gs:
1782 guards.setdefault(g, 0)
1782 guards.setdefault(g, 0)
1783 guards[g] += 1
1783 guards[g] += 1
1784 if ui.verbose:
1784 if ui.verbose:
1785 guards['NONE'] = noguards
1785 guards['NONE'] = noguards
1786 guards = guards.items()
1786 guards = guards.items()
1787 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1787 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1788 if guards:
1788 if guards:
1789 ui.note(_('guards in series file:\n'))
1789 ui.note(_('guards in series file:\n'))
1790 for guard, count in guards:
1790 for guard, count in guards:
1791 ui.note('%2d ' % count)
1791 ui.note('%2d ' % count)
1792 ui.write(guard, '\n')
1792 ui.write(guard, '\n')
1793 else:
1793 else:
1794 ui.note(_('no guards in series file\n'))
1794 ui.note(_('no guards in series file\n'))
1795 else:
1795 else:
1796 if guards:
1796 if guards:
1797 ui.note(_('active guards:\n'))
1797 ui.note(_('active guards:\n'))
1798 for g in guards:
1798 for g in guards:
1799 ui.write(g, '\n')
1799 ui.write(g, '\n')
1800 else:
1800 else:
1801 ui.write(_('no active guards\n'))
1801 ui.write(_('no active guards\n'))
1802 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1802 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1803 popped = False
1803 popped = False
1804 if opts['pop'] or opts['reapply']:
1804 if opts['pop'] or opts['reapply']:
1805 for i in xrange(len(q.applied)):
1805 for i in xrange(len(q.applied)):
1806 pushable, reason = q.pushable(i)
1806 pushable, reason = q.pushable(i)
1807 if not pushable:
1807 if not pushable:
1808 ui.status(_('popping guarded patches\n'))
1808 ui.status(_('popping guarded patches\n'))
1809 popped = True
1809 popped = True
1810 if i == 0:
1810 if i == 0:
1811 q.pop(repo, all=True)
1811 q.pop(repo, all=True)
1812 else:
1812 else:
1813 q.pop(repo, i-1)
1813 q.pop(repo, i-1)
1814 break
1814 break
1815 if popped:
1815 if popped:
1816 try:
1816 try:
1817 if reapply:
1817 if reapply:
1818 ui.status(_('reapplying unguarded patches\n'))
1818 ui.status(_('reapplying unguarded patches\n'))
1819 q.push(repo, reapply)
1819 q.push(repo, reapply)
1820 finally:
1820 finally:
1821 q.save_dirty()
1821 q.save_dirty()
1822
1822
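The push rule stated in the qselect help can be written out directly. A minimal sketch of that rule follows (not mq code; mq computes this through pushable() using the per-patch series_guards):

def _guard_allows_push(patch_guards, active_guards):
    # patch_guards: the guards set on one patch, e.g. ['+stable', '-broken'];
    # active_guards: the guards currently enabled with qselect.
    if not patch_guards:
        return True                     # unguarded patches are always pushed
    positives = [g[1:] for g in patch_guards if g[0:1] == '+']
    negatives = [g[1:] for g in patch_guards if g[0:1] == '-']
    for g in negatives:
        if g in active_guards:
            return False                # a matching negative guard blocks the push
    if positives:
        for g in positives:
            if g in active_guards:
                return True             # at least one positive guard must match
        return False
    return True

# _guard_allows_push(['+stable'], ['stable']) -> True
# _guard_allows_push(['-stable'], ['stable']) -> False
# _guard_allows_push(['+stable'], [])         -> False (skipped when no guards active)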
1823 def reposetup(ui, repo):
1823 def reposetup(ui, repo):
1824 class mqrepo(repo.__class__):
1824 class mqrepo(repo.__class__):
1825 def abort_if_wdir_patched(self, errmsg, force=False):
1825 def abort_if_wdir_patched(self, errmsg, force=False):
1826 if self.mq.applied and not force:
1826 if self.mq.applied and not force:
1827 parent = revlog.hex(self.dirstate.parents()[0])
1827 parent = revlog.hex(self.dirstate.parents()[0])
1828 if parent in [s.rev for s in self.mq.applied]:
1828 if parent in [s.rev for s in self.mq.applied]:
1829 raise util.Abort(errmsg)
1829 raise util.Abort(errmsg)
1830
1830
1831 def commit(self, *args, **opts):
1831 def commit(self, *args, **opts):
1832 if len(args) >= 6:
1832 if len(args) >= 6:
1833 force = args[5]
1833 force = args[5]
1834 else:
1834 else:
1835 force = opts.get('force')
1835 force = opts.get('force')
1836 self.abort_if_wdir_patched(
1836 self.abort_if_wdir_patched(
1837 _('cannot commit over an applied mq patch'),
1837 _('cannot commit over an applied mq patch'),
1838 force)
1838 force)
1839
1839
1840 return super(mqrepo, self).commit(*args, **opts)
1840 return super(mqrepo, self).commit(*args, **opts)
1841
1841
1842 def push(self, remote, force=False, revs=None):
1842 def push(self, remote, force=False, revs=None):
1843 if self.mq.applied and not force:
1843 if self.mq.applied and not force:
1844 raise util.Abort(_('source has mq patches applied'))
1844 raise util.Abort(_('source has mq patches applied'))
1845 return super(mqrepo, self).push(remote, force, revs)
1845 return super(mqrepo, self).push(remote, force, revs)
1846
1846
1847 def tags(self):
1847 def tags(self):
1848 if self.tagscache:
1848 if self.tagscache:
1849 return self.tagscache
1849 return self.tagscache
1850
1850
1851 tagscache = super(mqrepo, self).tags()
1851 tagscache = super(mqrepo, self).tags()
1852
1852
1853 q = self.mq
1853 q = self.mq
1854 if not q.applied:
1854 if not q.applied:
1855 return tagscache
1855 return tagscache
1856
1856
1857 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1857 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1858 mqtags.append((mqtags[-1][0], 'qtip'))
1858 mqtags.append((mqtags[-1][0], 'qtip'))
1859 mqtags.append((mqtags[0][0], 'qbase'))
1859 mqtags.append((mqtags[0][0], 'qbase'))
1860 for patch in mqtags:
1860 for patch in mqtags:
1861 if patch[1] in tagscache:
1861 if patch[1] in tagscache:
1862 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1862 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1863 else:
1863 else:
1864 tagscache[patch[1]] = revlog.bin(patch[0])
1864 tagscache[patch[1]] = revlog.bin(patch[0])
1865
1865
1866 return tagscache
1866 return tagscache
1867
1867
1868 if repo.local():
1868 if repo.local():
1869 repo.__class__ = mqrepo
1869 repo.__class__ = mqrepo
1870 repo.mq = queue(ui, repo.join(""))
1870 repo.mq = queue(ui, repo.join(""))
1871
1871
1872 cmdtable = {
1872 cmdtable = {
1873 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1873 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1874 "qclone": (clone,
1874 "qclone": (clone,
1875 [('', 'pull', None, _('use pull protocol to copy metadata')),
1875 [('', 'pull', None, _('use pull protocol to copy metadata')),
1876 ('U', 'noupdate', None, _('do not update the new working directories')),
1876 ('U', 'noupdate', None, _('do not update the new working directories')),
1877 ('', 'uncompressed', None,
1877 ('', 'uncompressed', None,
1878 _('use uncompressed transfer (fast over LAN)')),
1878 _('use uncompressed transfer (fast over LAN)')),
1879 ('e', 'ssh', '', _('specify ssh command to use')),
1879 ('e', 'ssh', '', _('specify ssh command to use')),
1880 ('p', 'patches', '', _('location of source patch repo')),
1880 ('p', 'patches', '', _('location of source patch repo')),
1881 ('', 'remotecmd', '',
1881 ('', 'remotecmd', '',
1882 _('specify hg command to run on the remote side'))],
1882 _('specify hg command to run on the remote side'))],
1883 'hg qclone [OPTION]... SOURCE [DEST]'),
1883 'hg qclone [OPTION]... SOURCE [DEST]'),
1884 "qcommit|qci":
1884 "qcommit|qci":
1885 (commit,
1885 (commit,
1886 commands.table["^commit|ci"][1],
1886 commands.table["^commit|ci"][1],
1887 'hg qcommit [OPTION]... [FILE]...'),
1887 'hg qcommit [OPTION]... [FILE]...'),
1888 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1888 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1889 "qdelete|qremove|qrm":
1889 "qdelete|qremove|qrm":
1890 (delete,
1890 (delete,
1891 [('k', 'keep', None, _('keep patch file'))],
1891 [('k', 'keep', None, _('keep patch file'))],
1892 'hg qdelete [-k] PATCH'),
1892 'hg qdelete [-k] PATCH'),
1893 'qfold':
1893 'qfold':
1894 (fold,
1894 (fold,
1895 [('e', 'edit', None, _('edit patch header')),
1895 [('e', 'edit', None, _('edit patch header')),
1896 ('k', 'keep', None, _('keep folded patch files')),
1896 ('k', 'keep', None, _('keep folded patch files')),
1897 ('m', 'message', '', _('set patch header to <text>')),
1897 ('m', 'message', '', _('set patch header to <text>')),
1898 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1898 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1899 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1899 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1900 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1900 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1901 ('n', 'none', None, _('drop all guards'))],
1901 ('n', 'none', None, _('drop all guards'))],
1902 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1902 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1903 'qheader': (header, [],
1903 'qheader': (header, [],
1904 _('hg qheader [PATCH]')),
1904 _('hg qheader [PATCH]')),
1905 "^qimport":
1905 "^qimport":
1906 (qimport,
1906 (qimport,
1907 [('e', 'existing', None, 'import file in patch dir'),
1907 [('e', 'existing', None, 'import file in patch dir'),
1908 ('n', 'name', '', 'patch file name'),
1908 ('n', 'name', '', 'patch file name'),
1909 ('f', 'force', None, 'overwrite existing files')],
1909 ('f', 'force', None, 'overwrite existing files')],
1910 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1910 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1911 "^qinit":
1911 "^qinit":
1912 (init,
1912 (init,
1913 [('c', 'create-repo', None, 'create queue repository')],
1913 [('c', 'create-repo', None, 'create queue repository')],
1914 'hg qinit [-c]'),
1914 'hg qinit [-c]'),
1915 "qnew":
1915 "qnew":
1916 (new,
1916 (new,
1917 [('m', 'message', '', _('use <text> as commit message')),
1917 [('m', 'message', '', _('use <text> as commit message')),
1918 ('l', 'logfile', '', _('read the commit message from <file>')),
1918 ('l', 'logfile', '', _('read the commit message from <file>')),
1919 ('f', 'force', None, _('import uncommitted changes into patch'))],
1919 ('f', 'force', None, _('import uncommitted changes into patch'))],
1920 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1920 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1921 "qnext": (next, [], 'hg qnext'),
1921 "qnext": (next, [], 'hg qnext'),
1922 "qprev": (prev, [], 'hg qprev'),
1922 "qprev": (prev, [], 'hg qprev'),
1923 "^qpop":
1923 "^qpop":
1924 (pop,
1924 (pop,
1925 [('a', 'all', None, 'pop all patches'),
1925 [('a', 'all', None, 'pop all patches'),
1926 ('n', 'name', '', 'queue name to pop'),
1926 ('n', 'name', '', 'queue name to pop'),
1927 ('f', 'force', None, 'forget any local changes')],
1927 ('f', 'force', None, 'forget any local changes')],
1928 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1928 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1929 "^qpush":
1929 "^qpush":
1930 (push,
1930 (push,
1931 [('f', 'force', None, 'apply if the patch has rejects'),
1931 [('f', 'force', None, 'apply if the patch has rejects'),
1932 ('l', 'list', None, 'list patch name in commit text'),
1932 ('l', 'list', None, 'list patch name in commit text'),
1933 ('a', 'all', None, 'apply all patches'),
1933 ('a', 'all', None, 'apply all patches'),
1934 ('m', 'merge', None, 'merge from another queue'),
1934 ('m', 'merge', None, 'merge from another queue'),
1935 ('n', 'name', '', 'merge queue name')],
1935 ('n', 'name', '', 'merge queue name')],
1936 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1936 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1937 "^qrefresh":
1937 "^qrefresh":
1938 (refresh,
1938 (refresh,
1939 [('e', 'edit', None, _('edit commit message')),
1939 [('e', 'edit', None, _('edit commit message')),
1940 ('m', 'message', '', _('change commit message with <text>')),
1940 ('m', 'message', '', _('change commit message with <text>')),
1941 ('l', 'logfile', '', _('change commit message with <file> content')),
1941 ('l', 'logfile', '', _('change commit message with <file> content')),
1942 ('s', 'short', None, 'short refresh')],
1942 ('s', 'short', None, 'short refresh')],
1943 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1943 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1944 'qrename|qmv':
1944 'qrename|qmv':
1945 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1945 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1946 "qrestore":
1946 "qrestore":
1947 (restore,
1947 (restore,
1948 [('d', 'delete', None, 'delete save entry'),
1948 [('d', 'delete', None, 'delete save entry'),
1949 ('u', 'update', None, 'update queue working dir')],
1949 ('u', 'update', None, 'update queue working dir')],
1950 'hg qrestore [-d] [-u] REV'),
1950 'hg qrestore [-d] [-u] REV'),
1951 "qsave":
1951 "qsave":
1952 (save,
1952 (save,
1953 [('m', 'message', '', _('use <text> as commit message')),
1953 [('m', 'message', '', _('use <text> as commit message')),
1954 ('l', 'logfile', '', _('read the commit message from <file>')),
1954 ('l', 'logfile', '', _('read the commit message from <file>')),
1955 ('c', 'copy', None, 'copy patch directory'),
1955 ('c', 'copy', None, 'copy patch directory'),
1956 ('n', 'name', '', 'copy directory name'),
1956 ('n', 'name', '', 'copy directory name'),
1957 ('e', 'empty', None, 'clear queue status file'),
1957 ('e', 'empty', None, 'clear queue status file'),
1958 ('f', 'force', None, 'force copy')],
1958 ('f', 'force', None, 'force copy')],
1959 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1959 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1960 "qselect": (select,
1960 "qselect": (select,
1961 [('n', 'none', None, _('disable all guards')),
1961 [('n', 'none', None, _('disable all guards')),
1962 ('s', 'series', None, _('list all guards in series file')),
1962 ('s', 'series', None, _('list all guards in series file')),
1963 ('', 'pop', None,
1963 ('', 'pop', None,
1964 _('pop to before first guarded applied patch')),
1964 _('pop to before first guarded applied patch')),
1965 ('', 'reapply', None, _('pop, then reapply patches'))],
1965 ('', 'reapply', None, _('pop, then reapply patches'))],
1966 'hg qselect [OPTION...] [GUARD...]'),
1966 'hg qselect [OPTION...] [GUARD...]'),
1967 "qseries":
1967 "qseries":
1968 (series,
1968 (series,
1969 [('m', 'missing', None, 'print patches not in series'),
1969 [('m', 'missing', None, 'print patches not in series'),
1970 ('s', 'summary', None, _('print first line of patch header'))],
1970 ('s', 'summary', None, _('print first line of patch header'))],
1971 'hg qseries [-m] [-s]'),
1971 'hg qseries [-m] [-s]'),
1972 "^strip":
1972 "^strip":
1973 (strip,
1973 (strip,
1974 [('f', 'force', None, 'force multi-head removal'),
1974 [('f', 'force', None, 'force multi-head removal'),
1975 ('b', 'backup', None, 'bundle unrelated changesets'),
1975 ('b', 'backup', None, 'bundle unrelated changesets'),
1976 ('n', 'nobackup', None, 'no backups')],
1976 ('n', 'nobackup', None, 'no backups')],
1977 'hg strip [-f] [-b] [-n] REV'),
1977 'hg strip [-f] [-b] [-n] REV'),
1978 "qtop": (top, [], 'hg qtop'),
1978 "qtop": (top, [], 'hg qtop'),
1979 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1979 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1980 }
1980 }
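
For context, each entry in the command table above maps a command name (with aliases after '|' and a leading '^' to surface it in short help) to a tuple of callback, option list, and usage string. A minimal sketch of the same shape, using an invented 'hello' command rather than anything from mq:

def hello(ui, repo, *pats, **opts):
    # invented command: greet from the repository root
    greeting = opts.get('greeting') or 'hello'
    ui.write('%s from %s\n' % (greeting, repo.root))

cmdtable = {
    # '|' adds an alias, a leading '^' would list it in short help
    'hello|hi': (hello,
                 [('g', 'greeting', '', 'greeting text to use')],
                 'hg hello [-g TEXT]'),
}

Mercurial's dispatcher unpacks that tuple, parses the options, and calls the callback with ui, the repository, any positional arguments, and the options as keywords, which is why every callback referenced above has the (ui, repo, ...) shape.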
@@ -1,276 +1,276 b''
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets arrive at one time
22 # # batch emails when many changesets arrive at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # sources = serve # notify if source of incoming changes is in this list
43 # sources = serve # notify if source of incoming changes is in this list
44 # # (serve == ssh or http, push, pull, bundle)
44 # # (serve == ssh or http, push, pull, bundle)
45 # [email]
45 # [email]
46 # from = user@host.com # email address to send as if none given
46 # from = user@host.com # email address to send as if none given
47 # [web]
47 # [web]
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 #
49 #
50 # notify config file has same format as regular hgrc. it has two
50 # notify config file has same format as regular hgrc. it has two
51 # sections so you can express subscriptions in whatever way is handier
51 # sections so you can express subscriptions in whatever way is handier
52 # for you.
52 # for you.
53 #
53 #
54 # [usersubs]
54 # [usersubs]
55 # # key is subscriber email, value is ","-separated list of glob patterns
55 # # key is subscriber email, value is ","-separated list of glob patterns
56 # user@host = pattern
56 # user@host = pattern
57 #
57 #
58 # [reposubs]
58 # [reposubs]
59 # # key is glob pattern, value is ","-separated list of subscriber emails
59 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # pattern = user@host
60 # pattern = user@host
61 #
61 #
62 # glob patterns are matched against path to repo root.
62 # glob patterns are matched against path to repo root.
63 #
63 #
64 # if you like, you can put the notify config file in a repo that users can
64 # if you like, you can put the notify config file in a repo that users can
65 # push changes to, so they can manage their own subscriptions.
65 # push changes to, so they can manage their own subscriptions.
66
66
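
The python: hook syntax shown in the sample hgrc above means Mercurial imports the named module and calls the function in-process. A hypothetical hook with the same calling convention as notify.hook further down this file (none of these names come from the changeset):

def loghook(ui, repo, hooktype, node=None, source=None, **kwargs):
    # invented hook: just report what fired; a pretxn-style hook could
    # refuse the incoming changes by returning a true value
    ui.status('hook %s fired for %s (source %s)\n' % (hooktype, node, source))

# enabled from an hgrc roughly like:
# [hooks]
# incoming.log = python:myhooks.loghook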
67 from mercurial.demandload import *
67 from mercurial.demandload import *
68 from mercurial.i18n import gettext as _
68 from mercurial.i18n import gettext as _
69 from mercurial.node import *
69 from mercurial.node import *
70 demandload(globals(), 'email.Parser mercurial:commands,patch,templater,util')
70 demandload(globals(), 'mercurial:commands,patch,templater,util,mail')
71 demandload(globals(), 'fnmatch socket time')
71 demandload(globals(), 'email.Parser fnmatch socket time')
72
72
73 # template for single changeset can include email headers.
73 # template for single changeset can include email headers.
74 single_template = '''
74 single_template = '''
75 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 From: {author}
76 From: {author}
77
77
78 changeset {node|short} in {root}
78 changeset {node|short} in {root}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 description:
80 description:
81 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
82 '''.lstrip()
82 '''.lstrip()
83
83
84 # template for multiple changesets should not contain email headers,
84 # template for multiple changesets should not contain email headers,
85 # because only first set of headers will be used and result will look
85 # because only first set of headers will be used and result will look
86 # strange.
86 # strange.
87 multiple_template = '''
87 multiple_template = '''
88 changeset {node|short} in {root}
88 changeset {node|short} in {root}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 summary: {desc|firstline}
90 summary: {desc|firstline}
91 '''
91 '''
92
92
93 deftemplates = {
93 deftemplates = {
94 'changegroup': multiple_template,
94 'changegroup': multiple_template,
95 }
95 }
96
96
97 class notifier(object):
97 class notifier(object):
98 '''email notification class.'''
98 '''email notification class.'''
99
99
100 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
101 self.ui = ui
101 self.ui = ui
102 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
103 if cfg:
103 if cfg:
104 self.ui.readconfig(cfg)
104 self.ui.readconfig(cfg)
105 self.repo = repo
105 self.repo = repo
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
108 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
109 self.sio = templater.stringio()
109 self.sio = templater.stringio()
110 self.subs = self.subscribers()
110 self.subs = self.subscribers()
111
111
112 mapfile = self.ui.config('notify', 'style')
112 mapfile = self.ui.config('notify', 'style')
113 template = (self.ui.config('notify', hooktype) or
113 template = (self.ui.config('notify', hooktype) or
114 self.ui.config('notify', 'template'))
114 self.ui.config('notify', 'template'))
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
116 self.sio)
116 self.sio)
117 if not mapfile and not template:
117 if not mapfile and not template:
118 template = deftemplates.get(hooktype) or single_template
118 template = deftemplates.get(hooktype) or single_template
119 if template:
119 if template:
120 template = templater.parsestring(template, quoted=False)
120 template = templater.parsestring(template, quoted=False)
121 self.t.use_template(template)
121 self.t.use_template(template)
122
122
123 def strip(self, path):
123 def strip(self, path):
124 '''strip leading slashes from local path, turn into web-safe path.'''
124 '''strip leading slashes from local path, turn into web-safe path.'''
125
125
126 path = util.pconvert(path)
126 path = util.pconvert(path)
127 count = self.stripcount
127 count = self.stripcount
128 while count > 0:
128 while count > 0:
129 c = path.find('/')
129 c = path.find('/')
130 if c == -1:
130 if c == -1:
131 break
131 break
132 path = path[c+1:]
132 path = path[c+1:]
133 count -= 1
133 count -= 1
134 return path
134 return path
135
135
136 def fixmail(self, addr):
136 def fixmail(self, addr):
137 '''try to clean up email addresses.'''
137 '''try to clean up email addresses.'''
138
138
139 addr = templater.email(addr.strip())
139 addr = templater.email(addr.strip())
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 subs = subs.keys()
160 subs.sort()
160 subs.sort()
161 return subs
161 return subs
162
162
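
subscribers() builds its list by fnmatch-matching each configured glob against the repository root, as the header comment describes. A standalone sketch with invented paths and addresses:

import fnmatch

# invented repository root and [usersubs]-style entries
repo_root = '/srv/hg/projects/widget'
usersubs = {'alice@example.com': '/srv/hg/projects/*',
            'bob@example.com': '*/website'}

subs = {}
for user, pats in usersubs.items():
    for pat in pats.split(','):
        if fnmatch.fnmatch(repo_root, pat.strip()):
            subs[user] = 1          # dict used as a set, as in subscribers()

print(sorted(subs))                 # ['alice@example.com']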
163 def url(self, path=None):
163 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
164 return self.ui.config('web', 'baseurl') + (path or self.root)
165
165
166 def node(self, node):
166 def node(self, node):
167 '''format one changeset.'''
167 '''format one changeset.'''
168
168
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
170 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
171 root=self.repo.root,
172 webroot=self.root)
172 webroot=self.root)
173
173
174 def skipsource(self, source):
174 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
175 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
177 return source not in ok_sources
178
178
179 def send(self, node, count):
179 def send(self, node, count):
180 '''send message.'''
180 '''send message.'''
181
181
182 p = email.Parser.Parser()
182 p = email.Parser.Parser()
183 self.sio.seek(0)
183 self.sio.seek(0)
184 msg = p.parse(self.sio)
184 msg = p.parse(self.sio)
185
185
186 def fix_subject():
186 def fix_subject():
187 '''try to make subject line exist and be useful.'''
187 '''try to make subject line exist and be useful.'''
188
188
189 subject = msg['Subject']
189 subject = msg['Subject']
190 if not subject:
190 if not subject:
191 if count > 1:
191 if count > 1:
192 subject = _('%s: %d new changesets') % (self.root, count)
192 subject = _('%s: %d new changesets') % (self.root, count)
193 else:
193 else:
194 changes = self.repo.changelog.read(node)
194 changes = self.repo.changelog.read(node)
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
196 subject = '%s: %s' % (self.root, s)
196 subject = '%s: %s' % (self.root, s)
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
198 if maxsubject and len(subject) > maxsubject:
198 if maxsubject and len(subject) > maxsubject:
199 subject = subject[:maxsubject-3] + '...'
199 subject = subject[:maxsubject-3] + '...'
200 del msg['Subject']
200 del msg['Subject']
201 msg['Subject'] = subject
201 msg['Subject'] = subject
202
202
203 def fix_sender():
203 def fix_sender():
204 '''try to make message have proper sender.'''
204 '''try to make message have proper sender.'''
205
205
206 sender = msg['From']
206 sender = msg['From']
207 if not sender:
207 if not sender:
208 sender = self.ui.config('email', 'from') or self.ui.username()
208 sender = self.ui.config('email', 'from') or self.ui.username()
209 if '@' not in sender or '@localhost' in sender:
209 if '@' not in sender or '@localhost' in sender:
210 sender = self.fixmail(sender)
210 sender = self.fixmail(sender)
211 del msg['From']
211 del msg['From']
212 msg['From'] = sender
212 msg['From'] = sender
213
213
214 fix_subject()
214 fix_subject()
215 fix_sender()
215 fix_sender()
216
216
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 if not msg['Message-Id']:
218 if not msg['Message-Id']:
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 (short(node), int(time.time()),
220 (short(node), int(time.time()),
221 hash(self.repo.root), socket.getfqdn()))
221 hash(self.repo.root), socket.getfqdn()))
222 msg['To'] = ', '.join(self.subs)
222 msg['To'] = ', '.join(self.subs)
223
223
224 msgtext = msg.as_string(0)
224 msgtext = msg.as_string(0)
225 if self.ui.configbool('notify', 'test', True):
225 if self.ui.configbool('notify', 'test', True):
226 self.ui.write(msgtext)
226 self.ui.write(msgtext)
227 if not msgtext.endswith('\n'):
227 if not msgtext.endswith('\n'):
228 self.ui.write('\n')
228 self.ui.write('\n')
229 else:
229 else:
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 (len(self.subs), count))
231 (len(self.subs), count))
232 mail = self.ui.sendmail()
232 mail.sendmail(self.ui, templater.email(msg['From']),
233 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
233 self.subs, msgtext)
234
234
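
send() re-parses the templated text as an RFC 2822 message so that headers emitted by the template (Subject, From) survive and missing ones can be filled in. A rough sketch of that round trip using email.message_from_string, which wraps the same parser used above; the sample text and addresses are invented:

import email

# invented template output: one header line, a blank line, then the body,
# which is the shape single_template produces
text = ('Subject: changeset in widget: fix the frobnicator\n'
        '\n'
        'changeset abc123 in /srv/hg/widget\n')

msg = email.message_from_string(text)
if not msg['From']:                          # header missing: supply one
    msg['From'] = 'hg@example.com'
if not msg['Message-Id']:
    msg['Message-Id'] = '<hg.abc123.0@example.com>'
print(msg.as_string())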
235 def diff(self, node, ref):
235 def diff(self, node, ref):
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 if maxdiff == 0:
237 if maxdiff == 0:
238 return
238 return
239 fp = templater.stringio()
239 fp = templater.stringio()
240 prev = self.repo.changelog.parents(node)[0]
240 prev = self.repo.changelog.parents(node)[0]
241 patch.diff(self.repo, fp, prev, ref)
241 patch.diff(self.repo, fp, prev, ref)
242 difflines = fp.getvalue().splitlines(1)
242 difflines = fp.getvalue().splitlines(1)
243 if maxdiff > 0 and len(difflines) > maxdiff:
243 if maxdiff > 0 and len(difflines) > maxdiff:
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
245 (len(difflines), maxdiff))
245 (len(difflines), maxdiff))
246 difflines = difflines[:maxdiff]
246 difflines = difflines[:maxdiff]
247 elif difflines:
247 elif difflines:
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
249 self.sio.write(*difflines)
249 self.sio.write(*difflines)
250
250
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
252 '''send email notifications to interested subscribers.
252 '''send email notifications to interested subscribers.
253
253
254 if used as changegroup hook, send one email for all changesets in
254 if used as changegroup hook, send one email for all changesets in
255 changegroup. else send one email per changeset.'''
255 changegroup. else send one email per changeset.'''
256 n = notifier(ui, repo, hooktype)
256 n = notifier(ui, repo, hooktype)
257 if not n.subs:
257 if not n.subs:
258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
259 return
259 return
260 if n.skipsource(source):
260 if n.skipsource(source):
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
262 source)
262 source)
263 return
263 return
264 node = bin(node)
264 node = bin(node)
265 if hooktype == 'changegroup':
265 if hooktype == 'changegroup':
266 start = repo.changelog.rev(node)
266 start = repo.changelog.rev(node)
267 end = repo.changelog.count()
267 end = repo.changelog.count()
268 count = end - start
268 count = end - start
269 for rev in xrange(start, end):
269 for rev in xrange(start, end):
270 n.node(repo.changelog.node(rev))
270 n.node(repo.changelog.node(rev))
271 n.diff(node, repo.changelog.tip())
271 n.diff(node, repo.changelog.tip())
272 else:
272 else:
273 count = 1
273 count = 1
274 n.node(node)
274 n.node(node)
275 n.diff(node, node)
275 n.diff(node, node)
276 n.send(node, count)
276 n.send(node, count)
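
The changegroup branch above relies on the fact that the hook receives only the first new changeset: everything from its revision up to the new tip belongs to the incoming group, so one email can cover them all. The arithmetic, with invented revision numbers:

# invented numbers standing in for repo.changelog.rev(node) and count()
start = 42              # revision of the first changeset in the group
end = 47                # changelog count after the incoming changes
count = end - start     # 5 changesets summarised in a single email
for rev in range(start, end):
    print('would format changeset at revision %d' % rev)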
@@ -1,309 +1,309 b''
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # It is best to run this script with the "-n" (test only) flag before
26 # It is best to run this script with the "-n" (test only) flag before
27 # firing it up "for real", in which case it will use your pager to
27 # firing it up "for real", in which case it will use your pager to
28 # display each of the messages that it would send.
28 # display each of the messages that it would send.
29 #
29 #
30 # The "-m" (mbox) option will create an mbox file instead of sending
30 # The "-m" (mbox) option will create an mbox file instead of sending
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
33 #
33 #
34 # To configure other defaults, add a section like this to your hgrc
34 # To configure other defaults, add a section like this to your hgrc
35 # file:
35 # file:
36 #
36 #
37 # [email]
37 # [email]
38 # from = My Name <my@email>
38 # from = My Name <my@email>
39 # to = recipient1, recipient2, ...
39 # to = recipient1, recipient2, ...
40 # cc = cc1, cc2, ...
40 # cc = cc1, cc2, ...
41 # bcc = bcc1, bcc2, ...
41 # bcc = bcc1, bcc2, ...
42
42
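
The threading described above boils down to giving the introductory message a Message-Id and pointing each patch mail's In-Reply-To at it, which is what the loop near the end of this file does. A self-contained sketch with invented ids:

from email.message import Message

intro = Message()
intro['Subject'] = '[PATCH 0 of 2] frobnicator fixes'   # invented series
intro['Message-Id'] = '<hg.intro.0@example.com>'        # invented id

mails = []
for i in (1, 2):
    m = Message()
    m['Subject'] = '[PATCH %d of 2] fix part %d' % (i, i)
    m['Message-Id'] = '<hg.patch%d.0@example.com>' % i
    m['In-Reply-To'] = intro['Message-Id']              # thread under intro
    mails.append(m)

print(mails[1]['In-Reply-To'])    # <hg.intro.0@example.com>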
43 from mercurial.demandload import *
43 from mercurial.demandload import *
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
45 mercurial:commands,hg,ui
45 mercurial:commands,hg,ui
46 os errno popen2 socket sys tempfile time''')
46 os errno popen2 socket sys tempfile time''')
47 from mercurial.i18n import gettext as _
47 from mercurial.i18n import gettext as _
48 from mercurial.node import *
48 from mercurial.node import *
49
49
50 try:
50 try:
51 # readline gives raw_input editing capabilities, but is not
51 # readline gives raw_input editing capabilities, but is not
52 # present on windows
52 # present on windows
53 import readline
53 import readline
54 except ImportError: pass
54 except ImportError: pass
55
55
56 def diffstat(patch):
56 def diffstat(patch):
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
58 try:
58 try:
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
60 try:
60 try:
61 for line in patch: print >> p.tochild, line
61 for line in patch: print >> p.tochild, line
62 p.tochild.close()
62 p.tochild.close()
63 if p.wait(): return
63 if p.wait(): return
64 fp = os.fdopen(fd, 'r')
64 fp = os.fdopen(fd, 'r')
65 stat = []
65 stat = []
66 for line in fp: stat.append(line.lstrip())
66 for line in fp: stat.append(line.lstrip())
67 last = stat.pop()
67 last = stat.pop()
68 stat.insert(0, last)
68 stat.insert(0, last)
69 stat = ''.join(stat)
69 stat = ''.join(stat)
70 if stat.startswith('0 files'): raise ValueError
70 if stat.startswith('0 files'): raise ValueError
71 return stat
71 return stat
72 except: raise
72 except: raise
73 finally:
73 finally:
74 try: os.unlink(name)
74 try: os.unlink(name)
75 except: pass
75 except: pass
76
76
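
diffstat() above shells out through popen2 and a temporary file, which was the idiom at the time; a rough equivalent using the subprocess module (assuming a diffstat binary on PATH, and not part of this changeset) could look like:

import subprocess

def diffstat_via_subprocess(patch_lines):
    # feed the patch to diffstat on stdin and capture the summary
    p = subprocess.Popen(['diffstat', '-p1', '-w79'],
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, err = p.communicate('\n'.join(patch_lines).encode())
    if p.returncode:
        return None
    return out.decode()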
77 def patchbomb(ui, repo, *revs, **opts):
77 def patchbomb(ui, repo, *revs, **opts):
78 '''send changesets as a series of patch emails
78 '''send changesets as a series of patch emails
79
79
80 The series starts with a "[PATCH 0 of N]" introduction, which
80 The series starts with a "[PATCH 0 of N]" introduction, which
81 describes the series as a whole.
81 describes the series as a whole.
82
82
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
84 the first line of the changeset description as the subject text.
84 the first line of the changeset description as the subject text.
85 The message contains two or three body parts. First, the rest of
85 The message contains two or three body parts. First, the rest of
86 the changeset description. Next, (optionally) if the diffstat
86 the changeset description. Next, (optionally) if the diffstat
87 program is installed, the result of running diffstat on the patch.
87 program is installed, the result of running diffstat on the patch.
88 Finally, the patch itself, as generated by "hg export".'''
88 Finally, the patch itself, as generated by "hg export".'''
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
90 if default: prompt += ' [%s]' % default
90 if default: prompt += ' [%s]' % default
91 prompt += rest
91 prompt += rest
92 while True:
92 while True:
93 r = raw_input(prompt)
93 r = raw_input(prompt)
94 if r: return r
94 if r: return r
95 if default is not None: return default
95 if default is not None: return default
96 if empty_ok: return r
96 if empty_ok: return r
97 ui.warn(_('Please enter a valid value.\n'))
97 ui.warn(_('Please enter a valid value.\n'))
98
98
99 def confirm(s):
99 def confirm(s):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
101 raise ValueError
101 raise ValueError
102
102
103 def cdiffstat(summary, patch):
103 def cdiffstat(summary, patch):
104 s = diffstat(patch)
104 s = diffstat(patch)
105 if s:
105 if s:
106 if summary:
106 if summary:
107 ui.write(summary, '\n')
107 ui.write(summary, '\n')
108 ui.write(s, '\n')
108 ui.write(s, '\n')
109 confirm(_('Does the diffstat above look okay'))
109 confirm(_('Does the diffstat above look okay'))
110 return s
110 return s
111
111
112 def makepatch(patch, idx, total):
112 def makepatch(patch, idx, total):
113 desc = []
113 desc = []
114 node = None
114 node = None
115 body = ''
115 body = ''
116 for line in patch:
116 for line in patch:
117 if line.startswith('#'):
117 if line.startswith('#'):
118 if line.startswith('# Node ID'): node = line.split()[-1]
118 if line.startswith('# Node ID'): node = line.split()[-1]
119 continue
119 continue
120 if line.startswith('diff -r'): break
120 if line.startswith('diff -r'): break
121 desc.append(line)
121 desc.append(line)
122 if not node: raise ValueError
122 if not node: raise ValueError
123
123
124 #body = ('\n'.join(desc[1:]).strip() or
124 #body = ('\n'.join(desc[1:]).strip() or
125 # 'Patch subject is complete summary.')
125 # 'Patch subject is complete summary.')
126 #body += '\n\n\n'
126 #body += '\n\n\n'
127
127
128 if opts['plain']:
128 if opts['plain']:
129 while patch and patch[0].startswith('# '): patch.pop(0)
129 while patch and patch[0].startswith('# '): patch.pop(0)
130 if patch: patch.pop(0)
130 if patch: patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
132 if opts['diffstat']:
132 if opts['diffstat']:
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
134 if opts['attach']:
134 if opts['attach']:
135 msg = email.MIMEMultipart.MIMEMultipart()
135 msg = email.MIMEMultipart.MIMEMultipart()
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
138 binnode = bin(node)
138 binnode = bin(node)
139 # if node is mq patch, it will have patch file name as tag
139 # if node is mq patch, it will have patch file name as tag
140 patchname = [t for t in repo.nodetags(binnode)
140 patchname = [t for t in repo.nodetags(binnode)
141 if t.endswith('.patch') or t.endswith('.diff')]
141 if t.endswith('.patch') or t.endswith('.diff')]
142 if patchname:
142 if patchname:
143 patchname = patchname[0]
143 patchname = patchname[0]
144 elif total > 1:
144 elif total > 1:
145 patchname = commands.make_filename(repo, '%b-%n.patch',
145 patchname = commands.make_filename(repo, '%b-%n.patch',
146 binnode, idx, total)
146 binnode, idx, total)
147 else:
147 else:
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
149 p['Content-Disposition'] = 'inline; filename=' + patchname
149 p['Content-Disposition'] = 'inline; filename=' + patchname
150 msg.attach(p)
150 msg.attach(p)
151 else:
151 else:
152 body += '\n'.join(patch)
152 body += '\n'.join(patch)
153 msg = email.MIMEText.MIMEText(body)
153 msg = email.MIMEText.MIMEText(body)
154 if total == 1:
154 if total == 1:
155 subj = '[PATCH] ' + desc[0].strip()
155 subj = '[PATCH] ' + desc[0].strip()
156 else:
156 else:
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
158 if subj.endswith('.'): subj = subj[:-1]
158 if subj.endswith('.'): subj = subj[:-1]
159 msg['Subject'] = subj
159 msg['Subject'] = subj
160 msg['X-Mercurial-Node'] = node
160 msg['X-Mercurial-Node'] = node
161 return msg
161 return msg
162
162
163 start_time = int(time.time())
163 start_time = int(time.time())
164
164
165 def genmsgid(id):
165 def genmsgid(id):
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
167
167
168 patches = []
168 patches = []
169
169
170 class exportee:
170 class exportee:
171 def __init__(self, container):
171 def __init__(self, container):
172 self.lines = []
172 self.lines = []
173 self.container = container
173 self.container = container
174 self.name = 'email'
174 self.name = 'email'
175
175
176 def write(self, data):
176 def write(self, data):
177 self.lines.append(data)
177 self.lines.append(data)
178
178
179 def close(self):
179 def close(self):
180 self.container.append(''.join(self.lines).split('\n'))
180 self.container.append(''.join(self.lines).split('\n'))
181 self.lines = []
181 self.lines = []
182
182
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
184 'switch_parent': False,
184 'switch_parent': False,
185 'text': None})
185 'text': None})
186
186
187 jumbo = []
187 jumbo = []
188 msgs = []
188 msgs = []
189
189
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
191
191
192 for p, i in zip(patches, range(len(patches))):
192 for p, i in zip(patches, range(len(patches))):
193 jumbo.extend(p)
193 jumbo.extend(p)
194 msgs.append(makepatch(p, i + 1, len(patches)))
194 msgs.append(makepatch(p, i + 1, len(patches)))
195
195
196 sender = (opts['from'] or ui.config('email', 'from') or
196 sender = (opts['from'] or ui.config('email', 'from') or
197 ui.config('patchbomb', 'from') or
197 ui.config('patchbomb', 'from') or
198 prompt('From', ui.username()))
198 prompt('From', ui.username()))
199
199
200 def getaddrs(opt, prpt, default = None):
200 def getaddrs(opt, prpt, default = None):
201 addrs = opts[opt] or (ui.config('email', opt) or
201 addrs = opts[opt] or (ui.config('email', opt) or
202 ui.config('patchbomb', opt) or
202 ui.config('patchbomb', opt) or
203 prompt(prpt, default = default)).split(',')
203 prompt(prpt, default = default)).split(',')
204 return [a.strip() for a in addrs if a.strip()]
204 return [a.strip() for a in addrs if a.strip()]
205 to = getaddrs('to', 'To')
205 to = getaddrs('to', 'To')
206 cc = getaddrs('cc', 'Cc', '')
206 cc = getaddrs('cc', 'Cc', '')
207
207
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
209 ui.config('patchbomb', 'bcc') or '').split(',')
209 ui.config('patchbomb', 'bcc') or '').split(',')
210 bcc = [a.strip() for a in bcc if a.strip()]
210 bcc = [a.strip() for a in bcc if a.strip()]
211
211
212 if len(patches) > 1:
212 if len(patches) > 1:
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
214
214
215 subj = '[PATCH 0 of %d] %s' % (
215 subj = '[PATCH 0 of %d] %s' % (
216 len(patches),
216 len(patches),
217 opts['subject'] or
217 opts['subject'] or
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
219
219
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
221
221
222 body = []
222 body = []
223
223
224 while True:
224 while True:
225 try: l = raw_input()
225 try: l = raw_input()
226 except EOFError: break
226 except EOFError: break
227 if l == '.': break
227 if l == '.': break
228 body.append(l)
228 body.append(l)
229
229
230 if opts['diffstat']:
230 if opts['diffstat']:
231 d = cdiffstat(_('Final summary:\n'), jumbo)
231 d = cdiffstat(_('Final summary:\n'), jumbo)
232 if d: body.append('\n' + d)
232 if d: body.append('\n' + d)
233
233
234 body = '\n'.join(body) + '\n'
234 body = '\n'.join(body) + '\n'
235
235
236 msg = email.MIMEText.MIMEText(body)
236 msg = email.MIMEText.MIMEText(body)
237 msg['Subject'] = subj
237 msg['Subject'] = subj
238
238
239 msgs.insert(0, msg)
239 msgs.insert(0, msg)
240
240
241 ui.write('\n')
241 ui.write('\n')
242
242
243 if not opts['test'] and not opts['mbox']:
243 if not opts['test'] and not opts['mbox']:
244 mail = ui.sendmail()
244 mailer = mail.connect(ui)
245 parent = None
245 parent = None
246
246
247 # Calculate UTC offset
247 # Calculate UTC offset
248 if time.daylight: offset = time.altzone
248 if time.daylight: offset = time.altzone
249 else: offset = time.timezone
249 else: offset = time.timezone
250 if offset <= 0: sign, offset = '+', -offset
250 if offset <= 0: sign, offset = '+', -offset
251 else: sign = '-'
251 else: sign = '-'
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
253
253
254 sender_addr = email.Utils.parseaddr(sender)[1]
254 sender_addr = email.Utils.parseaddr(sender)[1]
255 for m in msgs:
255 for m in msgs:
256 try:
256 try:
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
258 except TypeError:
258 except TypeError:
259 m['Message-Id'] = genmsgid('patchbomb')
259 m['Message-Id'] = genmsgid('patchbomb')
260 if parent:
260 if parent:
261 m['In-Reply-To'] = parent
261 m['In-Reply-To'] = parent
262 else:
262 else:
263 parent = m['Message-Id']
263 parent = m['Message-Id']
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
265
265
266 start_time += 1
266 start_time += 1
267 m['From'] = sender
267 m['From'] = sender
268 m['To'] = ', '.join(to)
268 m['To'] = ', '.join(to)
269 if cc: m['Cc'] = ', '.join(cc)
269 if cc: m['Cc'] = ', '.join(cc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
271 if opts['test']:
271 if opts['test']:
272 ui.status('Displaying ', m['Subject'], ' ...\n')
272 ui.status('Displaying ', m['Subject'], ' ...\n')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
274 try:
274 try:
275 fp.write(m.as_string(0))
275 fp.write(m.as_string(0))
276 fp.write('\n')
276 fp.write('\n')
277 except IOError, inst:
277 except IOError, inst:
278 if inst.errno != errno.EPIPE:
278 if inst.errno != errno.EPIPE:
279 raise
279 raise
280 fp.close()
280 fp.close()
281 elif opts['mbox']:
281 elif opts['mbox']:
282 ui.status('Writing ', m['Subject'], ' ...\n')
282 ui.status('Writing ', m['Subject'], ' ...\n')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
284 date = time.asctime(time.localtime(start_time))
284 date = time.asctime(time.localtime(start_time))
285 fp.write('From %s %s\n' % (sender_addr, date))
285 fp.write('From %s %s\n' % (sender_addr, date))
286 fp.write(m.as_string(0))
286 fp.write(m.as_string(0))
287 fp.write('\n\n')
287 fp.write('\n\n')
288 fp.close()
288 fp.close()
289 else:
289 else:
290 ui.status('Sending ', m['Subject'], ' ...\n')
290 ui.status('Sending ', m['Subject'], ' ...\n')
291 # Exim does not remove the Bcc field
291 # Exim does not remove the Bcc field
292 del m['Bcc']
292 del m['Bcc']
293 mail.sendmail(sender, to + bcc + cc, m.as_string(0))
293 mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
294
294
295 cmdtable = {
295 cmdtable = {
296 'email':
296 'email':
297 (patchbomb,
297 (patchbomb,
298 [('a', 'attach', None, 'send patches as inline attachments'),
298 [('a', 'attach', None, 'send patches as inline attachments'),
299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
300 ('c', 'cc', [], 'email addresses of copy recipients'),
300 ('c', 'cc', [], 'email addresses of copy recipients'),
301 ('d', 'diffstat', None, 'add diffstat output to messages'),
301 ('d', 'diffstat', None, 'add diffstat output to messages'),
302 ('f', 'from', '', 'email address of sender'),
302 ('f', 'from', '', 'email address of sender'),
303 ('', 'plain', None, 'omit hg patch header'),
303 ('', 'plain', None, 'omit hg patch header'),
304 ('n', 'test', None, 'print messages that would be sent'),
304 ('n', 'test', None, 'print messages that would be sent'),
305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
306 ('s', 'subject', '', 'subject of introductory message'),
306 ('s', 'subject', '', 'subject of introductory message'),
307 ('t', 'to', [], 'email addresses of recipients')],
307 ('t', 'to', [], 'email addresses of recipients')],
308 "hg email [OPTION]... [REV]...")
308 "hg email [OPTION]... [REV]...")
309 }
309 }
@@ -1,3521 +1,3522 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted = repo.status()[:4]
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def walkchangerevs(ui, repo, pats, opts):
53 def walkchangerevs(ui, repo, pats, opts):
54 '''Iterate over files and the revs they changed in.
54 '''Iterate over files and the revs they changed in.
55
55
56 Callers most commonly need to iterate backwards over the history
56 Callers most commonly need to iterate backwards over the history
57 they are interested in. Doing so has awful (quadratic-looking)
57 they are interested in. Doing so has awful (quadratic-looking)
58 performance, so we use iterators in a "windowed" way.
58 performance, so we use iterators in a "windowed" way.
59
59
60 We walk a window of revisions in the desired order. Within the
60 We walk a window of revisions in the desired order. Within the
61 window, we first walk forwards to gather data, then in the desired
61 window, we first walk forwards to gather data, then in the desired
62 order (usually backwards) to display it.
62 order (usually backwards) to display it.
63
63
64 This function returns an (iterator, getchange, matchfn) tuple. The
64 This function returns an (iterator, getchange, matchfn) tuple. The
65 getchange function returns the changelog entry for a numeric
65 getchange function returns the changelog entry for a numeric
66 revision. The iterator yields 3-tuples. They will be of one of
66 revision. The iterator yields 3-tuples. They will be of one of
67 the following forms:
67 the following forms:
68
68
69 "window", incrementing, lastrev: stepping through a window,
69 "window", incrementing, lastrev: stepping through a window,
70 positive if walking forwards through revs, last rev in the
70 positive if walking forwards through revs, last rev in the
71 sequence iterated over - use to reset state for the current window
71 sequence iterated over - use to reset state for the current window
72
72
73 "add", rev, fns: out-of-order traversal of the given file names
73 "add", rev, fns: out-of-order traversal of the given file names
74 fns, which changed during revision rev - use to gather data for
74 fns, which changed during revision rev - use to gather data for
75 possible display
75 possible display
76
76
77 "iter", rev, None: in-order traversal of the revs earlier iterated
77 "iter", rev, None: in-order traversal of the revs earlier iterated
78 over with "add" - use to display data'''
78 over with "add" - use to display data'''
79
79
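
A consumer of this protocol typically gathers data on the out-of-order "add" events and emits output on the in-order "iter" pass. A hypothetical consumer loop over an invented event stream shaped like the tuples described above:

# invented event stream in the shape the iterator yields
changeiter = [('window', 1, 7),
              ('add', 5, ['a.txt']),
              ('add', 7, ['b.txt', 'c.txt']),
              ('iter', 7, None),
              ('iter', 5, None)]

gathered = {}
for what, rev, data in changeiter:
    if what == 'window':
        gathered.clear()                 # reset state for this window
    elif what == 'add':
        gathered[rev] = data             # collect file names out of order
    elif what == 'iter':                 # display pass, in the desired order
        print('rev %d touched %s' % (rev, gathered.get(rev)))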
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 if start < end:
81 if start < end:
82 while start < end:
82 while start < end:
83 yield start, min(windowsize, end-start)
83 yield start, min(windowsize, end-start)
84 start += windowsize
84 start += windowsize
85 if windowsize < sizelimit:
85 if windowsize < sizelimit:
86 windowsize *= 2
86 windowsize *= 2
87 else:
87 else:
88 while start > end:
88 while start > end:
89 yield start, min(windowsize, start-end-1)
89 yield start, min(windowsize, start-end-1)
90 start -= windowsize
90 start -= windowsize
91 if windowsize < sizelimit:
91 if windowsize < sizelimit:
92 windowsize *= 2
92 windowsize *= 2
93
93
94
94
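
The doubling window sizes are easiest to see by running the generator on its own; the forward case, copied in spirit with small numbers (expected output in the comment):

def increasing_windows(start, end, windowsize=8, sizelimit=512):
    # forward case only, mirroring the generator above
    while start < end:
        yield start, min(windowsize, end - start)
        start += windowsize
        if windowsize < sizelimit:
            windowsize *= 2

print(list(increasing_windows(0, 30, windowsize=4)))
# -> [(0, 4), (4, 8), (12, 16), (28, 2)]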
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 follow = opts.get('follow') or opts.get('follow_first')
96 follow = opts.get('follow') or opts.get('follow_first')
97
97
98 if repo.changelog.count() == 0:
98 if repo.changelog.count() == 0:
99 return [], False, matchfn
99 return [], False, matchfn
100
100
101 if follow:
101 if follow:
102 p = repo.dirstate.parents()[0]
102 p = repo.dirstate.parents()[0]
103 if p == nullid:
103 if p == nullid:
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
105 start = 'tip'
105 start = 'tip'
106 else:
106 else:
107 start = repo.changelog.rev(p)
107 start = repo.changelog.rev(p)
108 defrange = '%s:0' % start
108 defrange = '%s:0' % start
109 else:
109 else:
110 defrange = 'tip:0'
110 defrange = 'tip:0'
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
112 wanted = {}
112 wanted = {}
113 slowpath = anypats
113 slowpath = anypats
114 fncache = {}
114 fncache = {}
115
115
116 chcache = {}
116 chcache = {}
117 def getchange(rev):
117 def getchange(rev):
118 ch = chcache.get(rev)
118 ch = chcache.get(rev)
119 if ch is None:
119 if ch is None:
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
121 return ch
121 return ch
122
122
123 if not slowpath and not files:
123 if not slowpath and not files:
124 # No files, no patterns. Display all revs.
124 # No files, no patterns. Display all revs.
125 wanted = dict(zip(revs, revs))
125 wanted = dict(zip(revs, revs))
126 copies = []
126 copies = []
127 if not slowpath:
127 if not slowpath:
128 # Only files, no patterns. Check the history of each file.
128 # Only files, no patterns. Check the history of each file.
129 def filerevgen(filelog, node):
129 def filerevgen(filelog, node):
130 cl_count = repo.changelog.count()
130 cl_count = repo.changelog.count()
131 if node is None:
131 if node is None:
132 last = filelog.count() - 1
132 last = filelog.count() - 1
133 else:
133 else:
134 last = filelog.rev(node)
134 last = filelog.rev(node)
135 for i, window in increasing_windows(last, -1):
135 for i, window in increasing_windows(last, -1):
136 revs = []
136 revs = []
137 for j in xrange(i - window, i + 1):
137 for j in xrange(i - window, i + 1):
138 n = filelog.node(j)
138 n = filelog.node(j)
139 revs.append((filelog.linkrev(n),
139 revs.append((filelog.linkrev(n),
140 follow and filelog.renamed(n)))
140 follow and filelog.renamed(n)))
141 revs.reverse()
141 revs.reverse()
142 for rev in revs:
142 for rev in revs:
143 # only yield revs for which we have the changelog entry; they can
143 # only yield revs for which we have the changelog entry; they can
144 # be missing while doing "hg log" during a pull or commit
144 # be missing while doing "hg log" during a pull or commit
145 if rev[0] < cl_count:
145 if rev[0] < cl_count:
146 yield rev
146 yield rev
147 def iterfiles():
147 def iterfiles():
148 for filename in files:
148 for filename in files:
149 yield filename, None
149 yield filename, None
150 for filename_node in copies:
150 for filename_node in copies:
151 yield filename_node
151 yield filename_node
152 minrev, maxrev = min(revs), max(revs)
152 minrev, maxrev = min(revs), max(revs)
153 for file_, node in iterfiles():
153 for file_, node in iterfiles():
154 filelog = repo.file(file_)
154 filelog = repo.file(file_)
155 # A zero count may be a directory or deleted file, so
155 # A zero count may be a directory or deleted file, so
156 # try to find matching entries on the slow path.
156 # try to find matching entries on the slow path.
157 if filelog.count() == 0:
157 if filelog.count() == 0:
158 slowpath = True
158 slowpath = True
159 break
159 break
160 for rev, copied in filerevgen(filelog, node):
160 for rev, copied in filerevgen(filelog, node):
161 if rev <= maxrev:
161 if rev <= maxrev:
162 if rev < minrev:
162 if rev < minrev:
163 break
163 break
164 fncache.setdefault(rev, [])
164 fncache.setdefault(rev, [])
165 fncache[rev].append(file_)
165 fncache[rev].append(file_)
166 wanted[rev] = 1
166 wanted[rev] = 1
167 if follow and copied:
167 if follow and copied:
168 copies.append(copied)
168 copies.append(copied)
169 if slowpath:
169 if slowpath:
170 if follow:
170 if follow:
171 raise util.Abort(_('can only follow copies/renames for explicit '
171 raise util.Abort(_('can only follow copies/renames for explicit '
172 'file names'))
172 'file names'))
173
173
174 # The slow path checks files modified in every changeset.
174 # The slow path checks files modified in every changeset.
175 def changerevgen():
175 def changerevgen():
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
177 for j in xrange(i - window, i + 1):
177 for j in xrange(i - window, i + 1):
178 yield j, getchange(j)[3]
178 yield j, getchange(j)[3]
179
179
180 for rev, changefiles in changerevgen():
180 for rev, changefiles in changerevgen():
181 matches = filter(matchfn, changefiles)
181 matches = filter(matchfn, changefiles)
182 if matches:
182 if matches:
183 fncache[rev] = matches
183 fncache[rev] = matches
184 wanted[rev] = 1
184 wanted[rev] = 1
185
185
186 class followfilter:
186 class followfilter:
187 def __init__(self, onlyfirst=False):
187 def __init__(self, onlyfirst=False):
188 self.startrev = -1
188 self.startrev = -1
189 self.roots = []
189 self.roots = []
190 self.onlyfirst = onlyfirst
190 self.onlyfirst = onlyfirst
191
191
192 def match(self, rev):
192 def match(self, rev):
193 def realparents(rev):
193 def realparents(rev):
194 if self.onlyfirst:
194 if self.onlyfirst:
195 return repo.changelog.parentrevs(rev)[0:1]
195 return repo.changelog.parentrevs(rev)[0:1]
196 else:
196 else:
197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
198
198
199 if self.startrev == -1:
199 if self.startrev == -1:
200 self.startrev = rev
200 self.startrev = rev
201 return True
201 return True
202
202
203 if rev > self.startrev:
203 if rev > self.startrev:
204 # forward: all descendants
204 # forward: all descendants
205 if not self.roots:
205 if not self.roots:
206 self.roots.append(self.startrev)
206 self.roots.append(self.startrev)
207 for parent in realparents(rev):
207 for parent in realparents(rev):
208 if parent in self.roots:
208 if parent in self.roots:
209 self.roots.append(rev)
209 self.roots.append(rev)
210 return True
210 return True
211 else:
211 else:
212 # backwards: all parents
212 # backwards: all parents
213 if not self.roots:
213 if not self.roots:
214 self.roots.extend(realparents(self.startrev))
214 self.roots.extend(realparents(self.startrev))
215 if rev in self.roots:
215 if rev in self.roots:
216 self.roots.remove(rev)
216 self.roots.remove(rev)
217 self.roots.extend(realparents(rev))
217 self.roots.extend(realparents(rev))
218 return True
218 return True
219
219
220 return False
220 return False
221
221
222 # it might be worthwhile to do this in the iterator if the rev range
222 # it might be worthwhile to do this in the iterator if the rev range
223 # is descending and the prune args are all within that range
223 # is descending and the prune args are all within that range
224 for rev in opts.get('prune', ()):
224 for rev in opts.get('prune', ()):
225 rev = repo.changelog.rev(repo.lookup(rev))
225 rev = repo.changelog.rev(repo.lookup(rev))
226 ff = followfilter()
226 ff = followfilter()
227 stop = min(revs[0], revs[-1])
227 stop = min(revs[0], revs[-1])
228 for x in range(rev, stop-1, -1):
228 for x in range(rev, stop-1, -1):
229 if ff.match(x) and wanted.has_key(x):
229 if ff.match(x) and wanted.has_key(x):
230 del wanted[x]
230 del wanted[x]
231
231
232 def iterate():
232 def iterate():
233 if follow and not files:
233 if follow and not files:
234 ff = followfilter(onlyfirst=opts.get('follow_first'))
234 ff = followfilter(onlyfirst=opts.get('follow_first'))
235 def want(rev):
235 def want(rev):
236 if ff.match(rev) and rev in wanted:
236 if ff.match(rev) and rev in wanted:
237 return True
237 return True
238 return False
238 return False
239 else:
239 else:
240 def want(rev):
240 def want(rev):
241 return rev in wanted
241 return rev in wanted
242
242
243 for i, window in increasing_windows(0, len(revs)):
243 for i, window in increasing_windows(0, len(revs)):
244 yield 'window', revs[0] < revs[-1], revs[-1]
244 yield 'window', revs[0] < revs[-1], revs[-1]
245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
246 srevs = list(nrevs)
246 srevs = list(nrevs)
247 srevs.sort()
247 srevs.sort()
248 for rev in srevs:
248 for rev in srevs:
249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
250 yield 'add', rev, fns
250 yield 'add', rev, fns
251 for rev in nrevs:
251 for rev in nrevs:
252 yield 'iter', rev, None
252 yield 'iter', rev, None
253 return iterate(), getchange, matchfn
253 return iterate(), getchange, matchfn
254
254
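# A minimal sketch of how the triple returned above is usually consumed: the
# iterator is an event stream where 'window' opens a batch, 'add' announces a
# revision (with its matched file names) and 'iter' replays revisions in the
# requested display order.  Assumes changeiter/getchange came from the call above.
def _walk_events(changeiter, getchange):
    prepared = {}
    for event, rev, fns in changeiter:
        if event == 'window':
            prepared.clear()                 # a new batch begins
        elif event == 'add':
            prepared[rev] = fns              # files touched by this revision
        elif event == 'iter':
            yield rev, prepared[rev], getchange(rev)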
255 revrangesep = ':'
255 revrangesep = ':'
256
256
257 def revfix(repo, val, defval):
257 def revfix(repo, val, defval):
258 '''turn user-level id of changeset into rev number.
258 '''turn user-level id of changeset into rev number.
259 user-level id can be tag, changeset, rev number, or negative rev
259 user-level id can be tag, changeset, rev number, or negative rev
260 number relative to number of revs (-1 is tip, etc).'''
260 number relative to number of revs (-1 is tip, etc).'''
261 if not val:
261 if not val:
262 return defval
262 return defval
263 try:
263 try:
264 num = int(val)
264 num = int(val)
265 if str(num) != val:
265 if str(num) != val:
266 raise ValueError
266 raise ValueError
267 if num < 0:
267 if num < 0:
268 num += repo.changelog.count()
268 num += repo.changelog.count()
269 if num < 0:
269 if num < 0:
270 num = 0
270 num = 0
271 elif num >= repo.changelog.count():
271 elif num >= repo.changelog.count():
272 raise ValueError
272 raise ValueError
273 except ValueError:
273 except ValueError:
274 try:
274 try:
275 num = repo.changelog.rev(repo.lookup(val))
275 num = repo.changelog.rev(repo.lookup(val))
276 except KeyError:
276 except KeyError:
277 raise util.Abort(_('invalid revision identifier %s'), val)
277 raise util.Abort(_('invalid revision identifier %s'), val)
278 return num
278 return num
279
279
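# A minimal sketch of revfix()'s numeric handling: plain numbers pass through,
# negative numbers count back from the end, and anything else is resolved via
# repo.lookup().  Self-contained mimic of the numeric branch only (count is a
# stand-in for repo.changelog.count()):
def _demo_revfix(val, defval, count):
    if not val:
        return defval
    num = int(val)                  # non-numeric ids go through repo.lookup()
    if num < 0:
        num = max(num + count, 0)   # -1 is the tip, large negatives clamp to 0
    return num
# _demo_revfix('-1', None, 100) == 99; _demo_revfix('', 42, 100) == 42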
280 def revpair(ui, repo, revs):
280 def revpair(ui, repo, revs):
281 '''return pair of nodes, given list of revisions. second item can
281 '''return pair of nodes, given list of revisions. second item can
282 be None, meaning use working dir.'''
282 be None, meaning use working dir.'''
283 if not revs:
283 if not revs:
284 return repo.dirstate.parents()[0], None
284 return repo.dirstate.parents()[0], None
285 end = None
285 end = None
286 if len(revs) == 1:
286 if len(revs) == 1:
287 start = revs[0]
287 start = revs[0]
288 if revrangesep in start:
288 if revrangesep in start:
289 start, end = start.split(revrangesep, 1)
289 start, end = start.split(revrangesep, 1)
290 start = revfix(repo, start, 0)
290 start = revfix(repo, start, 0)
291 end = revfix(repo, end, repo.changelog.count() - 1)
291 end = revfix(repo, end, repo.changelog.count() - 1)
292 else:
292 else:
293 start = revfix(repo, start, None)
293 start = revfix(repo, start, None)
294 elif len(revs) == 2:
294 elif len(revs) == 2:
295 if revrangesep in revs[0] or revrangesep in revs[1]:
295 if revrangesep in revs[0] or revrangesep in revs[1]:
296 raise util.Abort(_('too many revisions specified'))
296 raise util.Abort(_('too many revisions specified'))
297 start = revfix(repo, revs[0], None)
297 start = revfix(repo, revs[0], None)
298 end = revfix(repo, revs[1], None)
298 end = revfix(repo, revs[1], None)
299 else:
299 else:
300 raise util.Abort(_('too many revisions specified'))
300 raise util.Abort(_('too many revisions specified'))
301 if end is not None: end = repo.lookup(str(end))
301 if end is not None: end = repo.lookup(str(end))
302 return repo.lookup(str(start)), end
302 return repo.lookup(str(start)), end
303
303
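# A minimal sketch of the calling convention: a second item of None from
# revpair() means "compare against the working directory".  Assumes ui and repo
# are live objects for an existing repository with at least six revisions.
def _demo_revpair(ui, repo):
    node1, node2 = revpair(ui, repo, ['2:5'])    # explicit range: two nodes
    first, second = revpair(ui, repo, [])        # no revs: parent vs. working dir
    return node2 is not None and second is None  # True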
304 def revrange(ui, repo, revs):
304 def revrange(ui, repo, revs):
305 """Yield revision as strings from a list of revision specifications."""
305 """Yield revision as strings from a list of revision specifications."""
306 seen = {}
306 seen = {}
307 for spec in revs:
307 for spec in revs:
308 if revrangesep in spec:
308 if revrangesep in spec:
309 start, end = spec.split(revrangesep, 1)
309 start, end = spec.split(revrangesep, 1)
310 start = revfix(repo, start, 0)
310 start = revfix(repo, start, 0)
311 end = revfix(repo, end, repo.changelog.count() - 1)
311 end = revfix(repo, end, repo.changelog.count() - 1)
312 step = start > end and -1 or 1
312 step = start > end and -1 or 1
313 for rev in xrange(start, end+step, step):
313 for rev in xrange(start, end+step, step):
314 if rev in seen:
314 if rev in seen:
315 continue
315 continue
316 seen[rev] = 1
316 seen[rev] = 1
317 yield str(rev)
317 yield str(rev)
318 else:
318 else:
319 rev = revfix(repo, spec, None)
319 rev = revfix(repo, spec, None)
320 if rev in seen:
320 if rev in seen:
321 continue
321 continue
322 seen[rev] = 1
322 seen[rev] = 1
323 yield str(rev)
323 yield str(rev)
324
324
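# A minimal sketch of revrange() expansion: each spec becomes individual
# revision numbers (as strings), walking backwards when start > end and
# skipping revisions already seen.  Assumes a repository with six revisions.
def _demo_revrange(ui, repo):
    # '5:3' walks backwards; the extra '4' is dropped as a duplicate
    return list(revrange(ui, repo, ['5:3', '4']))   # -> ['5', '4', '3']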
325 def write_bundle(cg, filename=None, compress=True):
325 def write_bundle(cg, filename=None, compress=True):
326 """Write a bundle file and return its filename.
326 """Write a bundle file and return its filename.
327
327
328 Existing files will not be overwritten.
328 Existing files will not be overwritten.
329 If no filename is specified, a temporary file is created.
329 If no filename is specified, a temporary file is created.
330 bz2 compression can be turned off.
330 bz2 compression can be turned off.
331 The bundle file will be deleted in case of errors.
331 The bundle file will be deleted in case of errors.
332 """
332 """
333 class nocompress(object):
333 class nocompress(object):
334 def compress(self, x):
334 def compress(self, x):
335 return x
335 return x
336 def flush(self):
336 def flush(self):
337 return ""
337 return ""
338
338
339 fh = None
339 fh = None
340 cleanup = None
340 cleanup = None
341 try:
341 try:
342 if filename:
342 if filename:
343 if os.path.exists(filename):
343 if os.path.exists(filename):
344 raise util.Abort(_("file '%s' already exists"), filename)
344 raise util.Abort(_("file '%s' already exists"), filename)
345 fh = open(filename, "wb")
345 fh = open(filename, "wb")
346 else:
346 else:
347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
348 fh = os.fdopen(fd, "wb")
348 fh = os.fdopen(fd, "wb")
349 cleanup = filename
349 cleanup = filename
350
350
351 if compress:
351 if compress:
352 fh.write("HG10")
352 fh.write("HG10")
353 z = bz2.BZ2Compressor(9)
353 z = bz2.BZ2Compressor(9)
354 else:
354 else:
355 fh.write("HG10UN")
355 fh.write("HG10UN")
356 z = nocompress()
356 z = nocompress()
357 # parse the changegroup data, otherwise we will block
357 # parse the changegroup data, otherwise we will block
358 # in case of sshrepo because we don't know the end of the stream
358 # in case of sshrepo because we don't know the end of the stream
359
359
360 # an empty chunkiter is the end of the changegroup
360 # an empty chunkiter is the end of the changegroup
361 empty = False
361 empty = False
362 while not empty:
362 while not empty:
363 empty = True
363 empty = True
364 for chunk in changegroup.chunkiter(cg):
364 for chunk in changegroup.chunkiter(cg):
365 empty = False
365 empty = False
366 fh.write(z.compress(changegroup.genchunk(chunk)))
366 fh.write(z.compress(changegroup.genchunk(chunk)))
367 fh.write(z.compress(changegroup.closechunk()))
367 fh.write(z.compress(changegroup.closechunk()))
368 fh.write(z.flush())
368 fh.write(z.flush())
369 cleanup = None
369 cleanup = None
370 return filename
370 return filename
371 finally:
371 finally:
372 if fh is not None:
372 if fh is not None:
373 fh.close()
373 fh.close()
374 if cleanup is not None:
374 if cleanup is not None:
375 os.unlink(cleanup)
375 os.unlink(cleanup)
376
376
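# A minimal sketch of the on-disk layout produced above: a short magic header
# ("HG10" followed by a bz2 stream, or the literal "HG10UN" when uncompressed)
# and then the compressed changegroup chunks.  A helper to peek at the kind:
def _bundle_kind(path):
    fh = open(path, 'rb')
    try:
        header = fh.read(6)
    finally:
        fh.close()
    if header == 'HG10UN':
        return 'uncompressed'
    if header.startswith('HG10'):
        return 'bz2 compressed'
    return 'unknown'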
377 def trimuser(ui, name, rev, revcache):
377 def trimuser(ui, name, rev, revcache):
378 """trim the name of the user who committed a change"""
378 """trim the name of the user who committed a change"""
379 user = revcache.get(rev)
379 user = revcache.get(rev)
380 if user is None:
380 if user is None:
381 user = revcache[rev] = ui.shortuser(name)
381 user = revcache[rev] = ui.shortuser(name)
382 return user
382 return user
383
383
384 class changeset_printer(object):
384 class changeset_printer(object):
385 '''show changeset information when templating not requested.'''
385 '''show changeset information when templating not requested.'''
386
386
387 def __init__(self, ui, repo):
387 def __init__(self, ui, repo):
388 self.ui = ui
388 self.ui = ui
389 self.repo = repo
389 self.repo = repo
390
390
391 def show(self, rev=0, changenode=None, brinfo=None):
391 def show(self, rev=0, changenode=None, brinfo=None):
392 '''show a single changeset or file revision'''
392 '''show a single changeset or file revision'''
393 log = self.repo.changelog
393 log = self.repo.changelog
394 if changenode is None:
394 if changenode is None:
395 changenode = log.node(rev)
395 changenode = log.node(rev)
396 elif not rev:
396 elif not rev:
397 rev = log.rev(changenode)
397 rev = log.rev(changenode)
398
398
399 if self.ui.quiet:
399 if self.ui.quiet:
400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
401 return
401 return
402
402
403 changes = log.read(changenode)
403 changes = log.read(changenode)
404 date = util.datestr(changes[2])
404 date = util.datestr(changes[2])
405
405
406 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
406 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
407 for p in log.parents(changenode)
407 for p in log.parents(changenode)
408 if self.ui.debugflag or p != nullid]
408 if self.ui.debugflag or p != nullid]
409 if (not self.ui.debugflag and len(parents) == 1 and
409 if (not self.ui.debugflag and len(parents) == 1 and
410 parents[0][0] == rev-1):
410 parents[0][0] == rev-1):
411 parents = []
411 parents = []
412
412
413 if self.ui.verbose:
413 if self.ui.verbose:
414 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
414 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
415 else:
415 else:
416 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
416 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
417
417
418 for tag in self.repo.nodetags(changenode):
418 for tag in self.repo.nodetags(changenode):
419 self.ui.status(_("tag: %s\n") % tag)
419 self.ui.status(_("tag: %s\n") % tag)
420 for parent in parents:
420 for parent in parents:
421 self.ui.write(_("parent: %d:%s\n") % parent)
421 self.ui.write(_("parent: %d:%s\n") % parent)
422
422
423 if brinfo and changenode in brinfo:
423 if brinfo and changenode in brinfo:
424 br = brinfo[changenode]
424 br = brinfo[changenode]
425 self.ui.write(_("branch: %s\n") % " ".join(br))
425 self.ui.write(_("branch: %s\n") % " ".join(br))
426
426
427 self.ui.debug(_("manifest: %d:%s\n") %
427 self.ui.debug(_("manifest: %d:%s\n") %
428 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
428 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
429 self.ui.status(_("user: %s\n") % changes[1])
429 self.ui.status(_("user: %s\n") % changes[1])
430 self.ui.status(_("date: %s\n") % date)
430 self.ui.status(_("date: %s\n") % date)
431
431
432 if self.ui.debugflag:
432 if self.ui.debugflag:
433 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
433 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
434 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
434 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
435 files):
435 files):
436 if value:
436 if value:
437 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
437 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
438 else:
438 else:
439 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
439 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
440
440
441 description = changes[4].strip()
441 description = changes[4].strip()
442 if description:
442 if description:
443 if self.ui.verbose:
443 if self.ui.verbose:
444 self.ui.status(_("description:\n"))
444 self.ui.status(_("description:\n"))
445 self.ui.status(description)
445 self.ui.status(description)
446 self.ui.status("\n\n")
446 self.ui.status("\n\n")
447 else:
447 else:
448 self.ui.status(_("summary: %s\n") %
448 self.ui.status(_("summary: %s\n") %
449 description.splitlines()[0])
449 description.splitlines()[0])
450 self.ui.status("\n")
450 self.ui.status("\n")
451
451
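# A minimal sketch of driving the printer above: in quiet mode it writes only
# "rev:shortnode", otherwise the changeset/tag/parent/user/date header.
# Assumes ui and repo are live objects.
def _demo_print_tip(ui, repo):
    printer = changeset_printer(ui, repo)
    tiprev = repo.changelog.count() - 1
    printer.show(rev=tiprev)        # writes the header for the tip changeset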
452 def show_changeset(ui, repo, opts):
452 def show_changeset(ui, repo, opts):
453 '''show one changeset. uses template or regular display. caller
453 '''show one changeset. uses template or regular display. caller
454 can pass in 'style' and 'template' options in opts.'''
454 can pass in 'style' and 'template' options in opts.'''
455
455
456 tmpl = opts.get('template')
456 tmpl = opts.get('template')
457 if tmpl:
457 if tmpl:
458 tmpl = templater.parsestring(tmpl, quoted=False)
458 tmpl = templater.parsestring(tmpl, quoted=False)
459 else:
459 else:
460 tmpl = ui.config('ui', 'logtemplate')
460 tmpl = ui.config('ui', 'logtemplate')
461 if tmpl: tmpl = templater.parsestring(tmpl)
461 if tmpl: tmpl = templater.parsestring(tmpl)
462 mapfile = opts.get('style') or ui.config('ui', 'style')
462 mapfile = opts.get('style') or ui.config('ui', 'style')
463 if tmpl or mapfile:
463 if tmpl or mapfile:
464 if mapfile:
464 if mapfile:
465 if not os.path.isfile(mapfile):
465 if not os.path.isfile(mapfile):
466 mapname = templater.templatepath('map-cmdline.' + mapfile)
466 mapname = templater.templatepath('map-cmdline.' + mapfile)
467 if not mapname: mapname = templater.templatepath(mapfile)
467 if not mapname: mapname = templater.templatepath(mapfile)
468 if mapname: mapfile = mapname
468 if mapname: mapfile = mapname
469 try:
469 try:
470 t = templater.changeset_templater(ui, repo, mapfile)
470 t = templater.changeset_templater(ui, repo, mapfile)
471 except SyntaxError, inst:
471 except SyntaxError, inst:
472 raise util.Abort(inst.args[0])
472 raise util.Abort(inst.args[0])
473 if tmpl: t.use_template(tmpl)
473 if tmpl: t.use_template(tmpl)
474 return t
474 return t
475 return changeset_printer(ui, repo)
475 return changeset_printer(ui, repo)
476
476
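# A minimal sketch of the precedence implemented above: an explicit 'template'
# option wins, then ui.logtemplate, then a 'style' map file, and finally the
# plain changeset_printer.  Hedged example of forcing a one-line template
# (assumes ui/repo are live and the {rev}/{node|short} keywords are available):
def _demo_oneline(ui, repo, rev):
    displayer = show_changeset(ui, repo, {'template': '{rev}:{node|short}\n'})
    displayer.show(rev=rev)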
477 def setremoteconfig(ui, opts):
477 def setremoteconfig(ui, opts):
478 "copy remote options to ui tree"
478 "copy remote options to ui tree"
479 if opts.get('ssh'):
479 if opts.get('ssh'):
480 ui.setconfig("ui", "ssh", opts['ssh'])
480 ui.setconfig("ui", "ssh", opts['ssh'])
481 if opts.get('remotecmd'):
481 if opts.get('remotecmd'):
482 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
482 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
483
483
484 def show_version(ui):
484 def show_version(ui):
485 """output version and copyright information"""
485 """output version and copyright information"""
486 ui.write(_("Mercurial Distributed SCM (version %s)\n")
486 ui.write(_("Mercurial Distributed SCM (version %s)\n")
487 % version.get_version())
487 % version.get_version())
488 ui.status(_(
488 ui.status(_(
489 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
489 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
490 "This is free software; see the source for copying conditions. "
490 "This is free software; see the source for copying conditions. "
491 "There is NO\nwarranty; "
491 "There is NO\nwarranty; "
492 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
492 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
493 ))
493 ))
494
494
495 def help_(ui, name=None, with_version=False):
495 def help_(ui, name=None, with_version=False):
496 """show help for a command, extension, or list of commands
496 """show help for a command, extension, or list of commands
497
497
498 With no arguments, print a list of commands and short help.
498 With no arguments, print a list of commands and short help.
499
499
500 Given a command name, print help for that command.
500 Given a command name, print help for that command.
501
501
502 Given an extension name, print help for that extension, and the
502 Given an extension name, print help for that extension, and the
503 commands it provides."""
503 commands it provides."""
504 option_lists = []
504 option_lists = []
505
505
506 def helpcmd(name):
506 def helpcmd(name):
507 if with_version:
507 if with_version:
508 show_version(ui)
508 show_version(ui)
509 ui.write('\n')
509 ui.write('\n')
510 aliases, i = findcmd(name)
510 aliases, i = findcmd(name)
511 # synopsis
511 # synopsis
512 ui.write("%s\n\n" % i[2])
512 ui.write("%s\n\n" % i[2])
513
513
514 # description
514 # description
515 doc = i[0].__doc__
515 doc = i[0].__doc__
516 if not doc:
516 if not doc:
517 doc = _("(No help text available)")
517 doc = _("(No help text available)")
518 if ui.quiet:
518 if ui.quiet:
519 doc = doc.splitlines(0)[0]
519 doc = doc.splitlines(0)[0]
520 ui.write("%s\n" % doc.rstrip())
520 ui.write("%s\n" % doc.rstrip())
521
521
522 if not ui.quiet:
522 if not ui.quiet:
523 # aliases
523 # aliases
524 if len(aliases) > 1:
524 if len(aliases) > 1:
525 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
525 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
526
526
527 # options
527 # options
528 if i[1]:
528 if i[1]:
529 option_lists.append(("options", i[1]))
529 option_lists.append(("options", i[1]))
530
530
531 def helplist(select=None):
531 def helplist(select=None):
532 h = {}
532 h = {}
533 cmds = {}
533 cmds = {}
534 for c, e in table.items():
534 for c, e in table.items():
535 f = c.split("|", 1)[0]
535 f = c.split("|", 1)[0]
536 if select and not select(f):
536 if select and not select(f):
537 continue
537 continue
538 if name == "shortlist" and not f.startswith("^"):
538 if name == "shortlist" and not f.startswith("^"):
539 continue
539 continue
540 f = f.lstrip("^")
540 f = f.lstrip("^")
541 if not ui.debugflag and f.startswith("debug"):
541 if not ui.debugflag and f.startswith("debug"):
542 continue
542 continue
543 doc = e[0].__doc__
543 doc = e[0].__doc__
544 if not doc:
544 if not doc:
545 doc = _("(No help text available)")
545 doc = _("(No help text available)")
546 h[f] = doc.splitlines(0)[0].rstrip()
546 h[f] = doc.splitlines(0)[0].rstrip()
547 cmds[f] = c.lstrip("^")
547 cmds[f] = c.lstrip("^")
548
548
549 fns = h.keys()
549 fns = h.keys()
550 fns.sort()
550 fns.sort()
551 m = max(map(len, fns))
551 m = max(map(len, fns))
552 for f in fns:
552 for f in fns:
553 if ui.verbose:
553 if ui.verbose:
554 commands = cmds[f].replace("|", ", ")
554 commands = cmds[f].replace("|", ", ")
555 ui.write(" %s:\n %s\n" % (commands, h[f]))
555 ui.write(" %s:\n %s\n" % (commands, h[f]))
556 else:
556 else:
557 ui.write(' %-*s %s\n' % (m, f, h[f]))
557 ui.write(' %-*s %s\n' % (m, f, h[f]))
558
558
559 def helpext(name):
559 def helpext(name):
560 try:
560 try:
561 mod = findext(name)
561 mod = findext(name)
562 except KeyError:
562 except KeyError:
563 raise UnknownCommand(name)
563 raise UnknownCommand(name)
564
564
565 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
565 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
566 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
566 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
567 for d in doc[1:]:
567 for d in doc[1:]:
568 ui.write(d, '\n')
568 ui.write(d, '\n')
569
569
570 ui.status('\n')
570 ui.status('\n')
571 if ui.verbose:
571 if ui.verbose:
572 ui.status(_('list of commands:\n\n'))
572 ui.status(_('list of commands:\n\n'))
573 else:
573 else:
574 ui.status(_('list of commands (use "hg help -v %s" '
574 ui.status(_('list of commands (use "hg help -v %s" '
575 'to show aliases and global options):\n\n') % name)
575 'to show aliases and global options):\n\n') % name)
576
576
577 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
577 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
578 helplist(modcmds.has_key)
578 helplist(modcmds.has_key)
579
579
580 if name and name != 'shortlist':
580 if name and name != 'shortlist':
581 try:
581 try:
582 helpcmd(name)
582 helpcmd(name)
583 except UnknownCommand:
583 except UnknownCommand:
584 helpext(name)
584 helpext(name)
585
585
586 else:
586 else:
587 # program name
587 # program name
588 if ui.verbose or with_version:
588 if ui.verbose or with_version:
589 show_version(ui)
589 show_version(ui)
590 else:
590 else:
591 ui.status(_("Mercurial Distributed SCM\n"))
591 ui.status(_("Mercurial Distributed SCM\n"))
592 ui.status('\n')
592 ui.status('\n')
593
593
594 # list of commands
594 # list of commands
595 if name == "shortlist":
595 if name == "shortlist":
596 ui.status(_('basic commands (use "hg help" '
596 ui.status(_('basic commands (use "hg help" '
597 'for the full list or option "-v" for details):\n\n'))
597 'for the full list or option "-v" for details):\n\n'))
598 elif ui.verbose:
598 elif ui.verbose:
599 ui.status(_('list of commands:\n\n'))
599 ui.status(_('list of commands:\n\n'))
600 else:
600 else:
601 ui.status(_('list of commands (use "hg help -v" '
601 ui.status(_('list of commands (use "hg help -v" '
602 'to show aliases and global options):\n\n'))
602 'to show aliases and global options):\n\n'))
603
603
604 helplist()
604 helplist()
605
605
606 # global options
606 # global options
607 if ui.verbose:
607 if ui.verbose:
608 option_lists.append(("global options", globalopts))
608 option_lists.append(("global options", globalopts))
609
609
610 # list all option lists
610 # list all option lists
611 opt_output = []
611 opt_output = []
612 for title, options in option_lists:
612 for title, options in option_lists:
613 opt_output.append(("\n%s:\n" % title, None))
613 opt_output.append(("\n%s:\n" % title, None))
614 for shortopt, longopt, default, desc in options:
614 for shortopt, longopt, default, desc in options:
615 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
615 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
616 longopt and " --%s" % longopt),
616 longopt and " --%s" % longopt),
617 "%s%s" % (desc,
617 "%s%s" % (desc,
618 default
618 default
619 and _(" (default: %s)") % default
619 and _(" (default: %s)") % default
620 or "")))
620 or "")))
621
621
622 if opt_output:
622 if opt_output:
623 opts_len = max([len(line[0]) for line in opt_output if line[1]])
623 opts_len = max([len(line[0]) for line in opt_output if line[1]])
624 for first, second in opt_output:
624 for first, second in opt_output:
625 if second:
625 if second:
626 ui.write(" %-*s %s\n" % (opts_len, first, second))
626 ui.write(" %-*s %s\n" % (opts_len, first, second))
627 else:
627 else:
628 ui.write("%s\n" % first)
628 ui.write("%s\n" % first)
629
629
630 # Commands start here, listed alphabetically
630 # Commands start here, listed alphabetically
631
631
632 def add(ui, repo, *pats, **opts):
632 def add(ui, repo, *pats, **opts):
633 """add the specified files on the next commit
633 """add the specified files on the next commit
634
634
635 Schedule files to be version controlled and added to the repository.
635 Schedule files to be version controlled and added to the repository.
636
636
637 The files will be added to the repository at the next commit.
637 The files will be added to the repository at the next commit.
638
638
639 If no names are given, add all files in the repository.
639 If no names are given, add all files in the repository.
640 """
640 """
641
641
642 names = []
642 names = []
643 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
643 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
644 if exact:
644 if exact:
645 if ui.verbose:
645 if ui.verbose:
646 ui.status(_('adding %s\n') % rel)
646 ui.status(_('adding %s\n') % rel)
647 names.append(abs)
647 names.append(abs)
648 elif repo.dirstate.state(abs) == '?':
648 elif repo.dirstate.state(abs) == '?':
649 ui.status(_('adding %s\n') % rel)
649 ui.status(_('adding %s\n') % rel)
650 names.append(abs)
650 names.append(abs)
651 if not opts.get('dry_run'):
651 if not opts.get('dry_run'):
652 repo.add(names)
652 repo.add(names)
653
653
654 def addremove(ui, repo, *pats, **opts):
654 def addremove(ui, repo, *pats, **opts):
655 """add all new files, delete all missing files (DEPRECATED)
655 """add all new files, delete all missing files (DEPRECATED)
656
656
657 (DEPRECATED)
657 (DEPRECATED)
658 Add all new files and remove all missing files from the repository.
658 Add all new files and remove all missing files from the repository.
659
659
660 New files are ignored if they match any of the patterns in .hgignore. As
660 New files are ignored if they match any of the patterns in .hgignore. As
661 with add, these changes take effect at the next commit.
661 with add, these changes take effect at the next commit.
662
662
663 This command is now deprecated and will be removed in a future
663 This command is now deprecated and will be removed in a future
664 release. Please use add and remove --after instead.
664 release. Please use add and remove --after instead.
665 """
665 """
666 ui.warn(_('(the addremove command is deprecated; use add and remove '
666 ui.warn(_('(the addremove command is deprecated; use add and remove '
667 '--after instead)\n'))
667 '--after instead)\n'))
668 return cmdutil.addremove(repo, pats, opts)
668 return cmdutil.addremove(repo, pats, opts)
669
669
670 def annotate(ui, repo, *pats, **opts):
670 def annotate(ui, repo, *pats, **opts):
671 """show changeset information per file line
671 """show changeset information per file line
672
672
673 List changes in files, showing the revision id responsible for each line.
673 List changes in files, showing the revision id responsible for each line.
674
674
675 This command is useful to discover who did a change or when a change took
675 This command is useful to discover who did a change or when a change took
676 place.
676 place.
677
677
678 Without the -a option, annotate will avoid processing files it
678 Without the -a option, annotate will avoid processing files it
679 detects as binary. With -a, annotate will generate an annotation
679 detects as binary. With -a, annotate will generate an annotation
680 anyway, probably with undesirable results.
680 anyway, probably with undesirable results.
681 """
681 """
682 def getnode(rev):
682 def getnode(rev):
683 return short(repo.changelog.node(rev))
683 return short(repo.changelog.node(rev))
684
684
685 ucache = {}
685 ucache = {}
686 def getname(rev):
686 def getname(rev):
687 try:
687 try:
688 return ucache[rev]
688 return ucache[rev]
689 except KeyError:
689 except KeyError:
690 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
690 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
691 ucache[rev] = u
691 ucache[rev] = u
692 return u
692 return u
693
693
694 dcache = {}
694 dcache = {}
695 def getdate(rev):
695 def getdate(rev):
696 datestr = dcache.get(rev)
696 datestr = dcache.get(rev)
697 if datestr is None:
697 if datestr is None:
698 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
698 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
699 return datestr
699 return datestr
700
700
701 if not pats:
701 if not pats:
702 raise util.Abort(_('at least one file name or pattern required'))
702 raise util.Abort(_('at least one file name or pattern required'))
703
703
704 opmap = [['user', getname], ['number', str], ['changeset', getnode],
704 opmap = [['user', getname], ['number', str], ['changeset', getnode],
705 ['date', getdate]]
705 ['date', getdate]]
706 if not opts['user'] and not opts['changeset'] and not opts['date']:
706 if not opts['user'] and not opts['changeset'] and not opts['date']:
707 opts['number'] = 1
707 opts['number'] = 1
708
708
709 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
709 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
710
710
711 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
711 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
712 node=ctx.node()):
712 node=ctx.node()):
713 fctx = ctx.filectx(abs)
713 fctx = ctx.filectx(abs)
714 if not opts['text'] and util.binary(fctx.data()):
714 if not opts['text'] and util.binary(fctx.data()):
715 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
715 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
716 continue
716 continue
717
717
718 lines = fctx.annotate()
718 lines = fctx.annotate()
719 pieces = []
719 pieces = []
720
720
721 for o, f in opmap:
721 for o, f in opmap:
722 if opts[o]:
722 if opts[o]:
723 l = [f(n) for n, dummy in lines]
723 l = [f(n) for n, dummy in lines]
724 if l:
724 if l:
725 m = max(map(len, l))
725 m = max(map(len, l))
726 pieces.append(["%*s" % (m, x) for x in l])
726 pieces.append(["%*s" % (m, x) for x in l])
727
727
728 if pieces:
728 if pieces:
729 for p, l in zip(zip(*pieces), lines):
729 for p, l in zip(zip(*pieces), lines):
730 ui.write("%s: %s" % (" ".join(p), l[1]))
730 ui.write("%s: %s" % (" ".join(p), l[1]))
731
731
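# A minimal sketch of the column handling above: each requested field is
# right-aligned to its widest value, and zip(*pieces) turns the per-column
# lists back into per-line tuples before printing.  The same technique alone:
def _demo_columns(columns, lines):
    pieces = []
    for col in columns:                       # e.g. user names, rev numbers
        width = max([len(x) for x in col])
        pieces.append(["%*s" % (width, x) for x in col])
    return ["%s: %s" % (" ".join(p), l) for p, l in zip(zip(*pieces), lines)]
# _demo_columns([['mpm', 'vadim'], ['7', '12']], ['first\n', 'second\n'])
#   -> ['  mpm  7: first\n', 'vadim 12: second\n']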
732 def archive(ui, repo, dest, **opts):
732 def archive(ui, repo, dest, **opts):
733 '''create unversioned archive of a repository revision
733 '''create unversioned archive of a repository revision
734
734
735 By default, the revision used is the parent of the working
735 By default, the revision used is the parent of the working
736 directory; use "-r" to specify a different revision.
736 directory; use "-r" to specify a different revision.
737
737
738 To specify the type of archive to create, use "-t". Valid
738 To specify the type of archive to create, use "-t". Valid
739 types are:
739 types are:
740
740
741 "files" (default): a directory full of files
741 "files" (default): a directory full of files
742 "tar": tar archive, uncompressed
742 "tar": tar archive, uncompressed
743 "tbz2": tar archive, compressed using bzip2
743 "tbz2": tar archive, compressed using bzip2
744 "tgz": tar archive, compressed using gzip
744 "tgz": tar archive, compressed using gzip
745 "uzip": zip archive, uncompressed
745 "uzip": zip archive, uncompressed
746 "zip": zip archive, compressed using deflate
746 "zip": zip archive, compressed using deflate
747
747
748 The exact name of the destination archive or directory is given
748 The exact name of the destination archive or directory is given
749 using a format string; see "hg help export" for details.
749 using a format string; see "hg help export" for details.
750
750
751 Each member added to an archive file has a directory prefix
751 Each member added to an archive file has a directory prefix
752 prepended. Use "-p" to specify a format string for the prefix.
752 prepended. Use "-p" to specify a format string for the prefix.
753 The default is the basename of the archive, with suffixes removed.
753 The default is the basename of the archive, with suffixes removed.
754 '''
754 '''
755
755
756 if opts['rev']:
756 if opts['rev']:
757 node = repo.lookup(opts['rev'])
757 node = repo.lookup(opts['rev'])
758 else:
758 else:
759 node, p2 = repo.dirstate.parents()
759 node, p2 = repo.dirstate.parents()
760 if p2 != nullid:
760 if p2 != nullid:
761 raise util.Abort(_('uncommitted merge - please provide a '
761 raise util.Abort(_('uncommitted merge - please provide a '
762 'specific revision'))
762 'specific revision'))
763
763
764 dest = cmdutil.make_filename(repo, dest, node)
764 dest = cmdutil.make_filename(repo, dest, node)
765 if os.path.realpath(dest) == repo.root:
765 if os.path.realpath(dest) == repo.root:
766 raise util.Abort(_('repository root cannot be destination'))
766 raise util.Abort(_('repository root cannot be destination'))
767 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
767 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
768 kind = opts.get('type') or 'files'
768 kind = opts.get('type') or 'files'
769 prefix = opts['prefix']
769 prefix = opts['prefix']
770 if dest == '-':
770 if dest == '-':
771 if kind == 'files':
771 if kind == 'files':
772 raise util.Abort(_('cannot archive plain files to stdout'))
772 raise util.Abort(_('cannot archive plain files to stdout'))
773 dest = sys.stdout
773 dest = sys.stdout
774 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
774 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
775 prefix = cmdutil.make_filename(repo, prefix, node)
775 prefix = cmdutil.make_filename(repo, prefix, node)
776 archival.archive(repo, dest, node, kind, not opts['no_decode'],
776 archival.archive(repo, dest, node, kind, not opts['no_decode'],
777 matchfn, prefix)
777 matchfn, prefix)
778
778
779 def backout(ui, repo, rev, **opts):
779 def backout(ui, repo, rev, **opts):
780 '''reverse effect of earlier changeset
780 '''reverse effect of earlier changeset
781
781
782 Commit the backed out changes as a new changeset. The new
782 Commit the backed out changes as a new changeset. The new
783 changeset is a child of the backed out changeset.
783 changeset is a child of the backed out changeset.
784
784
785 If you back out a changeset other than the tip, a new head is
785 If you back out a changeset other than the tip, a new head is
786 created. This head is the parent of the working directory. If
786 created. This head is the parent of the working directory. If
787 you back out an old changeset, your working directory will appear
787 you back out an old changeset, your working directory will appear
788 old after the backout. You should merge the backout changeset
788 old after the backout. You should merge the backout changeset
789 with another head.
789 with another head.
790
790
791 The --merge option remembers the parent of the working directory
791 The --merge option remembers the parent of the working directory
792 before starting the backout, then merges the new head with that
792 before starting the backout, then merges the new head with that
793 changeset afterwards. This saves you from doing the merge by
793 changeset afterwards. This saves you from doing the merge by
794 hand. The result of this merge is not committed, as for a normal
794 hand. The result of this merge is not committed, as for a normal
795 merge.'''
795 merge.'''
796
796
797 bail_if_changed(repo)
797 bail_if_changed(repo)
798 op1, op2 = repo.dirstate.parents()
798 op1, op2 = repo.dirstate.parents()
799 if op2 != nullid:
799 if op2 != nullid:
800 raise util.Abort(_('outstanding uncommitted merge'))
800 raise util.Abort(_('outstanding uncommitted merge'))
801 node = repo.lookup(rev)
801 node = repo.lookup(rev)
802 p1, p2 = repo.changelog.parents(node)
802 p1, p2 = repo.changelog.parents(node)
803 if p1 == nullid:
803 if p1 == nullid:
804 raise util.Abort(_('cannot back out a change with no parents'))
804 raise util.Abort(_('cannot back out a change with no parents'))
805 if p2 != nullid:
805 if p2 != nullid:
806 if not opts['parent']:
806 if not opts['parent']:
807 raise util.Abort(_('cannot back out a merge changeset without '
807 raise util.Abort(_('cannot back out a merge changeset without '
808 '--parent'))
808 '--parent'))
809 p = repo.lookup(opts['parent'])
809 p = repo.lookup(opts['parent'])
810 if p not in (p1, p2):
810 if p not in (p1, p2):
811 raise util.Abort(_('%s is not a parent of %s') %
811 raise util.Abort(_('%s is not a parent of %s') %
812 (short(p), short(node)))
812 (short(p), short(node)))
813 parent = p
813 parent = p
814 else:
814 else:
815 if opts['parent']:
815 if opts['parent']:
816 raise util.Abort(_('cannot use --parent on non-merge changeset'))
816 raise util.Abort(_('cannot use --parent on non-merge changeset'))
817 parent = p1
817 parent = p1
818 hg.clean(repo, node, show_stats=False)
818 hg.clean(repo, node, show_stats=False)
819 revert_opts = opts.copy()
819 revert_opts = opts.copy()
820 revert_opts['rev'] = hex(parent)
820 revert_opts['rev'] = hex(parent)
821 revert(ui, repo, **revert_opts)
821 revert(ui, repo, **revert_opts)
822 commit_opts = opts.copy()
822 commit_opts = opts.copy()
823 commit_opts['addremove'] = False
823 commit_opts['addremove'] = False
824 if not commit_opts['message'] and not commit_opts['logfile']:
824 if not commit_opts['message'] and not commit_opts['logfile']:
825 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
825 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
826 commit_opts['force_editor'] = True
826 commit_opts['force_editor'] = True
827 commit(ui, repo, **commit_opts)
827 commit(ui, repo, **commit_opts)
828 def nice(node):
828 def nice(node):
829 return '%d:%s' % (repo.changelog.rev(node), short(node))
829 return '%d:%s' % (repo.changelog.rev(node), short(node))
830 ui.status(_('changeset %s backs out changeset %s\n') %
830 ui.status(_('changeset %s backs out changeset %s\n') %
831 (nice(repo.changelog.tip()), nice(node)))
831 (nice(repo.changelog.tip()), nice(node)))
832 if op1 != node:
832 if op1 != node:
833 if opts['merge']:
833 if opts['merge']:
834 ui.status(_('merging with changeset %s\n') % nice(op1))
834 ui.status(_('merging with changeset %s\n') % nice(op1))
835 n = _lookup(repo, hex(op1))
835 n = _lookup(repo, hex(op1))
836 hg.merge(repo, n)
836 hg.merge(repo, n)
837 else:
837 else:
838 ui.status(_('the backout changeset is a new head - '
838 ui.status(_('the backout changeset is a new head - '
839 'do not forget to merge\n'))
839 'do not forget to merge\n'))
840 ui.status(_('(use "backout --merge" '
840 ui.status(_('(use "backout --merge" '
841 'if you want to auto-merge)\n'))
841 'if you want to auto-merge)\n'))
842
842
843 def bundle(ui, repo, fname, dest=None, **opts):
843 def bundle(ui, repo, fname, dest=None, **opts):
844 """create a changegroup file
844 """create a changegroup file
845
845
846 Generate a compressed changegroup file collecting all changesets
846 Generate a compressed changegroup file collecting all changesets
847 not found in the other repository.
847 not found in the other repository.
848
848
849 This file can then be transferred using conventional means and
849 This file can then be transferred using conventional means and
850 applied to another repository with the unbundle command. This is
850 applied to another repository with the unbundle command. This is
851 useful when native push and pull are not available or when
851 useful when native push and pull are not available or when
852 exporting an entire repository is undesirable. The standard file
852 exporting an entire repository is undesirable. The standard file
853 extension is ".hg".
853 extension is ".hg".
854
854
855 Unlike import/export, this exactly preserves all changeset
855 Unlike import/export, this exactly preserves all changeset
856 contents including permissions, rename data, and revision history.
856 contents including permissions, rename data, and revision history.
857 """
857 """
858 dest = ui.expandpath(dest or 'default-push', dest or 'default')
858 dest = ui.expandpath(dest or 'default-push', dest or 'default')
859 other = hg.repository(ui, dest)
859 other = hg.repository(ui, dest)
860 o = repo.findoutgoing(other, force=opts['force'])
860 o = repo.findoutgoing(other, force=opts['force'])
861 cg = repo.changegroup(o, 'bundle')
861 cg = repo.changegroup(o, 'bundle')
862 write_bundle(cg, fname)
862 write_bundle(cg, fname)
863
863
864 def cat(ui, repo, file1, *pats, **opts):
864 def cat(ui, repo, file1, *pats, **opts):
865 """output the latest or given revisions of files
865 """output the latest or given revisions of files
866
866
867 Print the specified files as they were at the given revision.
867 Print the specified files as they were at the given revision.
868 If no revision is given then the tip is used.
868 If no revision is given then the tip is used.
869
869
870 Output may be to a file, in which case the name of the file is
870 Output may be to a file, in which case the name of the file is
871 given using a format string. The formatting rules are the same as
871 given using a format string. The formatting rules are the same as
872 for the export command, with the following additions:
872 for the export command, with the following additions:
873
873
874 %s basename of file being printed
874 %s basename of file being printed
875 %d dirname of file being printed, or '.' if in repo root
875 %d dirname of file being printed, or '.' if in repo root
876 %p root-relative path name of file being printed
876 %p root-relative path name of file being printed
877 """
877 """
878 ctx = repo.changectx(opts['rev'] or "-1")
878 ctx = repo.changectx(opts['rev'] or "-1")
879 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
879 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
880 ctx.node()):
880 ctx.node()):
881 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
881 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
882 fp.write(ctx.filectx(abs).data())
882 fp.write(ctx.filectx(abs).data())
883
883
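# A minimal sketch of the three extra format codes documented above; os is the
# module-level import used throughout this file.  Hypothetical helper, not the
# real cmdutil.make_file() logic:
def _demo_cat_format(pattern, path):
    expansions = {'%s': os.path.basename(path),
                  '%d': os.path.dirname(path) or '.',
                  '%p': path}
    out = pattern
    for code, value in expansions.items():
        out = out.replace(code, value)
    return out
# _demo_cat_format('%d/%s.r10', 'src/module.py') == 'src/module.py.r10'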
884 def clone(ui, source, dest=None, **opts):
884 def clone(ui, source, dest=None, **opts):
885 """make a copy of an existing repository
885 """make a copy of an existing repository
886
886
887 Create a copy of an existing repository in a new directory.
887 Create a copy of an existing repository in a new directory.
888
888
889 If no destination directory name is specified, it defaults to the
889 If no destination directory name is specified, it defaults to the
890 basename of the source.
890 basename of the source.
891
891
892 The location of the source is added to the new repository's
892 The location of the source is added to the new repository's
893 .hg/hgrc file, as the default to be used for future pulls.
893 .hg/hgrc file, as the default to be used for future pulls.
894
894
895 For efficiency, hardlinks are used for cloning whenever the source
895 For efficiency, hardlinks are used for cloning whenever the source
896 and destination are on the same filesystem (note this applies only
896 and destination are on the same filesystem (note this applies only
897 to the repository data, not to the checked out files). Some
897 to the repository data, not to the checked out files). Some
898 filesystems, such as AFS, implement hardlinking incorrectly, but
898 filesystems, such as AFS, implement hardlinking incorrectly, but
899 do not report errors. In these cases, use the --pull option to
899 do not report errors. In these cases, use the --pull option to
900 avoid hardlinking.
900 avoid hardlinking.
901
901
902 You can safely clone repositories and checked out files using full
902 You can safely clone repositories and checked out files using full
903 hardlinks with
903 hardlinks with
904
904
905 $ cp -al REPO REPOCLONE
905 $ cp -al REPO REPOCLONE
906
906
907 which is the fastest way to clone. However, the operation is not
907 which is the fastest way to clone. However, the operation is not
908 atomic (making sure REPO is not modified during the operation is
908 atomic (making sure REPO is not modified during the operation is
909 up to you) and you have to make sure your editor breaks hardlinks
909 up to you) and you have to make sure your editor breaks hardlinks
910 (Emacs and most Linux Kernel tools do so).
910 (Emacs and most Linux Kernel tools do so).
911
911
912 If you use the -r option to clone up to a specific revision, no
912 If you use the -r option to clone up to a specific revision, no
913 subsequent revisions will be present in the cloned repository.
913 subsequent revisions will be present in the cloned repository.
914 This option implies --pull, even on local repositories.
914 This option implies --pull, even on local repositories.
915
915
916 See pull for valid source format details.
916 See pull for valid source format details.
917
917
918 It is possible to specify an ssh:// URL as the destination, but no
918 It is possible to specify an ssh:// URL as the destination, but no
919 .hg/hgrc will be created on the remote side. Look at the help text
919 .hg/hgrc will be created on the remote side. Look at the help text
920 for the pull command for important details about ssh:// URLs.
920 for the pull command for important details about ssh:// URLs.
921 """
921 """
922 setremoteconfig(ui, opts)
922 setremoteconfig(ui, opts)
923 hg.clone(ui, ui.expandpath(source), dest,
923 hg.clone(ui, ui.expandpath(source), dest,
924 pull=opts['pull'],
924 pull=opts['pull'],
925 stream=opts['uncompressed'],
925 stream=opts['uncompressed'],
926 rev=opts['rev'],
926 rev=opts['rev'],
927 update=not opts['noupdate'])
927 update=not opts['noupdate'])
928
928
929 def commit(ui, repo, *pats, **opts):
929 def commit(ui, repo, *pats, **opts):
930 """commit the specified files or all outstanding changes
930 """commit the specified files or all outstanding changes
931
931
932 Commit changes to the given files into the repository.
932 Commit changes to the given files into the repository.
933
933
934 If a list of files is omitted, all changes reported by "hg status"
934 If a list of files is omitted, all changes reported by "hg status"
935 will be committed.
935 will be committed.
936
936
937 If no commit message is specified, the editor configured in your hgrc
937 If no commit message is specified, the editor configured in your hgrc
938 or in the EDITOR environment variable is started to enter a message.
938 or in the EDITOR environment variable is started to enter a message.
939 """
939 """
940 message = logmessage(opts)
940 message = logmessage(opts)
941
941
942 if opts['addremove']:
942 if opts['addremove']:
943 cmdutil.addremove(repo, pats, opts)
943 cmdutil.addremove(repo, pats, opts)
944 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
944 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
945 if pats:
945 if pats:
946 modified, added, removed = repo.status(files=fns, match=match)[:3]
946 modified, added, removed = repo.status(files=fns, match=match)[:3]
947 files = modified + added + removed
947 files = modified + added + removed
948 else:
948 else:
949 files = []
949 files = []
950 try:
950 try:
951 repo.commit(files, message, opts['user'], opts['date'], match,
951 repo.commit(files, message, opts['user'], opts['date'], match,
952 force_editor=opts.get('force_editor'))
952 force_editor=opts.get('force_editor'))
953 except ValueError, inst:
953 except ValueError, inst:
954 raise util.Abort(str(inst))
954 raise util.Abort(str(inst))
955
955
956 def docopy(ui, repo, pats, opts, wlock):
956 def docopy(ui, repo, pats, opts, wlock):
957 # called with the repo lock held
957 # called with the repo lock held
958 cwd = repo.getcwd()
958 cwd = repo.getcwd()
959 errors = 0
959 errors = 0
960 copied = []
960 copied = []
961 targets = {}
961 targets = {}
962
962
963 def okaytocopy(abs, rel, exact):
963 def okaytocopy(abs, rel, exact):
964 reasons = {'?': _('is not managed'),
964 reasons = {'?': _('is not managed'),
965 'a': _('has been marked for add'),
965 'a': _('has been marked for add'),
966 'r': _('has been marked for remove')}
966 'r': _('has been marked for remove')}
967 state = repo.dirstate.state(abs)
967 state = repo.dirstate.state(abs)
968 reason = reasons.get(state)
968 reason = reasons.get(state)
969 if reason:
969 if reason:
970 if state == 'a':
970 if state == 'a':
971 origsrc = repo.dirstate.copied(abs)
971 origsrc = repo.dirstate.copied(abs)
972 if origsrc is not None:
972 if origsrc is not None:
973 return origsrc
973 return origsrc
974 if exact:
974 if exact:
975 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
975 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
976 else:
976 else:
977 return abs
977 return abs
978
978
979 def copy(origsrc, abssrc, relsrc, target, exact):
979 def copy(origsrc, abssrc, relsrc, target, exact):
980 abstarget = util.canonpath(repo.root, cwd, target)
980 abstarget = util.canonpath(repo.root, cwd, target)
981 reltarget = util.pathto(cwd, abstarget)
981 reltarget = util.pathto(cwd, abstarget)
982 prevsrc = targets.get(abstarget)
982 prevsrc = targets.get(abstarget)
983 if prevsrc is not None:
983 if prevsrc is not None:
984 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
984 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
985 (reltarget, abssrc, prevsrc))
985 (reltarget, abssrc, prevsrc))
986 return
986 return
987 if (not opts['after'] and os.path.exists(reltarget) or
987 if (not opts['after'] and os.path.exists(reltarget) or
988 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
988 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
989 if not opts['force']:
989 if not opts['force']:
990 ui.warn(_('%s: not overwriting - file exists\n') %
990 ui.warn(_('%s: not overwriting - file exists\n') %
991 reltarget)
991 reltarget)
992 return
992 return
993 if not opts['after'] and not opts.get('dry_run'):
993 if not opts['after'] and not opts.get('dry_run'):
994 os.unlink(reltarget)
994 os.unlink(reltarget)
995 if opts['after']:
995 if opts['after']:
996 if not os.path.exists(reltarget):
996 if not os.path.exists(reltarget):
997 return
997 return
998 else:
998 else:
999 targetdir = os.path.dirname(reltarget) or '.'
999 targetdir = os.path.dirname(reltarget) or '.'
1000 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1000 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1001 os.makedirs(targetdir)
1001 os.makedirs(targetdir)
1002 try:
1002 try:
1003 restore = repo.dirstate.state(abstarget) == 'r'
1003 restore = repo.dirstate.state(abstarget) == 'r'
1004 if restore and not opts.get('dry_run'):
1004 if restore and not opts.get('dry_run'):
1005 repo.undelete([abstarget], wlock)
1005 repo.undelete([abstarget], wlock)
1006 try:
1006 try:
1007 if not opts.get('dry_run'):
1007 if not opts.get('dry_run'):
1008 shutil.copyfile(relsrc, reltarget)
1008 shutil.copyfile(relsrc, reltarget)
1009 shutil.copymode(relsrc, reltarget)
1009 shutil.copymode(relsrc, reltarget)
1010 restore = False
1010 restore = False
1011 finally:
1011 finally:
1012 if restore:
1012 if restore:
1013 repo.remove([abstarget], wlock)
1013 repo.remove([abstarget], wlock)
1014 except shutil.Error, inst:
1014 except shutil.Error, inst:
1015 raise util.Abort(str(inst))
1015 raise util.Abort(str(inst))
1016 except IOError, inst:
1016 except IOError, inst:
1017 if inst.errno == errno.ENOENT:
1017 if inst.errno == errno.ENOENT:
1018 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1018 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1019 else:
1019 else:
1020 ui.warn(_('%s: cannot copy - %s\n') %
1020 ui.warn(_('%s: cannot copy - %s\n') %
1021 (relsrc, inst.strerror))
1021 (relsrc, inst.strerror))
1022 errors += 1
1022 errors += 1
1023 return
1023 return
1024 if ui.verbose or not exact:
1024 if ui.verbose or not exact:
1025 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1025 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1026 targets[abstarget] = abssrc
1026 targets[abstarget] = abssrc
1027 if abstarget != origsrc and not opts.get('dry_run'):
1027 if abstarget != origsrc and not opts.get('dry_run'):
1028 repo.copy(origsrc, abstarget, wlock)
1028 repo.copy(origsrc, abstarget, wlock)
1029 copied.append((abssrc, relsrc, exact))
1029 copied.append((abssrc, relsrc, exact))
1030
1030
1031 def targetpathfn(pat, dest, srcs):
1031 def targetpathfn(pat, dest, srcs):
1032 if os.path.isdir(pat):
1032 if os.path.isdir(pat):
1033 abspfx = util.canonpath(repo.root, cwd, pat)
1033 abspfx = util.canonpath(repo.root, cwd, pat)
1034 if destdirexists:
1034 if destdirexists:
1035 striplen = len(os.path.split(abspfx)[0])
1035 striplen = len(os.path.split(abspfx)[0])
1036 else:
1036 else:
1037 striplen = len(abspfx)
1037 striplen = len(abspfx)
1038 if striplen:
1038 if striplen:
1039 striplen += len(os.sep)
1039 striplen += len(os.sep)
1040 res = lambda p: os.path.join(dest, p[striplen:])
1040 res = lambda p: os.path.join(dest, p[striplen:])
1041 elif destdirexists:
1041 elif destdirexists:
1042 res = lambda p: os.path.join(dest, os.path.basename(p))
1042 res = lambda p: os.path.join(dest, os.path.basename(p))
1043 else:
1043 else:
1044 res = lambda p: dest
1044 res = lambda p: dest
1045 return res
1045 return res
1046
1046
1047 def targetpathafterfn(pat, dest, srcs):
1047 def targetpathafterfn(pat, dest, srcs):
1048 if util.patkind(pat, None)[0]:
1048 if util.patkind(pat, None)[0]:
1049 # a mercurial pattern
1049 # a mercurial pattern
1050 res = lambda p: os.path.join(dest, os.path.basename(p))
1050 res = lambda p: os.path.join(dest, os.path.basename(p))
1051 else:
1051 else:
1052 abspfx = util.canonpath(repo.root, cwd, pat)
1052 abspfx = util.canonpath(repo.root, cwd, pat)
1053 if len(abspfx) < len(srcs[0][0]):
1053 if len(abspfx) < len(srcs[0][0]):
1054 # A directory. Either the target path contains the last
1054 # A directory. Either the target path contains the last
1055 # component of the source path or it does not.
1055 # component of the source path or it does not.
1056 def evalpath(striplen):
1056 def evalpath(striplen):
1057 score = 0
1057 score = 0
1058 for s in srcs:
1058 for s in srcs:
1059 t = os.path.join(dest, s[0][striplen:])
1059 t = os.path.join(dest, s[0][striplen:])
1060 if os.path.exists(t):
1060 if os.path.exists(t):
1061 score += 1
1061 score += 1
1062 return score
1062 return score
1063
1063
1064 striplen = len(abspfx)
1064 striplen = len(abspfx)
1065 if striplen:
1065 if striplen:
1066 striplen += len(os.sep)
1066 striplen += len(os.sep)
1067 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1067 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1068 score = evalpath(striplen)
1068 score = evalpath(striplen)
1069 striplen1 = len(os.path.split(abspfx)[0])
1069 striplen1 = len(os.path.split(abspfx)[0])
1070 if striplen1:
1070 if striplen1:
1071 striplen1 += len(os.sep)
1071 striplen1 += len(os.sep)
1072 if evalpath(striplen1) > score:
1072 if evalpath(striplen1) > score:
1073 striplen = striplen1
1073 striplen = striplen1
1074 res = lambda p: os.path.join(dest, p[striplen:])
1074 res = lambda p: os.path.join(dest, p[striplen:])
1075 else:
1075 else:
1076 # a file
1076 # a file
1077 if destdirexists:
1077 if destdirexists:
1078 res = lambda p: os.path.join(dest, os.path.basename(p))
1078 res = lambda p: os.path.join(dest, os.path.basename(p))
1079 else:
1079 else:
1080 res = lambda p: dest
1080 res = lambda p: dest
1081 return res
1081 return res
1082
1082
1083
1083
1084 pats = list(pats)
1084 pats = list(pats)
1085 if not pats:
1085 if not pats:
1086 raise util.Abort(_('no source or destination specified'))
1086 raise util.Abort(_('no source or destination specified'))
1087 if len(pats) == 1:
1087 if len(pats) == 1:
1088 raise util.Abort(_('no destination specified'))
1088 raise util.Abort(_('no destination specified'))
1089 dest = pats.pop()
1089 dest = pats.pop()
1090 destdirexists = os.path.isdir(dest)
1090 destdirexists = os.path.isdir(dest)
1091 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1091 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1092 raise util.Abort(_('with multiple sources, destination must be an '
1092 raise util.Abort(_('with multiple sources, destination must be an '
1093 'existing directory'))
1093 'existing directory'))
1094 if opts['after']:
1094 if opts['after']:
1095 tfn = targetpathafterfn
1095 tfn = targetpathafterfn
1096 else:
1096 else:
1097 tfn = targetpathfn
1097 tfn = targetpathfn
1098 copylist = []
1098 copylist = []
1099 for pat in pats:
1099 for pat in pats:
1100 srcs = []
1100 srcs = []
1101 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1101 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1102 origsrc = okaytocopy(abssrc, relsrc, exact)
1102 origsrc = okaytocopy(abssrc, relsrc, exact)
1103 if origsrc:
1103 if origsrc:
1104 srcs.append((origsrc, abssrc, relsrc, exact))
1104 srcs.append((origsrc, abssrc, relsrc, exact))
1105 if not srcs:
1105 if not srcs:
1106 continue
1106 continue
1107 copylist.append((tfn(pat, dest, srcs), srcs))
1107 copylist.append((tfn(pat, dest, srcs), srcs))
1108 if not copylist:
1108 if not copylist:
1109 raise util.Abort(_('no files to copy'))
1109 raise util.Abort(_('no files to copy'))
1110
1110
1111 for targetpath, srcs in copylist:
1111 for targetpath, srcs in copylist:
1112 for origsrc, abssrc, relsrc, exact in srcs:
1112 for origsrc, abssrc, relsrc, exact in srcs:
1113 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1113 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1114
1114
1115 if errors:
1115 if errors:
1116 ui.warn(_('(consider using --after)\n'))
1116 ui.warn(_('(consider using --after)\n'))
1117 return errors, copied
1117 return errors, copied
1118
1118
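# Illustrative sketch (not part of the diff above): the targetpathfn helpers
# in docopy map each walked source path onto the destination by stripping a
# common prefix of length 'striplen'.  A minimal standalone demo of that
# idea, using only os.path; the file names are invented for the example.
import os.path

def strip_and_join(dest, prefix, path):
    # drop the prefix plus one path separator, then re-root under dest,
    # mirroring 'res = lambda p: os.path.join(dest, p[striplen:])' above
    striplen = len(prefix)
    if striplen:
        striplen += len(os.sep)
    return os.path.join(dest, path[striplen:])

# e.g. copying the directory 'src' into 'backup' (on a POSIX system):
#   strip_and_join('backup', 'src', 'src/a/b.txt')  ->  'backup/a/b.txt'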
1119 def copy(ui, repo, *pats, **opts):
1119 def copy(ui, repo, *pats, **opts):
1120 """mark files as copied for the next commit
1120 """mark files as copied for the next commit
1121
1121
1122 Mark dest as having copies of source files. If dest is a
1122 Mark dest as having copies of source files. If dest is a
1123 directory, copies are put in that directory. If dest is a file,
1123 directory, copies are put in that directory. If dest is a file,
1124 there can only be one source.
1124 there can only be one source.
1125
1125
1126 By default, this command copies the contents of files as they
1126 By default, this command copies the contents of files as they
1127 stand in the working directory. If invoked with --after, the
1127 stand in the working directory. If invoked with --after, the
1128 operation is recorded, but no copying is performed.
1128 operation is recorded, but no copying is performed.
1129
1129
1130 This command takes effect in the next commit.
1130 This command takes effect in the next commit.
1131
1131
1132 NOTE: This command should be treated as experimental. While it
1132 NOTE: This command should be treated as experimental. While it
1133 should properly record copied files, this information is not yet
1133 should properly record copied files, this information is not yet
1134 fully used by merge, nor fully reported by log.
1134 fully used by merge, nor fully reported by log.
1135 """
1135 """
1136 wlock = repo.wlock(0)
1136 wlock = repo.wlock(0)
1137 errs, copied = docopy(ui, repo, pats, opts, wlock)
1137 errs, copied = docopy(ui, repo, pats, opts, wlock)
1138 return errs
1138 return errs
1139
1139
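# Illustrative sketch (standard library only): the working-copy step of
# docopy copies file data and permission bits separately, via
# shutil.copyfile followed by shutil.copymode.  A self-contained demo:
import os, shutil, tempfile

d = tempfile.mkdtemp()
src = os.path.join(d, 'src.txt')
dst = os.path.join(d, 'dst.txt')
f = open(src, 'w')
f.write('hello\n')
f.close()
shutil.copyfile(src, dst)    # copy the contents
shutil.copymode(src, dst)    # copy the mode bits (e.g. the exec bit)
print(open(dst).read())      # -> hello
shutil.rmtree(d)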
1140 def debugancestor(ui, index, rev1, rev2):
1140 def debugancestor(ui, index, rev1, rev2):
1141 """find the ancestor revision of two revisions in a given index"""
1141 """find the ancestor revision of two revisions in a given index"""
1142 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1142 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1143 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1143 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1144 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1144 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1145
1145
1146 def debugcomplete(ui, cmd='', **opts):
1146 def debugcomplete(ui, cmd='', **opts):
1147 """returns the completion list associated with the given command"""
1147 """returns the completion list associated with the given command"""
1148
1148
1149 if opts['options']:
1149 if opts['options']:
1150 options = []
1150 options = []
1151 otables = [globalopts]
1151 otables = [globalopts]
1152 if cmd:
1152 if cmd:
1153 aliases, entry = findcmd(cmd)
1153 aliases, entry = findcmd(cmd)
1154 otables.append(entry[1])
1154 otables.append(entry[1])
1155 for t in otables:
1155 for t in otables:
1156 for o in t:
1156 for o in t:
1157 if o[0]:
1157 if o[0]:
1158 options.append('-%s' % o[0])
1158 options.append('-%s' % o[0])
1159 options.append('--%s' % o[1])
1159 options.append('--%s' % o[1])
1160 ui.write("%s\n" % "\n".join(options))
1160 ui.write("%s\n" % "\n".join(options))
1161 return
1161 return
1162
1162
1163 clist = findpossible(cmd).keys()
1163 clist = findpossible(cmd).keys()
1164 clist.sort()
1164 clist.sort()
1165 ui.write("%s\n" % "\n".join(clist))
1165 ui.write("%s\n" % "\n".join(clist))
1166
1166
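# Illustrative sketch: 'debugcomplete --options' walks option tables whose
# entries begin with (shortname, longname, ...) and emits '-s' / '--long'
# completion strings.  The option table below is invented for the demo.
opttable = [('v', 'verbose', None, 'enable additional output'),
            ('', 'debug', None, 'enable debugging output')]
options = []
for o in opttable:
    if o[0]:
        options.append('-%s' % o[0])
    options.append('--%s' % o[1])
print("\n".join(options))    # -> -v, --verbose, --debug (one per line)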
1167 def debugrebuildstate(ui, repo, rev=None):
1167 def debugrebuildstate(ui, repo, rev=None):
1168 """rebuild the dirstate as it would look like for the given revision"""
1168 """rebuild the dirstate as it would look like for the given revision"""
1169 if not rev:
1169 if not rev:
1170 rev = repo.changelog.tip()
1170 rev = repo.changelog.tip()
1171 else:
1171 else:
1172 rev = repo.lookup(rev)
1172 rev = repo.lookup(rev)
1173 change = repo.changelog.read(rev)
1173 change = repo.changelog.read(rev)
1174 n = change[0]
1174 n = change[0]
1175 files = repo.manifest.read(n)
1175 files = repo.manifest.read(n)
1176 wlock = repo.wlock()
1176 wlock = repo.wlock()
1177 repo.dirstate.rebuild(rev, files)
1177 repo.dirstate.rebuild(rev, files)
1178
1178
1179 def debugcheckstate(ui, repo):
1179 def debugcheckstate(ui, repo):
1180 """validate the correctness of the current dirstate"""
1180 """validate the correctness of the current dirstate"""
1181 parent1, parent2 = repo.dirstate.parents()
1181 parent1, parent2 = repo.dirstate.parents()
1182 repo.dirstate.read()
1182 repo.dirstate.read()
1183 dc = repo.dirstate.map
1183 dc = repo.dirstate.map
1184 keys = dc.keys()
1184 keys = dc.keys()
1185 keys.sort()
1185 keys.sort()
1186 m1n = repo.changelog.read(parent1)[0]
1186 m1n = repo.changelog.read(parent1)[0]
1187 m2n = repo.changelog.read(parent2)[0]
1187 m2n = repo.changelog.read(parent2)[0]
1188 m1 = repo.manifest.read(m1n)
1188 m1 = repo.manifest.read(m1n)
1189 m2 = repo.manifest.read(m2n)
1189 m2 = repo.manifest.read(m2n)
1190 errors = 0
1190 errors = 0
1191 for f in dc:
1191 for f in dc:
1192 state = repo.dirstate.state(f)
1192 state = repo.dirstate.state(f)
1193 if state in "nr" and f not in m1:
1193 if state in "nr" and f not in m1:
1194 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1194 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1195 errors += 1
1195 errors += 1
1196 if state in "a" and f in m1:
1196 if state in "a" and f in m1:
1197 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1197 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1198 errors += 1
1198 errors += 1
1199 if state in "m" and f not in m1 and f not in m2:
1199 if state in "m" and f not in m1 and f not in m2:
1200 ui.warn(_("%s in state %s, but not in either manifest\n") %
1200 ui.warn(_("%s in state %s, but not in either manifest\n") %
1201 (f, state))
1201 (f, state))
1202 errors += 1
1202 errors += 1
1203 for f in m1:
1203 for f in m1:
1204 state = repo.dirstate.state(f)
1204 state = repo.dirstate.state(f)
1205 if state not in "nrm":
1205 if state not in "nrm":
1206 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1206 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
1207 errors += 1
1207 errors += 1
1208 if errors:
1208 if errors:
1209 error = _(".hg/dirstate inconsistent with current parent's manifest")
1209 error = _(".hg/dirstate inconsistent with current parent's manifest")
1210 raise util.Abort(error)
1210 raise util.Abort(error)
1211
1211
1212 def debugconfig(ui, repo, *values):
1212 def debugconfig(ui, repo, *values):
1213 """show combined config settings from all hgrc files
1213 """show combined config settings from all hgrc files
1214
1214
1215 With no args, print names and values of all config items.
1215 With no args, print names and values of all config items.
1216
1216
1217 With one arg of the form section.name, print just the value of
1217 With one arg of the form section.name, print just the value of
1218 that config item.
1218 that config item.
1219
1219
1220 With multiple args, print names and values of all config items
1220 With multiple args, print names and values of all config items
1221 with matching section names."""
1221 with matching section names."""
1222
1222
1223 if values:
1223 if values:
1224 if len([v for v in values if '.' in v]) > 1:
1224 if len([v for v in values if '.' in v]) > 1:
1225 raise util.Abort(_('only one config item permitted'))
1225 raise util.Abort(_('only one config item permitted'))
1226 for section, name, value in ui.walkconfig():
1226 for section, name, value in ui.walkconfig():
1227 sectname = section + '.' + name
1227 sectname = section + '.' + name
1228 if values:
1228 if values:
1229 for v in values:
1229 for v in values:
1230 if v == section:
1230 if v == section:
1231 ui.write('%s=%s\n' % (sectname, value))
1231 ui.write('%s=%s\n' % (sectname, value))
1232 elif v == sectname:
1232 elif v == sectname:
1233 ui.write(value, '\n')
1233 ui.write(value, '\n')
1234 else:
1234 else:
1235 ui.write('%s=%s\n' % (sectname, value))
1235 ui.write('%s=%s\n' % (sectname, value))
1236
1236
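# Illustrative sketch: debugconfig matches each requested value either
# against a whole section name or against 'section.name'.  Demo with a
# hard-coded list standing in for ui.walkconfig(); the values are invented.
walked = [('ui', 'username', 'Jane Doe <jane@example.com>'),
          ('ui', 'verbose', 'false'),
          ('paths', 'default', 'http://example.com/repo')]
for query in ['ui', 'paths.default']:
    for section, name, value in walked:
        sectname = section + '.' + name
        if query == section:        # whole section requested
            print('%s=%s' % (sectname, value))
        elif query == sectname:     # single item requested
            print(value)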
1237 def debugsetparents(ui, repo, rev1, rev2=None):
1237 def debugsetparents(ui, repo, rev1, rev2=None):
1238 """manually set the parents of the current working directory
1238 """manually set the parents of the current working directory
1239
1239
1240 This is useful for writing repository conversion tools, but should
1240 This is useful for writing repository conversion tools, but should
1241 be used with care.
1241 be used with care.
1242 """
1242 """
1243
1243
1244 if not rev2:
1244 if not rev2:
1245 rev2 = hex(nullid)
1245 rev2 = hex(nullid)
1246
1246
1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1248
1248
1249 def debugstate(ui, repo):
1249 def debugstate(ui, repo):
1250 """show the contents of the current dirstate"""
1250 """show the contents of the current dirstate"""
1251 repo.dirstate.read()
1251 repo.dirstate.read()
1252 dc = repo.dirstate.map
1252 dc = repo.dirstate.map
1253 keys = dc.keys()
1253 keys = dc.keys()
1254 keys.sort()
1254 keys.sort()
1255 for file_ in keys:
1255 for file_ in keys:
1256 ui.write("%c %3o %10d %s %s\n"
1256 ui.write("%c %3o %10d %s %s\n"
1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1258 time.strftime("%x %X",
1258 time.strftime("%x %X",
1259 time.localtime(dc[file_][3])), file_))
1259 time.localtime(dc[file_][3])), file_))
1260 for f in repo.dirstate.copies:
1260 for f in repo.dirstate.copies:
1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1262
1262
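# Illustrative sketch: one debugstate output line is built from a dirstate
# entry (state char, mode, size, mtime, filename) with the format string
# used above.  The entry values here are invented for the demo; 0644 is a
# Python 2 octal literal, matching the '& 0777' above.
import time
state, mode, size, mtime, fname = 'n', 0644, 1234, time.time(), 'hello.py'
print("%c %3o %10d %s %s" % (state, mode, size,
                             time.strftime("%x %X", time.localtime(mtime)),
                             fname))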
1263 def debugdata(ui, file_, rev):
1263 def debugdata(ui, file_, rev):
1264 """dump the contents of an data file revision"""
1264 """dump the contents of an data file revision"""
1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1266 file_[:-2] + ".i", file_, 0)
1266 file_[:-2] + ".i", file_, 0)
1267 try:
1267 try:
1268 ui.write(r.revision(r.lookup(rev)))
1268 ui.write(r.revision(r.lookup(rev)))
1269 except KeyError:
1269 except KeyError:
1270 raise util.Abort(_('invalid revision identifier %s') % rev)
1270 raise util.Abort(_('invalid revision identifier %s') % rev)
1271
1271
1272 def debugindex(ui, file_):
1272 def debugindex(ui, file_):
1273 """dump the contents of an index file"""
1273 """dump the contents of an index file"""
1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1275 ui.write(" rev offset length base linkrev" +
1275 ui.write(" rev offset length base linkrev" +
1276 " nodeid p1 p2\n")
1276 " nodeid p1 p2\n")
1277 for i in range(r.count()):
1277 for i in range(r.count()):
1278 node = r.node(i)
1278 node = r.node(i)
1279 pp = r.parents(node)
1279 pp = r.parents(node)
1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1282 short(node), short(pp[0]), short(pp[1])))
1282 short(node), short(pp[0]), short(pp[1])))
1283
1283
1284 def debugindexdot(ui, file_):
1284 def debugindexdot(ui, file_):
1285 """dump an index DAG as a .dot file"""
1285 """dump an index DAG as a .dot file"""
1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1287 ui.write("digraph G {\n")
1287 ui.write("digraph G {\n")
1288 for i in range(r.count()):
1288 for i in range(r.count()):
1289 node = r.node(i)
1289 node = r.node(i)
1290 pp = r.parents(node)
1290 pp = r.parents(node)
1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1292 if pp[1] != nullid:
1292 if pp[1] != nullid:
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1294 ui.write("}\n")
1294 ui.write("}\n")
1295
1295
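# Illustrative sketch: debugindexdot emits plain Graphviz 'dot' text, one
# 'parent -> child' edge per parent link.  A tiny hand-rolled example of the
# same output shape (the revision numbers are invented for the demo):
edges = [(0, 1), (1, 2), (0, 3), (2, 4), (3, 4)]    # (parent rev, child rev)
lines = ["digraph G {"]
for p, c in edges:
    lines.append("\t%d -> %d" % (p, c))
lines.append("}")
print("\n".join(lines))
# the text can then be rendered with Graphviz, e.g.: dot -Tpng graph.dot -o graph.png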
1296 def debugrename(ui, repo, file, rev=None):
1296 def debugrename(ui, repo, file, rev=None):
1297 """dump rename information"""
1297 """dump rename information"""
1298 r = repo.file(relpath(repo, [file])[0])
1298 r = repo.file(relpath(repo, [file])[0])
1299 if rev:
1299 if rev:
1300 try:
1300 try:
1301 # assume all revision numbers are for changesets
1301 # assume all revision numbers are for changesets
1302 n = repo.lookup(rev)
1302 n = repo.lookup(rev)
1303 change = repo.changelog.read(n)
1303 change = repo.changelog.read(n)
1304 m = repo.manifest.read(change[0])
1304 m = repo.manifest.read(change[0])
1305 n = m[relpath(repo, [file])[0]]
1305 n = m[relpath(repo, [file])[0]]
1306 except (hg.RepoError, KeyError):
1306 except (hg.RepoError, KeyError):
1307 n = r.lookup(rev)
1307 n = r.lookup(rev)
1308 else:
1308 else:
1309 n = r.tip()
1309 n = r.tip()
1310 m = r.renamed(n)
1310 m = r.renamed(n)
1311 if m:
1311 if m:
1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1313 else:
1313 else:
1314 ui.write(_("not renamed\n"))
1314 ui.write(_("not renamed\n"))
1315
1315
1316 def debugwalk(ui, repo, *pats, **opts):
1316 def debugwalk(ui, repo, *pats, **opts):
1317 """show how files match on given patterns"""
1317 """show how files match on given patterns"""
1318 items = list(cmdutil.walk(repo, pats, opts))
1318 items = list(cmdutil.walk(repo, pats, opts))
1319 if not items:
1319 if not items:
1320 return
1320 return
1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1322 max([len(abs) for (src, abs, rel, exact) in items]),
1322 max([len(abs) for (src, abs, rel, exact) in items]),
1323 max([len(rel) for (src, abs, rel, exact) in items]))
1323 max([len(rel) for (src, abs, rel, exact) in items]))
1324 for src, abs, rel, exact in items:
1324 for src, abs, rel, exact in items:
1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1326 ui.write("%s\n" % line.rstrip())
1326 ui.write("%s\n" % line.rstrip())
1327
1327
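# Illustrative sketch: debugwalk builds its format string in two passes --
# the doubled %% survives the first substitution, which only fills in column
# widths measured from the data.  Standalone demo with invented file names:
items = [('f', 'dir/a.txt', 'a.txt'), ('f', 'dir/sub/b.txt', 'sub/b.txt')]
fmt = '%%s %%-%ds %%-%ds %%s' % (
    max([len(a) for (t, a, r) in items]),
    max([len(r) for (t, a, r) in items]))
print(fmt)                       # -> %s %-13s %-9s %s
for t, a, r in items:
    print(fmt % (t, a, r, 'exact'))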
1328 def diff(ui, repo, *pats, **opts):
1328 def diff(ui, repo, *pats, **opts):
1329 """diff repository (or selected files)
1329 """diff repository (or selected files)
1330
1330
1331 Show differences between revisions for the specified files.
1331 Show differences between revisions for the specified files.
1332
1332
1333 Differences between files are shown using the unified diff format.
1333 Differences between files are shown using the unified diff format.
1334
1334
1335 When two revision arguments are given, then changes are shown
1335 When two revision arguments are given, then changes are shown
1336 between those revisions. If only one revision is specified then
1336 between those revisions. If only one revision is specified then
1337 that revision is compared to the working directory, and, when no
1337 that revision is compared to the working directory, and, when no
1338 revisions are specified, the working directory files are compared
1338 revisions are specified, the working directory files are compared
1339 to its parent.
1339 to its parent.
1340
1340
1341 Without the -a option, diff will avoid generating diffs of files
1341 Without the -a option, diff will avoid generating diffs of files
1342 it detects as binary. With -a, diff will generate a diff anyway,
1342 it detects as binary. With -a, diff will generate a diff anyway,
1343 probably with undesirable results.
1343 probably with undesirable results.
1344 """
1344 """
1345 node1, node2 = revpair(ui, repo, opts['rev'])
1345 node1, node2 = revpair(ui, repo, opts['rev'])
1346
1346
1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1348
1348
1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1350 opts=ui.diffopts(opts))
1350 opts=patch.diffopts(ui, opts))
1351
1351
1352 def export(ui, repo, *changesets, **opts):
1352 def export(ui, repo, *changesets, **opts):
1353 """dump the header and diffs for one or more changesets
1353 """dump the header and diffs for one or more changesets
1354
1354
1355 Print the changeset header and diffs for one or more revisions.
1355 Print the changeset header and diffs for one or more revisions.
1356
1356
1357 The information shown in the changeset header is: author,
1357 The information shown in the changeset header is: author,
1358 changeset hash, parent and commit comment.
1358 changeset hash, parent and commit comment.
1359
1359
1360 Output may be to a file, in which case the name of the file is
1360 Output may be to a file, in which case the name of the file is
1361 given using a format string. The formatting rules are as follows:
1361 given using a format string. The formatting rules are as follows:
1362
1362
1363 %% literal "%" character
1363 %% literal "%" character
1364 %H changeset hash (40 bytes of hexadecimal)
1364 %H changeset hash (40 bytes of hexadecimal)
1365 %N number of patches being generated
1365 %N number of patches being generated
1366 %R changeset revision number
1366 %R changeset revision number
1367 %b basename of the exporting repository
1367 %b basename of the exporting repository
1368 %h short-form changeset hash (12 bytes of hexadecimal)
1368 %h short-form changeset hash (12 bytes of hexadecimal)
1369 %n zero-padded sequence number, starting at 1
1369 %n zero-padded sequence number, starting at 1
1370 %r zero-padded changeset revision number
1370 %r zero-padded changeset revision number
1371
1371
1372 Without the -a option, export will avoid generating diffs of files
1372 Without the -a option, export will avoid generating diffs of files
1373 it detects as binary. With -a, export will generate a diff anyway,
1373 it detects as binary. With -a, export will generate a diff anyway,
1374 probably with undesirable results.
1374 probably with undesirable results.
1375
1375
1376 With the --switch-parent option, the diff will be against the second
1376 With the --switch-parent option, the diff will be against the second
1377 parent. This can be useful for reviewing a merge.
1377 parent. This can be useful for reviewing a merge.
1378 """
1378 """
1379 if not changesets:
1379 if not changesets:
1380 raise util.Abort(_("export requires at least one changeset"))
1380 raise util.Abort(_("export requires at least one changeset"))
1381 revs = list(revrange(ui, repo, changesets))
1381 revs = list(revrange(ui, repo, changesets))
1382 if len(revs) > 1:
1382 if len(revs) > 1:
1383 ui.note(_('exporting patches:\n'))
1383 ui.note(_('exporting patches:\n'))
1384 else:
1384 else:
1385 ui.note(_('exporting patch:\n'))
1385 ui.note(_('exporting patch:\n'))
1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1387 switch_parent=opts['switch_parent'], opts=ui.diffopts(opts))
1387 switch_parent=opts['switch_parent'],
1388 opts=patch.diffopts(ui, opts))
1388
1389
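# Illustrative sketch: how an export output template such as '%b-%r.patch'
# might expand.  This is a simplified stand-in written for the demo, not
# Mercurial's templater, and the padding widths and sample values (hash,
# revision, repository basename) are invented.
def expandname(tmpl, repobase, rev, fullhex, total, seqno):
    subs = {'%': '%', 'b': repobase, 'R': str(rev),
            'H': fullhex, 'h': fullhex[:12],
            'N': str(total), 'n': '%02d' % seqno,
            'r': str(rev).zfill(len(str(total)))}
    out, i = '', 0
    while i < len(tmpl):
        if tmpl[i] == '%' and i + 1 < len(tmpl):
            out += subs.get(tmpl[i + 1], '')
            i += 2
        else:
            out += tmpl[i]
            i += 1
    return out

print(expandname('%b-%r.patch', 'myrepo', 42, 'a' * 40, 3, 1))
# -> myrepo-42.patch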
1389 def forget(ui, repo, *pats, **opts):
1390 def forget(ui, repo, *pats, **opts):
1390 """don't add the specified files on the next commit (DEPRECATED)
1391 """don't add the specified files on the next commit (DEPRECATED)
1391
1392
1392 (DEPRECATED)
1393 (DEPRECATED)
1393 Undo an 'hg add' scheduled for the next commit.
1394 Undo an 'hg add' scheduled for the next commit.
1394
1395
1395 This command is now deprecated and will be removed in a future
1396 This command is now deprecated and will be removed in a future
1396 release. Please use revert instead.
1397 release. Please use revert instead.
1397 """
1398 """
1398 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1399 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1399 forget = []
1400 forget = []
1400 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1401 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1401 if repo.dirstate.state(abs) == 'a':
1402 if repo.dirstate.state(abs) == 'a':
1402 forget.append(abs)
1403 forget.append(abs)
1403 if ui.verbose or not exact:
1404 if ui.verbose or not exact:
1404 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1405 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1405 repo.forget(forget)
1406 repo.forget(forget)
1406
1407
1407 def grep(ui, repo, pattern, *pats, **opts):
1408 def grep(ui, repo, pattern, *pats, **opts):
1408 """search for a pattern in specified files and revisions
1409 """search for a pattern in specified files and revisions
1409
1410
1410 Search revisions of files for a regular expression.
1411 Search revisions of files for a regular expression.
1411
1412
1412 This command behaves differently than Unix grep. It only accepts
1413 This command behaves differently than Unix grep. It only accepts
1413 Python/Perl regexps. It searches repository history, not the
1414 Python/Perl regexps. It searches repository history, not the
1414 working directory. It always prints the revision number in which
1415 working directory. It always prints the revision number in which
1415 a match appears.
1416 a match appears.
1416
1417
1417 By default, grep only prints output for the first revision of a
1418 By default, grep only prints output for the first revision of a
1418 file in which it finds a match. To get it to print every revision
1419 file in which it finds a match. To get it to print every revision
1419 that contains a change in match status ("-" for a match that
1420 that contains a change in match status ("-" for a match that
1420 becomes a non-match, or "+" for a non-match that becomes a match),
1421 becomes a non-match, or "+" for a non-match that becomes a match),
1421 use the --all flag.
1422 use the --all flag.
1422 """
1423 """
1423 reflags = 0
1424 reflags = 0
1424 if opts['ignore_case']:
1425 if opts['ignore_case']:
1425 reflags |= re.I
1426 reflags |= re.I
1426 regexp = re.compile(pattern, reflags)
1427 regexp = re.compile(pattern, reflags)
1427 sep, eol = ':', '\n'
1428 sep, eol = ':', '\n'
1428 if opts['print0']:
1429 if opts['print0']:
1429 sep = eol = '\0'
1430 sep = eol = '\0'
1430
1431
1431 fcache = {}
1432 fcache = {}
1432 def getfile(fn):
1433 def getfile(fn):
1433 if fn not in fcache:
1434 if fn not in fcache:
1434 fcache[fn] = repo.file(fn)
1435 fcache[fn] = repo.file(fn)
1435 return fcache[fn]
1436 return fcache[fn]
1436
1437
1437 def matchlines(body):
1438 def matchlines(body):
1438 begin = 0
1439 begin = 0
1439 linenum = 0
1440 linenum = 0
1440 while True:
1441 while True:
1441 match = regexp.search(body, begin)
1442 match = regexp.search(body, begin)
1442 if not match:
1443 if not match:
1443 break
1444 break
1444 mstart, mend = match.span()
1445 mstart, mend = match.span()
1445 linenum += body.count('\n', begin, mstart) + 1
1446 linenum += body.count('\n', begin, mstart) + 1
1446 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1447 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1447 lend = body.find('\n', mend)
1448 lend = body.find('\n', mend)
1448 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1449 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1449 begin = lend + 1
1450 begin = lend + 1
1450
1451
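# Illustrative sketch: matchlines above scans a whole file body with
# regexp.search and converts each match span into (line number, column
# start, column end, line text).  A compact standalone version on a small
# invented body:
import re
body = "alpha\nbeta gamma\ngamma delta\n"
regexp = re.compile("gamma")
begin, linenum = 0, 0
while True:
    m = regexp.search(body, begin)
    if not m:
        break
    mstart, mend = m.span()
    linenum += body.count('\n', begin, mstart) + 1
    lstart = body.rfind('\n', begin, mstart) + 1 or begin
    lend = body.find('\n', mend)
    print((linenum, mstart - lstart, mend - lstart, body[lstart:lend]))
    begin = lend + 1
# -> (2, 5, 10, 'beta gamma') and (3, 0, 5, 'gamma delta')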
1451 class linestate(object):
1452 class linestate(object):
1452 def __init__(self, line, linenum, colstart, colend):
1453 def __init__(self, line, linenum, colstart, colend):
1453 self.line = line
1454 self.line = line
1454 self.linenum = linenum
1455 self.linenum = linenum
1455 self.colstart = colstart
1456 self.colstart = colstart
1456 self.colend = colend
1457 self.colend = colend
1457
1458
1458 def __eq__(self, other):
1459 def __eq__(self, other):
1459 return self.line == other.line
1460 return self.line == other.line
1460
1461
1461 matches = {}
1462 matches = {}
1462 copies = {}
1463 copies = {}
1463 def grepbody(fn, rev, body):
1464 def grepbody(fn, rev, body):
1464 matches[rev].setdefault(fn, [])
1465 matches[rev].setdefault(fn, [])
1465 m = matches[rev][fn]
1466 m = matches[rev][fn]
1466 for lnum, cstart, cend, line in matchlines(body):
1467 for lnum, cstart, cend, line in matchlines(body):
1467 s = linestate(line, lnum, cstart, cend)
1468 s = linestate(line, lnum, cstart, cend)
1468 m.append(s)
1469 m.append(s)
1469
1470
1470 def difflinestates(a, b):
1471 def difflinestates(a, b):
1471 sm = difflib.SequenceMatcher(None, a, b)
1472 sm = difflib.SequenceMatcher(None, a, b)
1472 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1473 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1473 if tag == 'insert':
1474 if tag == 'insert':
1474 for i in range(blo, bhi):
1475 for i in range(blo, bhi):
1475 yield ('+', b[i])
1476 yield ('+', b[i])
1476 elif tag == 'delete':
1477 elif tag == 'delete':
1477 for i in range(alo, ahi):
1478 for i in range(alo, ahi):
1478 yield ('-', a[i])
1479 yield ('-', a[i])
1479 elif tag == 'replace':
1480 elif tag == 'replace':
1480 for i in range(alo, ahi):
1481 for i in range(alo, ahi):
1481 yield ('-', a[i])
1482 yield ('-', a[i])
1482 for i in range(blo, bhi):
1483 for i in range(blo, bhi):
1483 yield ('+', b[i])
1484 yield ('+', b[i])
1484
1485
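# Illustrative sketch: difflinestates above turns SequenceMatcher opcodes
# into '+'/'-' pairs, which is how 'grep --all' reports a line whose match
# status changed between revisions.  Standalone demo on invented line lists:
import difflib
a = ['foo', 'bar', 'baz']
b = ['foo', 'quux', 'baz', 'extra']
sm = difflib.SequenceMatcher(None, a, b)
for tag, alo, ahi, blo, bhi in sm.get_opcodes():
    if tag in ('delete', 'replace'):
        for i in range(alo, ahi):
            print(('-', a[i]))
    if tag in ('insert', 'replace'):
        for i in range(blo, bhi):
            print(('+', b[i]))
# -> ('-', 'bar'), ('+', 'quux'), ('+', 'extra')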
1485 prev = {}
1486 prev = {}
1486 ucache = {}
1487 ucache = {}
1487 def display(fn, rev, states, prevstates):
1488 def display(fn, rev, states, prevstates):
1488 counts = {'-': 0, '+': 0}
1489 counts = {'-': 0, '+': 0}
1489 filerevmatches = {}
1490 filerevmatches = {}
1490 if incrementing or not opts['all']:
1491 if incrementing or not opts['all']:
1491 a, b = prevstates, states
1492 a, b = prevstates, states
1492 else:
1493 else:
1493 a, b = states, prevstates
1494 a, b = states, prevstates
1494 for change, l in difflinestates(a, b):
1495 for change, l in difflinestates(a, b):
1495 if incrementing or not opts['all']:
1496 if incrementing or not opts['all']:
1496 r = rev
1497 r = rev
1497 else:
1498 else:
1498 r = prev[fn]
1499 r = prev[fn]
1499 cols = [fn, str(r)]
1500 cols = [fn, str(r)]
1500 if opts['line_number']:
1501 if opts['line_number']:
1501 cols.append(str(l.linenum))
1502 cols.append(str(l.linenum))
1502 if opts['all']:
1503 if opts['all']:
1503 cols.append(change)
1504 cols.append(change)
1504 if opts['user']:
1505 if opts['user']:
1505 cols.append(trimuser(ui, getchange(r)[1], rev,
1506 cols.append(trimuser(ui, getchange(r)[1], rev,
1506 ucache))
1507 ucache))
1507 if opts['files_with_matches']:
1508 if opts['files_with_matches']:
1508 c = (fn, rev)
1509 c = (fn, rev)
1509 if c in filerevmatches:
1510 if c in filerevmatches:
1510 continue
1511 continue
1511 filerevmatches[c] = 1
1512 filerevmatches[c] = 1
1512 else:
1513 else:
1513 cols.append(l.line)
1514 cols.append(l.line)
1514 ui.write(sep.join(cols), eol)
1515 ui.write(sep.join(cols), eol)
1515 counts[change] += 1
1516 counts[change] += 1
1516 return counts['+'], counts['-']
1517 return counts['+'], counts['-']
1517
1518
1518 fstate = {}
1519 fstate = {}
1519 skip = {}
1520 skip = {}
1520 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1521 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1521 count = 0
1522 count = 0
1522 incrementing = False
1523 incrementing = False
1523 follow = opts.get('follow')
1524 follow = opts.get('follow')
1524 for st, rev, fns in changeiter:
1525 for st, rev, fns in changeiter:
1525 if st == 'window':
1526 if st == 'window':
1526 incrementing = rev
1527 incrementing = rev
1527 matches.clear()
1528 matches.clear()
1528 copies.clear()
1529 copies.clear()
1529 elif st == 'add':
1530 elif st == 'add':
1530 change = repo.changelog.read(repo.lookup(str(rev)))
1531 change = repo.changelog.read(repo.lookup(str(rev)))
1531 mf = repo.manifest.read(change[0])
1532 mf = repo.manifest.read(change[0])
1532 matches[rev] = {}
1533 matches[rev] = {}
1533 for fn in fns:
1534 for fn in fns:
1534 if fn in skip:
1535 if fn in skip:
1535 continue
1536 continue
1536 fstate.setdefault(fn, {})
1537 fstate.setdefault(fn, {})
1537 copies.setdefault(rev, {})
1538 copies.setdefault(rev, {})
1538 try:
1539 try:
1539 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 if follow:
1541 if follow:
1541 copied = getfile(fn).renamed(mf[fn])
1542 copied = getfile(fn).renamed(mf[fn])
1542 if copied:
1543 if copied:
1543 copies[rev][fn] = copied[0]
1544 copies[rev][fn] = copied[0]
1544 except KeyError:
1545 except KeyError:
1545 pass
1546 pass
1546 elif st == 'iter':
1547 elif st == 'iter':
1547 states = matches[rev].items()
1548 states = matches[rev].items()
1548 states.sort()
1549 states.sort()
1549 for fn, m in states:
1550 for fn, m in states:
1550 copy = copies[rev].get(fn)
1551 copy = copies[rev].get(fn)
1551 if fn in skip:
1552 if fn in skip:
1552 if copy:
1553 if copy:
1553 skip[copy] = True
1554 skip[copy] = True
1554 continue
1555 continue
1555 if incrementing or not opts['all'] or fstate[fn]:
1556 if incrementing or not opts['all'] or fstate[fn]:
1556 pos, neg = display(fn, rev, m, fstate[fn])
1557 pos, neg = display(fn, rev, m, fstate[fn])
1557 count += pos + neg
1558 count += pos + neg
1558 if pos and not opts['all']:
1559 if pos and not opts['all']:
1559 skip[fn] = True
1560 skip[fn] = True
1560 if copy:
1561 if copy:
1561 skip[copy] = True
1562 skip[copy] = True
1562 fstate[fn] = m
1563 fstate[fn] = m
1563 if copy:
1564 if copy:
1564 fstate[copy] = m
1565 fstate[copy] = m
1565 prev[fn] = rev
1566 prev[fn] = rev
1566
1567
1567 if not incrementing:
1568 if not incrementing:
1568 fstate = fstate.items()
1569 fstate = fstate.items()
1569 fstate.sort()
1570 fstate.sort()
1570 for fn, state in fstate:
1571 for fn, state in fstate:
1571 if fn in skip:
1572 if fn in skip:
1572 continue
1573 continue
1573 if fn not in copies[prev[fn]]:
1574 if fn not in copies[prev[fn]]:
1574 display(fn, rev, {}, state)
1575 display(fn, rev, {}, state)
1575 return (count == 0 and 1) or 0
1576 return (count == 0 and 1) or 0
1576
1577
1577 def heads(ui, repo, **opts):
1578 def heads(ui, repo, **opts):
1578 """show current repository heads
1579 """show current repository heads
1579
1580
1580 Show all repository head changesets.
1581 Show all repository head changesets.
1581
1582
1582 Repository "heads" are changesets that don't have child
1583 Repository "heads" are changesets that don't have child
1583 changesets. They are where development generally takes place and
1584 changesets. They are where development generally takes place and
1584 are the usual targets for update and merge operations.
1585 are the usual targets for update and merge operations.
1585 """
1586 """
1586 if opts['rev']:
1587 if opts['rev']:
1587 heads = repo.heads(repo.lookup(opts['rev']))
1588 heads = repo.heads(repo.lookup(opts['rev']))
1588 else:
1589 else:
1589 heads = repo.heads()
1590 heads = repo.heads()
1590 br = None
1591 br = None
1591 if opts['branches']:
1592 if opts['branches']:
1592 br = repo.branchlookup(heads)
1593 br = repo.branchlookup(heads)
1593 displayer = show_changeset(ui, repo, opts)
1594 displayer = show_changeset(ui, repo, opts)
1594 for n in heads:
1595 for n in heads:
1595 displayer.show(changenode=n, brinfo=br)
1596 displayer.show(changenode=n, brinfo=br)
1596
1597
1597 def identify(ui, repo):
1598 def identify(ui, repo):
1598 """print information about the working copy
1599 """print information about the working copy
1599
1600
1600 Print a short summary of the current state of the repo.
1601 Print a short summary of the current state of the repo.
1601
1602
1602 This summary identifies the repository state using one or two parent
1603 This summary identifies the repository state using one or two parent
1603 hash identifiers, followed by a "+" if there are uncommitted changes
1604 hash identifiers, followed by a "+" if there are uncommitted changes
1604 in the working directory, followed by a list of tags for this revision.
1605 in the working directory, followed by a list of tags for this revision.
1605 """
1606 """
1606 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 if not parents:
1608 if not parents:
1608 ui.write(_("unknown\n"))
1609 ui.write(_("unknown\n"))
1609 return
1610 return
1610
1611
1611 hexfunc = ui.verbose and hex or short
1612 hexfunc = ui.verbose and hex or short
1612 modified, added, removed, deleted = repo.status()[:4]
1613 modified, added, removed, deleted = repo.status()[:4]
1613 output = ["%s%s" %
1614 output = ["%s%s" %
1614 ('+'.join([hexfunc(parent) for parent in parents]),
1615 ('+'.join([hexfunc(parent) for parent in parents]),
1615 (modified or added or removed or deleted) and "+" or "")]
1616 (modified or added or removed or deleted) and "+" or "")]
1616
1617
1617 if not ui.quiet:
1618 if not ui.quiet:
1618 # multiple tags for a single parent separated by '/'
1619 # multiple tags for a single parent separated by '/'
1619 parenttags = ['/'.join(tags)
1620 parenttags = ['/'.join(tags)
1620 for tags in map(repo.nodetags, parents) if tags]
1621 for tags in map(repo.nodetags, parents) if tags]
1621 # tags for multiple parents separated by ' + '
1622 # tags for multiple parents separated by ' + '
1622 if parenttags:
1623 if parenttags:
1623 output.append(' + '.join(parenttags))
1624 output.append(' + '.join(parenttags))
1624
1625
1625 ui.write("%s\n" % ' '.join(output))
1626 ui.write("%s\n" % ' '.join(output))
1626
1627
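# Illustrative sketch: the identify output is the short parent hashes joined
# with '+', a trailing '+' when the working directory has uncommitted
# changes, then any tags.  The hash and tag values below are invented.
parents = ['a1b2c3d4e5f6', '0123456789ab']   # short (12-hex) parent ids
dirty = True                                  # uncommitted changes present
parenttags = ['tip']
output = ['%s%s' % ('+'.join(parents), dirty and '+' or '')]
if parenttags:
    output.append(' + '.join(parenttags))
print(' '.join(output))   # -> a1b2c3d4e5f6+0123456789ab+ tip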
1627 def import_(ui, repo, patch1, *patches, **opts):
1628 def import_(ui, repo, patch1, *patches, **opts):
1628 """import an ordered set of patches
1629 """import an ordered set of patches
1629
1630
1630 Import a list of patches and commit them individually.
1631 Import a list of patches and commit them individually.
1631
1632
1632 If there are outstanding changes in the working directory, import
1633 If there are outstanding changes in the working directory, import
1633 will abort unless given the -f flag.
1634 will abort unless given the -f flag.
1634
1635
1635 You can import a patch straight from a mail message. Even patches
1636 You can import a patch straight from a mail message. Even patches
1636 as attachments work (the body part must be of type text/plain or
1637 as attachments work (the body part must be of type text/plain or
1637 text/x-patch to be used). The From and Subject headers of the email
1638 text/x-patch to be used). The From and Subject headers of the email
1638 message are used as the default committer and commit message. All
1639 message are used as the default committer and commit message. All
1639 text/plain body parts before the first diff are added to the commit
1640 text/plain body parts before the first diff are added to the commit
1640 message.
1641 message.
1641
1642
1642 If the imported patch was generated by hg export, the user and description
1643 If the imported patch was generated by hg export, the user and description
1643 from the patch override the values from the message headers and body.
1644 from the patch override the values from the message headers and body.
1644 Values given on the command line with -m and -u override these.
1645 Values given on the command line with -m and -u override these.
1645
1646
1646 To read a patch from standard input, use patch name "-".
1647 To read a patch from standard input, use patch name "-".
1647 """
1648 """
1648 patches = (patch1,) + patches
1649 patches = (patch1,) + patches
1649
1650
1650 if not opts['force']:
1651 if not opts['force']:
1651 bail_if_changed(repo)
1652 bail_if_changed(repo)
1652
1653
1653 d = opts["base"]
1654 d = opts["base"]
1654 strip = opts["strip"]
1655 strip = opts["strip"]
1655
1656
1656 wlock = repo.wlock()
1657 wlock = repo.wlock()
1657 lock = repo.lock()
1658 lock = repo.lock()
1658
1659
1659 for p in patches:
1660 for p in patches:
1660 pf = os.path.join(d, p)
1661 pf = os.path.join(d, p)
1661
1662
1662 if pf == '-':
1663 if pf == '-':
1663 ui.status(_("applying patch from stdin\n"))
1664 ui.status(_("applying patch from stdin\n"))
1664 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 else:
1666 else:
1666 ui.status(_("applying %s\n") % p)
1667 ui.status(_("applying %s\n") % p)
1667 tmpname, message, user, date = patch.extract(ui, file(pf))
1668 tmpname, message, user, date = patch.extract(ui, file(pf))
1668
1669
1669 if tmpname is None:
1670 if tmpname is None:
1670 raise util.Abort(_('no diffs found'))
1671 raise util.Abort(_('no diffs found'))
1671
1672
1672 try:
1673 try:
1673 if opts['message']:
1674 if opts['message']:
1674 # pickup the cmdline msg
1675 # pickup the cmdline msg
1675 message = opts['message']
1676 message = opts['message']
1676 elif message:
1677 elif message:
1677 # pickup the patch msg
1678 # pickup the patch msg
1678 message = message.strip()
1679 message = message.strip()
1679 else:
1680 else:
1680 # launch the editor
1681 # launch the editor
1681 message = None
1682 message = None
1682 ui.debug(_('message:\n%s\n') % message)
1683 ui.debug(_('message:\n%s\n') % message)
1683
1684
1684 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1685 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1685 removes = []
1686 removes = []
1686 if len(files) > 0:
1687 if len(files) > 0:
1687 cfiles = files.keys()
1688 cfiles = files.keys()
1688 copies = []
1689 copies = []
1689 copts = {'after': False, 'force': False}
1690 copts = {'after': False, 'force': False}
1690 cwd = repo.getcwd()
1691 cwd = repo.getcwd()
1691 if cwd:
1692 if cwd:
1692 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1693 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1693 for f in files:
1694 for f in files:
1694 ctype, gp = files[f]
1695 ctype, gp = files[f]
1695 if ctype == 'RENAME':
1696 if ctype == 'RENAME':
1696 copies.append((gp.oldpath, gp.path, gp.copymod))
1697 copies.append((gp.oldpath, gp.path, gp.copymod))
1697 removes.append(gp.oldpath)
1698 removes.append(gp.oldpath)
1698 elif ctype == 'COPY':
1699 elif ctype == 'COPY':
1699 copies.append((gp.oldpath, gp.path, gp.copymod))
1700 copies.append((gp.oldpath, gp.path, gp.copymod))
1700 elif ctype == 'DELETE':
1701 elif ctype == 'DELETE':
1701 removes.append(gp.path)
1702 removes.append(gp.path)
1702 for src, dst, after in copies:
1703 for src, dst, after in copies:
1703 absdst = os.path.join(repo.root, dst)
1704 absdst = os.path.join(repo.root, dst)
1704 if not after and os.path.exists(absdst):
1705 if not after and os.path.exists(absdst):
1705 raise util.Abort(_('patch creates existing file %s') % dst)
1706 raise util.Abort(_('patch creates existing file %s') % dst)
1706 if cwd:
1707 if cwd:
1707 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1708 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1708 copts['after'] = after
1709 copts['after'] = after
1709 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1710 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1710 if errs:
1711 if errs:
1711 raise util.Abort(errs)
1712 raise util.Abort(errs)
1712 if removes:
1713 if removes:
1713 repo.remove(removes, True, wlock=wlock)
1714 repo.remove(removes, True, wlock=wlock)
1714 for f in files:
1715 for f in files:
1715 ctype, gp = files[f]
1716 ctype, gp = files[f]
1716 if gp and gp.mode:
1717 if gp and gp.mode:
1717 x = gp.mode & 0100 != 0
1718 x = gp.mode & 0100 != 0
1718 dst = os.path.join(repo.root, gp.path)
1719 dst = os.path.join(repo.root, gp.path)
1719 util.set_exec(dst, x)
1720 util.set_exec(dst, x)
1720 cmdutil.addremove(repo, cfiles, wlock=wlock)
1721 cmdutil.addremove(repo, cfiles, wlock=wlock)
1721 files = files.keys()
1722 files = files.keys()
1722 files.extend([r for r in removes if r not in files])
1723 files.extend([r for r in removes if r not in files])
1723 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1724 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1724 finally:
1725 finally:
1725 os.unlink(tmpname)
1726 os.unlink(tmpname)
1726
1727
1727 def incoming(ui, repo, source="default", **opts):
1728 def incoming(ui, repo, source="default", **opts):
1728 """show new changesets found in source
1729 """show new changesets found in source
1729
1730
1730 Show new changesets found in the specified path/URL or the default
1731 Show new changesets found in the specified path/URL or the default
1731 pull location. These are the changesets that would be pulled if a pull
1732 pull location. These are the changesets that would be pulled if a pull
1732 was requested.
1733 was requested.
1733
1734
1734 For remote repositories, using --bundle avoids downloading the changesets
1735 For remote repositories, using --bundle avoids downloading the changesets
1735 twice if the incoming command is followed by a pull.
1736 twice if the incoming command is followed by a pull.
1736
1737
1737 See pull for valid source format details.
1738 See pull for valid source format details.
1738 """
1739 """
1739 source = ui.expandpath(source)
1740 source = ui.expandpath(source)
1740 setremoteconfig(ui, opts)
1741 setremoteconfig(ui, opts)
1741
1742
1742 other = hg.repository(ui, source)
1743 other = hg.repository(ui, source)
1743 incoming = repo.findincoming(other, force=opts["force"])
1744 incoming = repo.findincoming(other, force=opts["force"])
1744 if not incoming:
1745 if not incoming:
1745 ui.status(_("no changes found\n"))
1746 ui.status(_("no changes found\n"))
1746 return
1747 return
1747
1748
1748 cleanup = None
1749 cleanup = None
1749 try:
1750 try:
1750 fname = opts["bundle"]
1751 fname = opts["bundle"]
1751 if fname or not other.local():
1752 if fname or not other.local():
1752 # create a bundle (uncompressed if other repo is not local)
1753 # create a bundle (uncompressed if other repo is not local)
1753 cg = other.changegroup(incoming, "incoming")
1754 cg = other.changegroup(incoming, "incoming")
1754 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1755 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1755 # keep written bundle?
1756 # keep written bundle?
1756 if opts["bundle"]:
1757 if opts["bundle"]:
1757 cleanup = None
1758 cleanup = None
1758 if not other.local():
1759 if not other.local():
1759 # use the created uncompressed bundlerepo
1760 # use the created uncompressed bundlerepo
1760 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1761 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1761
1762
1762 revs = None
1763 revs = None
1763 if opts['rev']:
1764 if opts['rev']:
1764 revs = [other.lookup(rev) for rev in opts['rev']]
1765 revs = [other.lookup(rev) for rev in opts['rev']]
1765 o = other.changelog.nodesbetween(incoming, revs)[0]
1766 o = other.changelog.nodesbetween(incoming, revs)[0]
1766 if opts['newest_first']:
1767 if opts['newest_first']:
1767 o.reverse()
1768 o.reverse()
1768 displayer = show_changeset(ui, other, opts)
1769 displayer = show_changeset(ui, other, opts)
1769 for n in o:
1770 for n in o:
1770 parents = [p for p in other.changelog.parents(n) if p != nullid]
1771 parents = [p for p in other.changelog.parents(n) if p != nullid]
1771 if opts['no_merges'] and len(parents) == 2:
1772 if opts['no_merges'] and len(parents) == 2:
1772 continue
1773 continue
1773 displayer.show(changenode=n)
1774 displayer.show(changenode=n)
1774 if opts['patch']:
1775 if opts['patch']:
1775 prev = (parents and parents[0]) or nullid
1776 prev = (parents and parents[0]) or nullid
1776 patch.diff(repo, other, prev, n)
1777 patch.diff(repo, other, prev, n)
1777 ui.write("\n")
1778 ui.write("\n")
1778 finally:
1779 finally:
1779 if hasattr(other, 'close'):
1780 if hasattr(other, 'close'):
1780 other.close()
1781 other.close()
1781 if cleanup:
1782 if cleanup:
1782 os.unlink(cleanup)
1783 os.unlink(cleanup)
1783
1784
1784 def init(ui, dest=".", **opts):
1785 def init(ui, dest=".", **opts):
1785 """create a new repository in the given directory
1786 """create a new repository in the given directory
1786
1787
1787 Initialize a new repository in the given directory. If the given
1788 Initialize a new repository in the given directory. If the given
1788 directory does not exist, it is created.
1789 directory does not exist, it is created.
1789
1790
1790 If no directory is given, the current directory is used.
1791 If no directory is given, the current directory is used.
1791
1792
1792 It is possible to specify an ssh:// URL as the destination.
1793 It is possible to specify an ssh:// URL as the destination.
1793 Look at the help text for the pull command for important details
1794 Look at the help text for the pull command for important details
1794 about ssh:// URLs.
1795 about ssh:// URLs.
1795 """
1796 """
1796 setremoteconfig(ui, opts)
1797 setremoteconfig(ui, opts)
1797 hg.repository(ui, dest, create=1)
1798 hg.repository(ui, dest, create=1)
1798
1799
1799 def locate(ui, repo, *pats, **opts):
1800 def locate(ui, repo, *pats, **opts):
1800 """locate files matching specific patterns
1801 """locate files matching specific patterns
1801
1802
1802 Print all files under Mercurial control whose names match the
1803 Print all files under Mercurial control whose names match the
1803 given patterns.
1804 given patterns.
1804
1805
1805 This command searches the current directory and its
1806 This command searches the current directory and its
1806 subdirectories. To search an entire repository, move to the root
1807 subdirectories. To search an entire repository, move to the root
1807 of the repository.
1808 of the repository.
1808
1809
1809 If no patterns are given to match, this command prints all file
1810 If no patterns are given to match, this command prints all file
1810 names.
1811 names.
1811
1812
1812 If you want to feed the output of this command into the "xargs"
1813 If you want to feed the output of this command into the "xargs"
1813 command, use the "-0" option to both this command and "xargs".
1814 command, use the "-0" option to both this command and "xargs".
1814 This will avoid the problem of "xargs" treating single filenames
1815 This will avoid the problem of "xargs" treating single filenames
1815 that contain white space as multiple filenames.
1816 that contain white space as multiple filenames.
1816 """
1817 """
1817 end = opts['print0'] and '\0' or '\n'
1818 end = opts['print0'] and '\0' or '\n'
1818 rev = opts['rev']
1819 rev = opts['rev']
1819 if rev:
1820 if rev:
1820 node = repo.lookup(rev)
1821 node = repo.lookup(rev)
1821 else:
1822 else:
1822 node = None
1823 node = None
1823
1824
1824 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1825 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1825 head='(?:.*/|)'):
1826 head='(?:.*/|)'):
1826 if not node and repo.dirstate.state(abs) == '?':
1827 if not node and repo.dirstate.state(abs) == '?':
1827 continue
1828 continue
1828 if opts['fullpath']:
1829 if opts['fullpath']:
1829 ui.write(os.path.join(repo.root, abs), end)
1830 ui.write(os.path.join(repo.root, abs), end)
1830 else:
1831 else:
1831 ui.write(((pats and rel) or abs), end)
1832 ui.write(((pats and rel) or abs), end)
1832
1833
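# Illustrative sketch: with --print0, locate terminates each name with '\0'
# instead of '\n', so names containing whitespace survive a pipe into
# 'xargs -0'.  Demo of the two separators on invented file names:
names = ['plain.txt', 'name with spaces.txt']
for end in ('\n', '\0'):
    out = ''.join([n + end for n in names])
    print(repr(out))
# -> 'plain.txt\nname with spaces.txt\n'
# -> 'plain.txt\x00name with spaces.txt\x00'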
1833 def log(ui, repo, *pats, **opts):
1834 def log(ui, repo, *pats, **opts):
1834 """show revision history of entire repository or files
1835 """show revision history of entire repository or files
1835
1836
1836 Print the revision history of the specified files or the entire
1837 Print the revision history of the specified files or the entire
1837 project.
1838 project.
1838
1839
1839 File history is shown without following rename or copy history of
1840 File history is shown without following rename or copy history of
1840 files. Use -f/--follow with a file name to follow history across
1841 files. Use -f/--follow with a file name to follow history across
1841 renames and copies. --follow without a file name will only show
1842 renames and copies. --follow without a file name will only show
1842 ancestors or descendants of the starting revision. --follow-first
1843 ancestors or descendants of the starting revision. --follow-first
1843 only follows the first parent of merge revisions.
1844 only follows the first parent of merge revisions.
1844
1845
1845 If no revision range is specified, the default is tip:0 unless
1846 If no revision range is specified, the default is tip:0 unless
1846 --follow is set, in which case the working directory parent is
1847 --follow is set, in which case the working directory parent is
1847 used as the starting revision.
1848 used as the starting revision.
1848
1849
1849 By default this command outputs: changeset id and hash, tags,
1850 By default this command outputs: changeset id and hash, tags,
1850 non-trivial parents, user, date and time, and a summary for each
1851 non-trivial parents, user, date and time, and a summary for each
1851 commit. When the -v/--verbose switch is used, the list of changed
1852 commit. When the -v/--verbose switch is used, the list of changed
1852 files and full commit message is shown.
1853 files and full commit message is shown.
1853 """
1854 """
1854 class dui(object):
1855 class dui(object):
1855 # Implement and delegate some ui protocol. Save hunks of
1856 # Implement and delegate some ui protocol. Save hunks of
1856 # output for later display in the desired order.
1857 # output for later display in the desired order.
1857 def __init__(self, ui):
1858 def __init__(self, ui):
1858 self.ui = ui
1859 self.ui = ui
1859 self.hunk = {}
1860 self.hunk = {}
1860 self.header = {}
1861 self.header = {}
1861 def bump(self, rev):
1862 def bump(self, rev):
1862 self.rev = rev
1863 self.rev = rev
1863 self.hunk[rev] = []
1864 self.hunk[rev] = []
1864 self.header[rev] = []
1865 self.header[rev] = []
1865 def note(self, *args):
1866 def note(self, *args):
1866 if self.verbose:
1867 if self.verbose:
1867 self.write(*args)
1868 self.write(*args)
1868 def status(self, *args):
1869 def status(self, *args):
1869 if not self.quiet:
1870 if not self.quiet:
1870 self.write(*args)
1871 self.write(*args)
1871 def write(self, *args):
1872 def write(self, *args):
1872 self.hunk[self.rev].append(args)
1873 self.hunk[self.rev].append(args)
1873 def write_header(self, *args):
1874 def write_header(self, *args):
1874 self.header[self.rev].append(args)
1875 self.header[self.rev].append(args)
1875 def debug(self, *args):
1876 def debug(self, *args):
1876 if self.debugflag:
1877 if self.debugflag:
1877 self.write(*args)
1878 self.write(*args)
1878 def __getattr__(self, key):
1879 def __getattr__(self, key):
1879 return getattr(self.ui, key)
1880 return getattr(self.ui, key)
1880
1881
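# Illustrative sketch: dui above buffers write() output per revision while
# __getattr__ delegates everything else to the wrapped ui.  A minimal
# standalone version of that wrap-and-buffer pattern:
import sys

class buffered(object):
    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.chunks = []
    def write(self, *args):
        self.chunks.append(args)           # capture instead of printing
    def __getattr__(self, key):
        return getattr(self.wrapped, key)  # everything else passes through

b = buffered(sys.stdout)
b.write('hello ', 'world\n')
b.flush()           # not defined here, so delegated to sys.stdout.flush
print(b.chunks)     # -> [('hello ', 'world\n')]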
1881 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1882 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1882
1883
1883 if opts['limit']:
1884 if opts['limit']:
1884 try:
1885 try:
1885 limit = int(opts['limit'])
1886 limit = int(opts['limit'])
1886 except ValueError:
1887 except ValueError:
1887 raise util.Abort(_('limit must be a positive integer'))
1888 raise util.Abort(_('limit must be a positive integer'))
1888 if limit <= 0: raise util.Abort(_('limit must be positive'))
1889 if limit <= 0: raise util.Abort(_('limit must be positive'))
1889 else:
1890 else:
1890 limit = sys.maxint
1891 limit = sys.maxint
1891 count = 0
1892 count = 0
1892
1893
1893 displayer = show_changeset(ui, repo, opts)
1894 displayer = show_changeset(ui, repo, opts)
1894 for st, rev, fns in changeiter:
1895 for st, rev, fns in changeiter:
1895 if st == 'window':
1896 if st == 'window':
1896 du = dui(ui)
1897 du = dui(ui)
1897 displayer.ui = du
1898 displayer.ui = du
1898 elif st == 'add':
1899 elif st == 'add':
1899 du.bump(rev)
1900 du.bump(rev)
1900 changenode = repo.changelog.node(rev)
1901 changenode = repo.changelog.node(rev)
1901 parents = [p for p in repo.changelog.parents(changenode)
1902 parents = [p for p in repo.changelog.parents(changenode)
1902 if p != nullid]
1903 if p != nullid]
1903 if opts['no_merges'] and len(parents) == 2:
1904 if opts['no_merges'] and len(parents) == 2:
1904 continue
1905 continue
1905 if opts['only_merges'] and len(parents) != 2:
1906 if opts['only_merges'] and len(parents) != 2:
1906 continue
1907 continue
1907
1908
1908 if opts['keyword']:
1909 if opts['keyword']:
1909 changes = getchange(rev)
1910 changes = getchange(rev)
1910 miss = 0
1911 miss = 0
1911 for k in [kw.lower() for kw in opts['keyword']]:
1912 for k in [kw.lower() for kw in opts['keyword']]:
1912 if not (k in changes[1].lower() or
1913 if not (k in changes[1].lower() or
1913 k in changes[4].lower() or
1914 k in changes[4].lower() or
1914 k in " ".join(changes[3][:20]).lower()):
1915 k in " ".join(changes[3][:20]).lower()):
1915 miss = 1
1916 miss = 1
1916 break
1917 break
1917 if miss:
1918 if miss:
1918 continue
1919 continue
1919
1920
1920 br = None
1921 br = None
1921 if opts['branches']:
1922 if opts['branches']:
1922 br = repo.branchlookup([repo.changelog.node(rev)])
1923 br = repo.branchlookup([repo.changelog.node(rev)])
1923
1924
1924 displayer.show(rev, brinfo=br)
1925 displayer.show(rev, brinfo=br)
1925 if opts['patch']:
1926 if opts['patch']:
1926 prev = (parents and parents[0]) or nullid
1927 prev = (parents and parents[0]) or nullid
1927 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1928 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1928 du.write("\n\n")
1929 du.write("\n\n")
1929 elif st == 'iter':
1930 elif st == 'iter':
1930 if count == limit: break
1931 if count == limit: break
1931 if du.header[rev]:
1932 if du.header[rev]:
1932 for args in du.header[rev]:
1933 for args in du.header[rev]:
1933 ui.write_header(*args)
1934 ui.write_header(*args)
1934 if du.hunk[rev]:
1935 if du.hunk[rev]:
1935 count += 1
1936 count += 1
1936 for args in du.hunk[rev]:
1937 for args in du.hunk[rev]:
1937 ui.write(*args)
1938 ui.write(*args)
1938
1939
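# A minimal standalone sketch of the per-revision buffering that the dui
# class above performs (illustrative only; the names below are not part of
# this changeset): output written while a window is walked is captured per
# revision and can then be replayed in the desired order.
class _bufferedwriter(object):
    def __init__(self):
        self.hunk = {}
        self.rev = None
    def bump(self, rev):
        self.rev = rev
        self.hunk[rev] = []
    def write(self, *args):
        self.hunk[self.rev].append(args)
    def replay(self, rev, realwrite):
        for args in self.hunk.pop(rev, []):
            realwrite(*args)
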
1939 def manifest(ui, repo, rev=None):
1940 def manifest(ui, repo, rev=None):
1940 """output the latest or given revision of the project manifest
1941 """output the latest or given revision of the project manifest
1941
1942
1942 Print a list of version controlled files for the given revision.
1943 Print a list of version controlled files for the given revision.
1943
1944
1944 The manifest is the list of files being version controlled. If no revision
1945 The manifest is the list of files being version controlled. If no revision
1945 is given then the tip is used.
1946 is given then the tip is used.
1946 """
1947 """
1947 if rev:
1948 if rev:
1948 try:
1949 try:
1949 # assume all revision numbers are for changesets
1950 # assume all revision numbers are for changesets
1950 n = repo.lookup(rev)
1951 n = repo.lookup(rev)
1951 change = repo.changelog.read(n)
1952 change = repo.changelog.read(n)
1952 n = change[0]
1953 n = change[0]
1953 except hg.RepoError:
1954 except hg.RepoError:
1954 n = repo.manifest.lookup(rev)
1955 n = repo.manifest.lookup(rev)
1955 else:
1956 else:
1956 n = repo.manifest.tip()
1957 n = repo.manifest.tip()
1957 m = repo.manifest.read(n)
1958 m = repo.manifest.read(n)
1958 files = m.keys()
1959 files = m.keys()
1959 files.sort()
1960 files.sort()
1960
1961
1961 for f in files:
1962 for f in files:
1962 ui.write("%40s %3s %s\n" % (hex(m[f]),
1963 ui.write("%40s %3s %s\n" % (hex(m[f]),
1963 m.execf(f) and "755" or "644", f))
1964 m.execf(f) and "755" or "644", f))
1964
1965
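# Side note (illustrative, not part of this changeset): the expression
# 'm.execf(f) and "755" or "644"' above is the pre-Python-2.5 conditional
# idiom; it behaves like a ternary as long as the "true" value is truthy.
def _filemode(is_exec):
    # "755" for executable entries, "644" otherwise
    return is_exec and "755" or "644"

assert _filemode(True) == "755"
assert _filemode(False) == "644"
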
1965 def merge(ui, repo, node=None, force=None, branch=None):
1966 def merge(ui, repo, node=None, force=None, branch=None):
1966 """Merge working directory with another revision
1967 """Merge working directory with another revision
1967
1968
1968 Merge the contents of the current working directory and the
1969 Merge the contents of the current working directory and the
1969 requested revision. Files that changed between either parent are
1970 requested revision. Files that changed between either parent are
1970 marked as changed for the next commit and a commit must be
1971 marked as changed for the next commit and a commit must be
1971 performed before any further updates are allowed.
1972 performed before any further updates are allowed.
1972
1973
1973 If no revision is specified, the working directory's parent is a
1974 If no revision is specified, the working directory's parent is a
1974 head revision, and the repository contains exactly one other head,
1975 head revision, and the repository contains exactly one other head,
1975 the other head is merged with by default. Otherwise, an explicit
1976 the other head is merged with by default. Otherwise, an explicit
1976 revision to merge with must be provided.
1977 revision to merge with must be provided.
1977 """
1978 """
1978
1979
1979 if node:
1980 if node:
1980 node = _lookup(repo, node, branch)
1981 node = _lookup(repo, node, branch)
1981 else:
1982 else:
1982 heads = repo.heads()
1983 heads = repo.heads()
1983 if len(heads) > 2:
1984 if len(heads) > 2:
1984 raise util.Abort(_('repo has %d heads - '
1985 raise util.Abort(_('repo has %d heads - '
1985 'please merge with an explicit rev') %
1986 'please merge with an explicit rev') %
1986 len(heads))
1987 len(heads))
1987 if len(heads) == 1:
1988 if len(heads) == 1:
1988 raise util.Abort(_('there is nothing to merge - '
1989 raise util.Abort(_('there is nothing to merge - '
1989 'use "hg update" instead'))
1990 'use "hg update" instead'))
1990 parent = repo.dirstate.parents()[0]
1991 parent = repo.dirstate.parents()[0]
1991 if parent not in heads:
1992 if parent not in heads:
1992 raise util.Abort(_('working dir not at a head rev - '
1993 raise util.Abort(_('working dir not at a head rev - '
1993 'use "hg update" or merge with an explicit rev'))
1994 'use "hg update" or merge with an explicit rev'))
1994 node = parent == heads[0] and heads[-1] or heads[0]
1995 node = parent == heads[0] and heads[-1] or heads[0]
1995 return hg.merge(repo, node, force=force)
1996 return hg.merge(repo, node, force=force)
1996
1997
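# Illustrative sketch (not part of this changeset) of the head selection just
# above: with exactly two heads, take whichever one is not the working
# directory's parent.  The and/or form relies on node ids being truthy.
def _otherhead(parent, heads):
    # heads is expected to hold exactly two entries here
    return parent == heads[0] and heads[-1] or heads[0]

assert _otherhead('p1', ['p1', 'p2']) == 'p2'
assert _otherhead('p2', ['p1', 'p2']) == 'p1'
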
1997 def outgoing(ui, repo, dest=None, **opts):
1998 def outgoing(ui, repo, dest=None, **opts):
1998 """show changesets not found in destination
1999 """show changesets not found in destination
1999
2000
2000 Show changesets not found in the specified destination repository or
2001 Show changesets not found in the specified destination repository or
2001 the default push location. These are the changesets that would be pushed
2002 the default push location. These are the changesets that would be pushed
2002 if a push was requested.
2003 if a push was requested.
2003
2004
2004 See pull for valid destination format details.
2005 See pull for valid destination format details.
2005 """
2006 """
2006 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2007 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2007 setremoteconfig(ui, opts)
2008 setremoteconfig(ui, opts)
2008 revs = None
2009 revs = None
2009 if opts['rev']:
2010 if opts['rev']:
2010 revs = [repo.lookup(rev) for rev in opts['rev']]
2011 revs = [repo.lookup(rev) for rev in opts['rev']]
2011
2012
2012 other = hg.repository(ui, dest)
2013 other = hg.repository(ui, dest)
2013 o = repo.findoutgoing(other, force=opts['force'])
2014 o = repo.findoutgoing(other, force=opts['force'])
2014 if not o:
2015 if not o:
2015 ui.status(_("no changes found\n"))
2016 ui.status(_("no changes found\n"))
2016 return
2017 return
2017 o = repo.changelog.nodesbetween(o, revs)[0]
2018 o = repo.changelog.nodesbetween(o, revs)[0]
2018 if opts['newest_first']:
2019 if opts['newest_first']:
2019 o.reverse()
2020 o.reverse()
2020 displayer = show_changeset(ui, repo, opts)
2021 displayer = show_changeset(ui, repo, opts)
2021 for n in o:
2022 for n in o:
2022 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2023 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2023 if opts['no_merges'] and len(parents) == 2:
2024 if opts['no_merges'] and len(parents) == 2:
2024 continue
2025 continue
2025 displayer.show(changenode=n)
2026 displayer.show(changenode=n)
2026 if opts['patch']:
2027 if opts['patch']:
2027 prev = (parents and parents[0]) or nullid
2028 prev = (parents and parents[0]) or nullid
2028 patch.diff(repo, prev, n)
2029 patch.diff(repo, prev, n)
2029 ui.write("\n")
2030 ui.write("\n")
2030
2031
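# Illustrative sketch (not part of this changeset): a merge changeset is one
# with two real (non-null) parents, which is what the len(parents) == 2 tests
# above rely on.  The nullid_example value below mirrors Mercurial's null
# node id but is defined locally just for this example.
nullid_example = '\0' * 20

def _ismerge(parentnodes):
    parents = [p for p in parentnodes if p != nullid_example]
    return len(parents) == 2

assert _ismerge(['a' * 20, 'b' * 20])
assert not _ismerge(['a' * 20, nullid_example])
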
2031 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2032 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2032 """show the parents of the working dir or revision
2033 """show the parents of the working dir or revision
2033
2034
2034 Print the working directory's parent revisions.
2035 Print the working directory's parent revisions.
2035 """
2036 """
2036 # legacy
2037 # legacy
2037 if file_ and not rev:
2038 if file_ and not rev:
2038 try:
2039 try:
2039 rev = repo.lookup(file_)
2040 rev = repo.lookup(file_)
2040 file_ = None
2041 file_ = None
2041 except hg.RepoError:
2042 except hg.RepoError:
2042 pass
2043 pass
2043 else:
2044 else:
2044 ui.warn(_("'hg parent REV' is deprecated, "
2045 ui.warn(_("'hg parent REV' is deprecated, "
2045 "please use 'hg parents -r REV instead\n"))
2046 "please use 'hg parents -r REV instead\n"))
2046
2047
2047 if rev:
2048 if rev:
2048 if file_:
2049 if file_:
2049 ctx = repo.filectx(file_, changeid=rev)
2050 ctx = repo.filectx(file_, changeid=rev)
2050 else:
2051 else:
2051 ctx = repo.changectx(rev)
2052 ctx = repo.changectx(rev)
2052 p = [cp.node() for cp in ctx.parents()]
2053 p = [cp.node() for cp in ctx.parents()]
2053 else:
2054 else:
2054 p = repo.dirstate.parents()
2055 p = repo.dirstate.parents()
2055
2056
2056 br = None
2057 br = None
2057 if branches is not None:
2058 if branches is not None:
2058 br = repo.branchlookup(p)
2059 br = repo.branchlookup(p)
2059 displayer = show_changeset(ui, repo, opts)
2060 displayer = show_changeset(ui, repo, opts)
2060 for n in p:
2061 for n in p:
2061 if n != nullid:
2062 if n != nullid:
2062 displayer.show(changenode=n, brinfo=br)
2063 displayer.show(changenode=n, brinfo=br)
2063
2064
2064 def paths(ui, repo, search=None):
2065 def paths(ui, repo, search=None):
2065 """show definition of symbolic path names
2066 """show definition of symbolic path names
2066
2067
2067 Show definition of symbolic path name NAME. If no name is given, show
2068 Show definition of symbolic path name NAME. If no name is given, show
2068 definitions of all available names.
2069 definitions of all available names.
2069
2070
2070 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2071 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2071 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2072 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2072 """
2073 """
2073 if search:
2074 if search:
2074 for name, path in ui.configitems("paths"):
2075 for name, path in ui.configitems("paths"):
2075 if name == search:
2076 if name == search:
2076 ui.write("%s\n" % path)
2077 ui.write("%s\n" % path)
2077 return
2078 return
2078 ui.warn(_("not found!\n"))
2079 ui.warn(_("not found!\n"))
2079 return 1
2080 return 1
2080 else:
2081 else:
2081 for name, path in ui.configitems("paths"):
2082 for name, path in ui.configitems("paths"):
2082 ui.write("%s = %s\n" % (name, path))
2083 ui.write("%s = %s\n" % (name, path))
2083
2084
2084 def postincoming(ui, repo, modheads, optupdate):
2085 def postincoming(ui, repo, modheads, optupdate):
2085 if modheads == 0:
2086 if modheads == 0:
2086 return
2087 return
2087 if optupdate:
2088 if optupdate:
2088 if modheads == 1:
2089 if modheads == 1:
2089 return hg.update(repo, repo.changelog.tip()) # update
2090 return hg.update(repo, repo.changelog.tip()) # update
2090 else:
2091 else:
2091 ui.status(_("not updating, since new heads added\n"))
2092 ui.status(_("not updating, since new heads added\n"))
2092 if modheads > 1:
2093 if modheads > 1:
2093 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2094 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2094 else:
2095 else:
2095 ui.status(_("(run 'hg update' to get a working copy)\n"))
2096 ui.status(_("(run 'hg update' to get a working copy)\n"))
2096
2097
2097 def pull(ui, repo, source="default", **opts):
2098 def pull(ui, repo, source="default", **opts):
2098 """pull changes from the specified source
2099 """pull changes from the specified source
2099
2100
2100 Pull changes from a remote repository to a local one.
2101 Pull changes from a remote repository to a local one.
2101
2102
2102 This finds all changes from the repository at the specified path
2103 This finds all changes from the repository at the specified path
2103 or URL and adds them to the local repository. By default, this
2104 or URL and adds them to the local repository. By default, this
2104 does not update the copy of the project in the working directory.
2105 does not update the copy of the project in the working directory.
2105
2106
2106 Valid URLs are of the form:
2107 Valid URLs are of the form:
2107
2108
2108 local/filesystem/path
2109 local/filesystem/path
2109 http://[user@]host[:port]/[path]
2110 http://[user@]host[:port]/[path]
2110 https://[user@]host[:port]/[path]
2111 https://[user@]host[:port]/[path]
2111 ssh://[user@]host[:port]/[path]
2112 ssh://[user@]host[:port]/[path]
2112
2113
2113 Some notes about using SSH with Mercurial:
2114 Some notes about using SSH with Mercurial:
2114 - SSH requires an accessible shell account on the destination machine
2115 - SSH requires an accessible shell account on the destination machine
2115 and a copy of hg in the remote path, or specified with remotecmd.
2116 and a copy of hg in the remote path, or specified with remotecmd.
2116 - path is relative to the remote user's home directory by default.
2117 - path is relative to the remote user's home directory by default.
2117 Use an extra slash at the start of a path to specify an absolute path:
2118 Use an extra slash at the start of a path to specify an absolute path:
2118 ssh://example.com//tmp/repository
2119 ssh://example.com//tmp/repository
2119 - Mercurial doesn't use its own compression via SSH; the right thing
2120 - Mercurial doesn't use its own compression via SSH; the right thing
2120 to do is to configure it in your ~/.ssh/config, e.g.:
2121 to do is to configure it in your ~/.ssh/config, e.g.:
2121 Host *.mylocalnetwork.example.com
2122 Host *.mylocalnetwork.example.com
2122 Compression off
2123 Compression off
2123 Host *
2124 Host *
2124 Compression on
2125 Compression on
2125 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2126 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2126 with the --ssh command line option.
2127 with the --ssh command line option.
2127 """
2128 """
2128 source = ui.expandpath(source)
2129 source = ui.expandpath(source)
2129 setremoteconfig(ui, opts)
2130 setremoteconfig(ui, opts)
2130
2131
2131 other = hg.repository(ui, source)
2132 other = hg.repository(ui, source)
2132 ui.status(_('pulling from %s\n') % (source))
2133 ui.status(_('pulling from %s\n') % (source))
2133 revs = None
2134 revs = None
2134 if opts['rev'] and not other.local():
2135 if opts['rev'] and not other.local():
2135 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2136 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2136 elif opts['rev']:
2137 elif opts['rev']:
2137 revs = [other.lookup(rev) for rev in opts['rev']]
2138 revs = [other.lookup(rev) for rev in opts['rev']]
2138 modheads = repo.pull(other, heads=revs, force=opts['force'])
2139 modheads = repo.pull(other, heads=revs, force=opts['force'])
2139 return postincoming(ui, repo, modheads, opts['update'])
2140 return postincoming(ui, repo, modheads, opts['update'])
2140
2141
2141 def push(ui, repo, dest=None, **opts):
2142 def push(ui, repo, dest=None, **opts):
2142 """push changes to the specified destination
2143 """push changes to the specified destination
2143
2144
2144 Push changes from the local repository to the given destination.
2145 Push changes from the local repository to the given destination.
2145
2146
2146 This is the symmetrical operation for pull. It helps to move
2147 This is the symmetrical operation for pull. It helps to move
2147 changes from the current repository to a different one. If the
2148 changes from the current repository to a different one. If the
2148 destination is local this is identical to a pull in that directory
2149 destination is local this is identical to a pull in that directory
2149 from the current one.
2150 from the current one.
2150
2151
2151 By default, push will refuse to run if it detects the result would
2152 By default, push will refuse to run if it detects the result would
2152 increase the number of remote heads. This generally indicates that
2153 increase the number of remote heads. This generally indicates that
2153 the client has forgotten to sync and merge before pushing.
2154 the client has forgotten to sync and merge before pushing.
2154
2155
2155 Valid URLs are of the form:
2156 Valid URLs are of the form:
2156
2157
2157 local/filesystem/path
2158 local/filesystem/path
2158 ssh://[user@]host[:port]/[path]
2159 ssh://[user@]host[:port]/[path]
2159
2160
2160 Look at the help text for the pull command for important details
2161 Look at the help text for the pull command for important details
2161 about ssh:// URLs.
2162 about ssh:// URLs.
2162
2163
2163 Pushing to http:// and https:// URLs is possible, too, if this
2164 Pushing to http:// and https:// URLs is possible, too, if this
2164 feature is enabled on the remote Mercurial server.
2165 feature is enabled on the remote Mercurial server.
2165 """
2166 """
2166 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2167 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2167 setremoteconfig(ui, opts)
2168 setremoteconfig(ui, opts)
2168
2169
2169 other = hg.repository(ui, dest)
2170 other = hg.repository(ui, dest)
2170 ui.status('pushing to %s\n' % (dest))
2171 ui.status('pushing to %s\n' % (dest))
2171 revs = None
2172 revs = None
2172 if opts['rev']:
2173 if opts['rev']:
2173 revs = [repo.lookup(rev) for rev in opts['rev']]
2174 revs = [repo.lookup(rev) for rev in opts['rev']]
2174 r = repo.push(other, opts['force'], revs=revs)
2175 r = repo.push(other, opts['force'], revs=revs)
2175 return r == 0
2176 return r == 0
2176
2177
2177 def rawcommit(ui, repo, *flist, **rc):
2178 def rawcommit(ui, repo, *flist, **rc):
2178 """raw commit interface (DEPRECATED)
2179 """raw commit interface (DEPRECATED)
2179
2180
2180 (DEPRECATED)
2181 (DEPRECATED)
2181 Low-level commit, for use in helper scripts.
2182 Low-level commit, for use in helper scripts.
2182
2183
2183 This command is not intended to be used by normal users, as it is
2184 This command is not intended to be used by normal users, as it is
2184 primarily useful for importing from other SCMs.
2185 primarily useful for importing from other SCMs.
2185
2186
2186 This command is now deprecated and will be removed in a future
2187 This command is now deprecated and will be removed in a future
2187 release, please use debugsetparents and commit instead.
2188 release, please use debugsetparents and commit instead.
2188 """
2189 """
2189
2190
2190 ui.warn(_("(the rawcommit command is deprecated)\n"))
2191 ui.warn(_("(the rawcommit command is deprecated)\n"))
2191
2192
2192 message = rc['message']
2193 message = rc['message']
2193 if not message and rc['logfile']:
2194 if not message and rc['logfile']:
2194 try:
2195 try:
2195 message = open(rc['logfile']).read()
2196 message = open(rc['logfile']).read()
2196 except IOError:
2197 except IOError:
2197 pass
2198 pass
2198 if not message and not rc['logfile']:
2199 if not message and not rc['logfile']:
2199 raise util.Abort(_("missing commit message"))
2200 raise util.Abort(_("missing commit message"))
2200
2201
2201 files = relpath(repo, list(flist))
2202 files = relpath(repo, list(flist))
2202 if rc['files']:
2203 if rc['files']:
2203 files += open(rc['files']).read().splitlines()
2204 files += open(rc['files']).read().splitlines()
2204
2205
2205 rc['parent'] = map(repo.lookup, rc['parent'])
2206 rc['parent'] = map(repo.lookup, rc['parent'])
2206
2207
2207 try:
2208 try:
2208 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2209 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2209 except ValueError, inst:
2210 except ValueError, inst:
2210 raise util.Abort(str(inst))
2211 raise util.Abort(str(inst))
2211
2212
2212 def recover(ui, repo):
2213 def recover(ui, repo):
2213 """roll back an interrupted transaction
2214 """roll back an interrupted transaction
2214
2215
2215 Recover from an interrupted commit or pull.
2216 Recover from an interrupted commit or pull.
2216
2217
2217 This command tries to fix the repository status after an interrupted
2218 This command tries to fix the repository status after an interrupted
2218 operation. It should only be necessary when Mercurial suggests it.
2219 operation. It should only be necessary when Mercurial suggests it.
2219 """
2220 """
2220 if repo.recover():
2221 if repo.recover():
2221 return hg.verify(repo)
2222 return hg.verify(repo)
2222 return 1
2223 return 1
2223
2224
2224 def remove(ui, repo, *pats, **opts):
2225 def remove(ui, repo, *pats, **opts):
2225 """remove the specified files on the next commit
2226 """remove the specified files on the next commit
2226
2227
2227 Schedule the indicated files for removal from the repository.
2228 Schedule the indicated files for removal from the repository.
2228
2229
2229 This command schedules the files to be removed at the next commit.
2230 This command schedules the files to be removed at the next commit.
2230 This only removes files from the current branch, not from the
2231 This only removes files from the current branch, not from the
2231 entire project history. If the files still exist in the working
2232 entire project history. If the files still exist in the working
2232 directory, they will be deleted from it. If invoked with --after,
2233 directory, they will be deleted from it. If invoked with --after,
2233 files that have been manually deleted are marked as removed.
2234 files that have been manually deleted are marked as removed.
2234
2235
2235 Modified files and added files are not removed by default. To
2236 Modified files and added files are not removed by default. To
2236 remove them, use the -f/--force option.
2237 remove them, use the -f/--force option.
2237 """
2238 """
2238 names = []
2239 names = []
2239 if not opts['after'] and not pats:
2240 if not opts['after'] and not pats:
2240 raise util.Abort(_('no files specified'))
2241 raise util.Abort(_('no files specified'))
2241 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2242 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2242 exact = dict.fromkeys(files)
2243 exact = dict.fromkeys(files)
2243 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2244 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2244 modified, added, removed, deleted, unknown = mardu
2245 modified, added, removed, deleted, unknown = mardu
2245 remove, forget = [], []
2246 remove, forget = [], []
2246 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2247 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2247 reason = None
2248 reason = None
2248 if abs not in deleted and opts['after']:
2249 if abs not in deleted and opts['after']:
2249 reason = _('is still present')
2250 reason = _('is still present')
2250 elif abs in modified and not opts['force']:
2251 elif abs in modified and not opts['force']:
2251 reason = _('is modified (use -f to force removal)')
2252 reason = _('is modified (use -f to force removal)')
2252 elif abs in added:
2253 elif abs in added:
2253 if opts['force']:
2254 if opts['force']:
2254 forget.append(abs)
2255 forget.append(abs)
2255 continue
2256 continue
2256 reason = _('has been marked for add (use -f to force removal)')
2257 reason = _('has been marked for add (use -f to force removal)')
2257 elif abs in unknown:
2258 elif abs in unknown:
2258 reason = _('is not managed')
2259 reason = _('is not managed')
2259 elif abs in removed:
2260 elif abs in removed:
2260 continue
2261 continue
2261 if reason:
2262 if reason:
2262 if exact:
2263 if exact:
2263 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2264 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2264 else:
2265 else:
2265 if ui.verbose or not exact:
2266 if ui.verbose or not exact:
2266 ui.status(_('removing %s\n') % rel)
2267 ui.status(_('removing %s\n') % rel)
2267 remove.append(abs)
2268 remove.append(abs)
2268 repo.forget(forget)
2269 repo.forget(forget)
2269 repo.remove(remove, unlink=not opts['after'])
2270 repo.remove(remove, unlink=not opts['after'])
2270
2271
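# Illustrative note (not part of this changeset): dict.fromkeys is used above
# to turn the status lists into dictionaries so that tests such as
# "abs in modified" are hash lookups rather than list scans.
modified_example = dict.fromkeys(['a.py', 'b.py'])
assert 'a.py' in modified_example
assert 'c.py' not in modified_example
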
2271 def rename(ui, repo, *pats, **opts):
2272 def rename(ui, repo, *pats, **opts):
2272 """rename files; equivalent of copy + remove
2273 """rename files; equivalent of copy + remove
2273
2274
2274 Mark dest as copies of sources; mark sources for deletion. If
2275 Mark dest as copies of sources; mark sources for deletion. If
2275 dest is a directory, copies are put in that directory. If dest is
2276 dest is a directory, copies are put in that directory. If dest is
2276 a file, there can only be one source.
2277 a file, there can only be one source.
2277
2278
2278 By default, this command copies the contents of files as they
2279 By default, this command copies the contents of files as they
2279 stand in the working directory. If invoked with --after, the
2280 stand in the working directory. If invoked with --after, the
2280 operation is recorded, but no copying is performed.
2281 operation is recorded, but no copying is performed.
2281
2282
2282 This command takes effect in the next commit.
2283 This command takes effect in the next commit.
2283
2284
2284 NOTE: This command should be treated as experimental. While it
2285 NOTE: This command should be treated as experimental. While it
2285 should properly record renamed files, this information is not yet
2286 should properly record renamed files, this information is not yet
2286 fully used by merge, nor fully reported by log.
2287 fully used by merge, nor fully reported by log.
2287 """
2288 """
2288 wlock = repo.wlock(0)
2289 wlock = repo.wlock(0)
2289 errs, copied = docopy(ui, repo, pats, opts, wlock)
2290 errs, copied = docopy(ui, repo, pats, opts, wlock)
2290 names = []
2291 names = []
2291 for abs, rel, exact in copied:
2292 for abs, rel, exact in copied:
2292 if ui.verbose or not exact:
2293 if ui.verbose or not exact:
2293 ui.status(_('removing %s\n') % rel)
2294 ui.status(_('removing %s\n') % rel)
2294 names.append(abs)
2295 names.append(abs)
2295 if not opts.get('dry_run'):
2296 if not opts.get('dry_run'):
2296 repo.remove(names, True, wlock)
2297 repo.remove(names, True, wlock)
2297 return errs
2298 return errs
2298
2299
2299 def revert(ui, repo, *pats, **opts):
2300 def revert(ui, repo, *pats, **opts):
2300 """revert files or dirs to their states as of some revision
2301 """revert files or dirs to their states as of some revision
2301
2302
2302 With no revision specified, revert the named files or directories
2303 With no revision specified, revert the named files or directories
2303 to the contents they had in the parent of the working directory.
2304 to the contents they had in the parent of the working directory.
2304 This restores the contents of the affected files to an unmodified
2305 This restores the contents of the affected files to an unmodified
2305 state. If the working directory has two parents, you must
2306 state. If the working directory has two parents, you must
2306 explicitly specify the revision to revert to.
2307 explicitly specify the revision to revert to.
2307
2308
2308 Modified files are saved with a .orig suffix before reverting.
2309 Modified files are saved with a .orig suffix before reverting.
2309 To disable these backups, use --no-backup.
2310 To disable these backups, use --no-backup.
2310
2311
2311 Using the -r option, revert the given files or directories to
2312 Using the -r option, revert the given files or directories to
2312 their contents as of a specific revision. This can be helpful to "roll
2313 their contents as of a specific revision. This can be helpful to "roll
2313 back" some or all of a change that should not have been committed.
2314 back" some or all of a change that should not have been committed.
2314
2315
2315 Revert modifies the working directory. It does not commit any
2316 Revert modifies the working directory. It does not commit any
2316 changes, or change the parent of the working directory. If you
2317 changes, or change the parent of the working directory. If you
2317 revert to a revision other than the parent of the working
2318 revert to a revision other than the parent of the working
2318 directory, the reverted files will thus appear modified
2319 directory, the reverted files will thus appear modified
2319 afterwards.
2320 afterwards.
2320
2321
2321 If a file has been deleted, it is recreated. If the executable
2322 If a file has been deleted, it is recreated. If the executable
2322 mode of a file was changed, it is reset.
2323 mode of a file was changed, it is reset.
2323
2324
2324 If names are given, all files matching the names are reverted.
2325 If names are given, all files matching the names are reverted.
2325
2326
2326 If no arguments are given, all files in the repository are reverted.
2327 If no arguments are given, all files in the repository are reverted.
2327 """
2328 """
2328 parent, p2 = repo.dirstate.parents()
2329 parent, p2 = repo.dirstate.parents()
2329 if opts['rev']:
2330 if opts['rev']:
2330 node = repo.lookup(opts['rev'])
2331 node = repo.lookup(opts['rev'])
2331 elif p2 != nullid:
2332 elif p2 != nullid:
2332 raise util.Abort(_('working dir has two parents; '
2333 raise util.Abort(_('working dir has two parents; '
2333 'you must specify the revision to revert to'))
2334 'you must specify the revision to revert to'))
2334 else:
2335 else:
2335 node = parent
2336 node = parent
2336 mf = repo.manifest.read(repo.changelog.read(node)[0])
2337 mf = repo.manifest.read(repo.changelog.read(node)[0])
2337 if node == parent:
2338 if node == parent:
2338 pmf = mf
2339 pmf = mf
2339 else:
2340 else:
2340 pmf = None
2341 pmf = None
2341
2342
2342 wlock = repo.wlock()
2343 wlock = repo.wlock()
2343
2344
2344 # need all matching names in dirstate and manifest of target rev,
2345 # need all matching names in dirstate and manifest of target rev,
2345 # so have to walk both. do not print errors if files exist in one
2346 # so have to walk both. do not print errors if files exist in one
2346 # but not other.
2347 # but not other.
2347
2348
2348 names = {}
2349 names = {}
2349 target_only = {}
2350 target_only = {}
2350
2351
2351 # walk dirstate.
2352 # walk dirstate.
2352
2353
2353 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2354 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2354 badmatch=mf.has_key):
2355 badmatch=mf.has_key):
2355 names[abs] = (rel, exact)
2356 names[abs] = (rel, exact)
2356 if src == 'b':
2357 if src == 'b':
2357 target_only[abs] = True
2358 target_only[abs] = True
2358
2359
2359 # walk target manifest.
2360 # walk target manifest.
2360
2361
2361 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2362 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2362 badmatch=names.has_key):
2363 badmatch=names.has_key):
2363 if abs in names: continue
2364 if abs in names: continue
2364 names[abs] = (rel, exact)
2365 names[abs] = (rel, exact)
2365 target_only[abs] = True
2366 target_only[abs] = True
2366
2367
2367 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2368 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2368 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2369 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2369
2370
2370 revert = ([], _('reverting %s\n'))
2371 revert = ([], _('reverting %s\n'))
2371 add = ([], _('adding %s\n'))
2372 add = ([], _('adding %s\n'))
2372 remove = ([], _('removing %s\n'))
2373 remove = ([], _('removing %s\n'))
2373 forget = ([], _('forgetting %s\n'))
2374 forget = ([], _('forgetting %s\n'))
2374 undelete = ([], _('undeleting %s\n'))
2375 undelete = ([], _('undeleting %s\n'))
2375 update = {}
2376 update = {}
2376
2377
2377 disptable = (
2378 disptable = (
2378 # dispatch table:
2379 # dispatch table:
2379 # file state
2380 # file state
2380 # action if in target manifest
2381 # action if in target manifest
2381 # action if not in target manifest
2382 # action if not in target manifest
2382 # make backup if in target manifest
2383 # make backup if in target manifest
2383 # make backup if not in target manifest
2384 # make backup if not in target manifest
2384 (modified, revert, remove, True, True),
2385 (modified, revert, remove, True, True),
2385 (added, revert, forget, True, False),
2386 (added, revert, forget, True, False),
2386 (removed, undelete, None, False, False),
2387 (removed, undelete, None, False, False),
2387 (deleted, revert, remove, False, False),
2388 (deleted, revert, remove, False, False),
2388 (unknown, add, None, True, False),
2389 (unknown, add, None, True, False),
2389 (target_only, add, None, False, False),
2390 (target_only, add, None, False, False),
2390 )
2391 )
2391
2392
2392 entries = names.items()
2393 entries = names.items()
2393 entries.sort()
2394 entries.sort()
2394
2395
2395 for abs, (rel, exact) in entries:
2396 for abs, (rel, exact) in entries:
2396 mfentry = mf.get(abs)
2397 mfentry = mf.get(abs)
2397 def handle(xlist, dobackup):
2398 def handle(xlist, dobackup):
2398 xlist[0].append(abs)
2399 xlist[0].append(abs)
2399 update[abs] = 1
2400 update[abs] = 1
2400 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2401 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2401 bakname = "%s.orig" % rel
2402 bakname = "%s.orig" % rel
2402 ui.note(_('saving current version of %s as %s\n') %
2403 ui.note(_('saving current version of %s as %s\n') %
2403 (rel, bakname))
2404 (rel, bakname))
2404 if not opts.get('dry_run'):
2405 if not opts.get('dry_run'):
2405 shutil.copyfile(rel, bakname)
2406 shutil.copyfile(rel, bakname)
2406 shutil.copymode(rel, bakname)
2407 shutil.copymode(rel, bakname)
2407 if ui.verbose or not exact:
2408 if ui.verbose or not exact:
2408 ui.status(xlist[1] % rel)
2409 ui.status(xlist[1] % rel)
2409 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2410 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2410 if abs not in table: continue
2411 if abs not in table: continue
2411 # file has changed in dirstate
2412 # file has changed in dirstate
2412 if mfentry:
2413 if mfentry:
2413 handle(hitlist, backuphit)
2414 handle(hitlist, backuphit)
2414 elif misslist is not None:
2415 elif misslist is not None:
2415 handle(misslist, backupmiss)
2416 handle(misslist, backupmiss)
2416 else:
2417 else:
2417 if exact: ui.warn(_('file not managed: %s\n' % rel))
2418 if exact: ui.warn(_('file not managed: %s\n' % rel))
2418 break
2419 break
2419 else:
2420 else:
2420 # file has not changed in dirstate
2421 # file has not changed in dirstate
2421 if node == parent:
2422 if node == parent:
2422 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2423 if exact: ui.warn(_('no changes needed to %s\n' % rel))
2423 continue
2424 continue
2424 if pmf is None:
2425 if pmf is None:
2425 # only need parent manifest in this unlikely case,
2426 # only need parent manifest in this unlikely case,
2426 # so do not read by default
2427 # so do not read by default
2427 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2428 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2428 if abs in pmf:
2429 if abs in pmf:
2429 if mfentry:
2430 if mfentry:
2430 # if version of file is same in parent and target
2431 # if version of file is same in parent and target
2431 # manifests, do nothing
2432 # manifests, do nothing
2432 if pmf[abs] != mfentry:
2433 if pmf[abs] != mfentry:
2433 handle(revert, False)
2434 handle(revert, False)
2434 else:
2435 else:
2435 handle(remove, False)
2436 handle(remove, False)
2436
2437
2437 if not opts.get('dry_run'):
2438 if not opts.get('dry_run'):
2438 repo.dirstate.forget(forget[0])
2439 repo.dirstate.forget(forget[0])
2439 r = hg.revert(repo, node, update.has_key, wlock)
2440 r = hg.revert(repo, node, update.has_key, wlock)
2440 repo.dirstate.update(add[0], 'a')
2441 repo.dirstate.update(add[0], 'a')
2441 repo.dirstate.update(undelete[0], 'n')
2442 repo.dirstate.update(undelete[0], 'n')
2442 repo.dirstate.update(remove[0], 'r')
2443 repo.dirstate.update(remove[0], 'r')
2443 return r
2444 return r
2444
2445
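# Illustrative sketch (not part of this changeset) of the for/else idiom used
# with the dispatch table above: the else branch runs only when the loop was
# not left through break, i.e. the file appeared in none of the state sets.
def _classify(abs, tables):
    state = None
    for name, table in tables:
        if abs in table:
            state = name
            break              # mirrors the break in the loop above
    else:
        state = 'unchanged'    # reached only when no break happened
    return state

assert _classify('a', [('modified', {'a': 1})]) == 'modified'
assert _classify('b', [('modified', {'a': 1})]) == 'unchanged'
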
2445 def rollback(ui, repo):
2446 def rollback(ui, repo):
2446 """roll back the last transaction in this repository
2447 """roll back the last transaction in this repository
2447
2448
2448 Roll back the last transaction in this repository, restoring the
2449 Roll back the last transaction in this repository, restoring the
2449 project to its state prior to the transaction.
2450 project to its state prior to the transaction.
2450
2451
2451 Transactions are used to encapsulate the effects of all commands
2452 Transactions are used to encapsulate the effects of all commands
2452 that create new changesets or propagate existing changesets into a
2453 that create new changesets or propagate existing changesets into a
2453 repository. For example, the following commands are transactional,
2454 repository. For example, the following commands are transactional,
2454 and their effects can be rolled back:
2455 and their effects can be rolled back:
2455
2456
2456 commit
2457 commit
2457 import
2458 import
2458 pull
2459 pull
2459 push (with this repository as destination)
2460 push (with this repository as destination)
2460 unbundle
2461 unbundle
2461
2462
2462 This command should be used with care. There is only one level of
2463 This command should be used with care. There is only one level of
2463 rollback, and there is no way to undo a rollback.
2464 rollback, and there is no way to undo a rollback.
2464
2465
2465 This command is not intended for use on public repositories. Once
2466 This command is not intended for use on public repositories. Once
2466 changes are visible for pull by other users, rolling a transaction
2467 changes are visible for pull by other users, rolling a transaction
2467 back locally is ineffective (someone else may already have pulled
2468 back locally is ineffective (someone else may already have pulled
2468 the changes). Furthermore, a race is possible with readers of the
2469 the changes). Furthermore, a race is possible with readers of the
2469 repository; for example an in-progress pull from the repository
2470 repository; for example an in-progress pull from the repository
2470 may fail if a rollback is performed.
2471 may fail if a rollback is performed.
2471 """
2472 """
2472 repo.rollback()
2473 repo.rollback()
2473
2474
2474 def root(ui, repo):
2475 def root(ui, repo):
2475 """print the root (top) of the current working dir
2476 """print the root (top) of the current working dir
2476
2477
2477 Print the root directory of the current repository.
2478 Print the root directory of the current repository.
2478 """
2479 """
2479 ui.write(repo.root + "\n")
2480 ui.write(repo.root + "\n")
2480
2481
2481 def serve(ui, repo, **opts):
2482 def serve(ui, repo, **opts):
2482 """export the repository via HTTP
2483 """export the repository via HTTP
2483
2484
2484 Start a local HTTP repository browser and pull server.
2485 Start a local HTTP repository browser and pull server.
2485
2486
2486 By default, the server logs accesses to stdout and errors to
2487 By default, the server logs accesses to stdout and errors to
2487 stderr. Use the "-A" and "-E" options to log to files.
2488 stderr. Use the "-A" and "-E" options to log to files.
2488 """
2489 """
2489
2490
2490 if opts["stdio"]:
2491 if opts["stdio"]:
2491 if repo is None:
2492 if repo is None:
2492 raise hg.RepoError(_('no repo found'))
2493 raise hg.RepoError(_('no repo found'))
2493 s = sshserver.sshserver(ui, repo)
2494 s = sshserver.sshserver(ui, repo)
2494 s.serve_forever()
2495 s.serve_forever()
2495
2496
2496 optlist = ("name templates style address port ipv6"
2497 optlist = ("name templates style address port ipv6"
2497 " accesslog errorlog webdir_conf")
2498 " accesslog errorlog webdir_conf")
2498 for o in optlist.split():
2499 for o in optlist.split():
2499 if opts[o]:
2500 if opts[o]:
2500 ui.setconfig("web", o, opts[o])
2501 ui.setconfig("web", o, opts[o])
2501
2502
2502 if repo is None and not ui.config("web", "webdir_conf"):
2503 if repo is None and not ui.config("web", "webdir_conf"):
2503 raise hg.RepoError(_('no repo found'))
2504 raise hg.RepoError(_('no repo found'))
2504
2505
2505 if opts['daemon'] and not opts['daemon_pipefds']:
2506 if opts['daemon'] and not opts['daemon_pipefds']:
2506 rfd, wfd = os.pipe()
2507 rfd, wfd = os.pipe()
2507 args = sys.argv[:]
2508 args = sys.argv[:]
2508 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2509 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2509 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2510 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2510 args[0], args)
2511 args[0], args)
2511 os.close(wfd)
2512 os.close(wfd)
2512 os.read(rfd, 1)
2513 os.read(rfd, 1)
2513 os._exit(0)
2514 os._exit(0)
2514
2515
2515 try:
2516 try:
2516 httpd = hgweb.server.create_server(ui, repo)
2517 httpd = hgweb.server.create_server(ui, repo)
2517 except socket.error, inst:
2518 except socket.error, inst:
2518 raise util.Abort(_('cannot start server: ') + inst.args[1])
2519 raise util.Abort(_('cannot start server: ') + inst.args[1])
2519
2520
2520 if ui.verbose:
2521 if ui.verbose:
2521 addr, port = httpd.socket.getsockname()
2522 addr, port = httpd.socket.getsockname()
2522 if addr == '0.0.0.0':
2523 if addr == '0.0.0.0':
2523 addr = socket.gethostname()
2524 addr = socket.gethostname()
2524 else:
2525 else:
2525 try:
2526 try:
2526 addr = socket.gethostbyaddr(addr)[0]
2527 addr = socket.gethostbyaddr(addr)[0]
2527 except socket.error:
2528 except socket.error:
2528 pass
2529 pass
2529 if port != 80:
2530 if port != 80:
2530 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2531 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2531 else:
2532 else:
2532 ui.status(_('listening at http://%s/\n') % addr)
2533 ui.status(_('listening at http://%s/\n') % addr)
2533
2534
2534 if opts['pid_file']:
2535 if opts['pid_file']:
2535 fp = open(opts['pid_file'], 'w')
2536 fp = open(opts['pid_file'], 'w')
2536 fp.write(str(os.getpid()) + '\n')
2537 fp.write(str(os.getpid()) + '\n')
2537 fp.close()
2538 fp.close()
2538
2539
2539 if opts['daemon_pipefds']:
2540 if opts['daemon_pipefds']:
2540 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2541 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2541 os.close(rfd)
2542 os.close(rfd)
2542 os.write(wfd, 'y')
2543 os.write(wfd, 'y')
2543 os.close(wfd)
2544 os.close(wfd)
2544 sys.stdout.flush()
2545 sys.stdout.flush()
2545 sys.stderr.flush()
2546 sys.stderr.flush()
2546 fd = os.open(util.nulldev, os.O_RDWR)
2547 fd = os.open(util.nulldev, os.O_RDWR)
2547 if fd != 0: os.dup2(fd, 0)
2548 if fd != 0: os.dup2(fd, 0)
2548 if fd != 1: os.dup2(fd, 1)
2549 if fd != 1: os.dup2(fd, 1)
2549 if fd != 2: os.dup2(fd, 2)
2550 if fd != 2: os.dup2(fd, 2)
2550 if fd not in (0, 1, 2): os.close(fd)
2551 if fd not in (0, 1, 2): os.close(fd)
2551
2552
2552 httpd.serve_forever()
2553 httpd.serve_forever()
2553
2554
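# Illustrative sketch (not part of this changeset) of the daemonization
# handshake above: the foreground process blocks on a one-byte pipe read and
# only exits once the re-spawned child has written 'y' after setting up.
import os

def _parent_wait(rfd, wfd):
    os.close(wfd)       # parent keeps only the read end
    os.read(rfd, 1)     # blocks until the child signals readiness
    os.close(rfd)

def _child_signal(rfd, wfd):
    os.close(rfd)       # child keeps only the write end
    os.write(wfd, 'y')  # tell the waiting parent the server is up
    os.close(wfd)
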
2554 def status(ui, repo, *pats, **opts):
2555 def status(ui, repo, *pats, **opts):
2555 """show changed files in the working directory
2556 """show changed files in the working directory
2556
2557
2557 Show status of files in the repository. If names are given, only
2558 Show status of files in the repository. If names are given, only
2558 files that match are shown. Files that are clean or ignored are
2559 files that match are shown. Files that are clean or ignored are
2559 not listed unless -c (clean), -i (ignored) or -A is given.
2560 not listed unless -c (clean), -i (ignored) or -A is given.
2560
2561
2561 The codes used to show the status of files are:
2562 The codes used to show the status of files are:
2562 M = modified
2563 M = modified
2563 A = added
2564 A = added
2564 R = removed
2565 R = removed
2565 C = clean
2566 C = clean
2566 ! = deleted, but still tracked
2567 ! = deleted, but still tracked
2567 ? = not tracked
2568 ? = not tracked
2568 I = ignored (not shown by default)
2569 I = ignored (not shown by default)
2569 = the previously added file was copied from here
2570 = the previously added file was copied from here
2570 """
2571 """
2571
2572
2572 all = opts['all']
2573 all = opts['all']
2573
2574
2574 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2575 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2575 cwd = (pats and repo.getcwd()) or ''
2576 cwd = (pats and repo.getcwd()) or ''
2576 modified, added, removed, deleted, unknown, ignored, clean = [
2577 modified, added, removed, deleted, unknown, ignored, clean = [
2577 [util.pathto(cwd, x) for x in n]
2578 [util.pathto(cwd, x) for x in n]
2578 for n in repo.status(files=files, match=matchfn,
2579 for n in repo.status(files=files, match=matchfn,
2579 list_ignored=all or opts['ignored'],
2580 list_ignored=all or opts['ignored'],
2580 list_clean=all or opts['clean'])]
2581 list_clean=all or opts['clean'])]
2581
2582
2582 changetypes = (('modified', 'M', modified),
2583 changetypes = (('modified', 'M', modified),
2583 ('added', 'A', added),
2584 ('added', 'A', added),
2584 ('removed', 'R', removed),
2585 ('removed', 'R', removed),
2585 ('deleted', '!', deleted),
2586 ('deleted', '!', deleted),
2586 ('unknown', '?', unknown),
2587 ('unknown', '?', unknown),
2587 ('ignored', 'I', ignored))
2588 ('ignored', 'I', ignored))
2588
2589
2589 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2590 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2590
2591
2591 end = opts['print0'] and '\0' or '\n'
2592 end = opts['print0'] and '\0' or '\n'
2592
2593
2593 for opt, char, changes in ([ct for ct in explicit_changetypes
2594 for opt, char, changes in ([ct for ct in explicit_changetypes
2594 if all or opts[ct[0]]]
2595 if all or opts[ct[0]]]
2595 or changetypes):
2596 or changetypes):
2596 if opts['no_status']:
2597 if opts['no_status']:
2597 format = "%%s%s" % end
2598 format = "%%s%s" % end
2598 else:
2599 else:
2599 format = "%s %%s%s" % (char, end)
2600 format = "%s %%s%s" % (char, end)
2600
2601
2601 for f in changes:
2602 for f in changes:
2602 ui.write(format % f)
2603 ui.write(format % f)
2603 if ((all or opts.get('copies')) and not opts.get('no_status')
2604 if ((all or opts.get('copies')) and not opts.get('no_status')
2604 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2605 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2605 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2606 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2606
2607
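# Illustrative note (not part of this changeset): the status format string
# above is built in two steps -- the status character and line terminator are
# substituted first, leaving a single "%s" placeholder for each filename.
char_example, end_example = 'M', '\n'
fmt_example = "%s %%s%s" % (char_example, end_example)   # yields "M %s\n"
assert fmt_example % 'foo.py' == 'M foo.py\n'
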
2607 def tag(ui, repo, name, rev_=None, **opts):
2608 def tag(ui, repo, name, rev_=None, **opts):
2608 """add a tag for the current tip or a given revision
2609 """add a tag for the current tip or a given revision
2609
2610
2610 Name a particular revision using <name>.
2611 Name a particular revision using <name>.
2611
2612
2612 Tags are used to name particular revisions of the repository and are
2613 Tags are used to name particular revisions of the repository and are
2613 very useful to compare different revisions, to go back to significant
2614 very useful to compare different revisions, to go back to significant
2614 earlier versions or to mark branch points as releases, etc.
2615 earlier versions or to mark branch points as releases, etc.
2615
2616
2616 If no revision is given, the parent of the working directory is used.
2617 If no revision is given, the parent of the working directory is used.
2617
2618
2618 To facilitate version control, distribution, and merging of tags,
2619 To facilitate version control, distribution, and merging of tags,
2619 they are stored as a file named ".hgtags" which is managed
2620 they are stored as a file named ".hgtags" which is managed
2620 similarly to other project files and can be hand-edited if
2621 similarly to other project files and can be hand-edited if
2621 necessary. The file '.hg/localtags' is used for local tags (not
2622 necessary. The file '.hg/localtags' is used for local tags (not
2622 shared among repositories).
2623 shared among repositories).
2623 """
2624 """
2624 if name in ['tip', '.']:
2625 if name in ['tip', '.']:
2625 raise util.Abort(_("the name '%s' is reserved") % name)
2626 raise util.Abort(_("the name '%s' is reserved") % name)
2626 if rev_ is not None:
2627 if rev_ is not None:
2627 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2628 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2628 "please use 'hg tag [-r REV] NAME' instead\n"))
2629 "please use 'hg tag [-r REV] NAME' instead\n"))
2629 if opts['rev']:
2630 if opts['rev']:
2630 raise util.Abort(_("use only one form to specify the revision"))
2631 raise util.Abort(_("use only one form to specify the revision"))
2631 if opts['rev']:
2632 if opts['rev']:
2632 rev_ = opts['rev']
2633 rev_ = opts['rev']
2633 if rev_:
2634 if rev_:
2634 r = hex(repo.lookup(rev_))
2635 r = hex(repo.lookup(rev_))
2635 else:
2636 else:
2636 p1, p2 = repo.dirstate.parents()
2637 p1, p2 = repo.dirstate.parents()
2637 if p1 == nullid:
2638 if p1 == nullid:
2638 raise util.Abort(_('no revision to tag'))
2639 raise util.Abort(_('no revision to tag'))
2639 if p2 != nullid:
2640 if p2 != nullid:
2640 raise util.Abort(_('outstanding uncommitted merges'))
2641 raise util.Abort(_('outstanding uncommitted merges'))
2641 r = hex(p1)
2642 r = hex(p1)
2642
2643
2643 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2644 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2644 opts['date'])
2645 opts['date'])
2645
2646
2646 def tags(ui, repo):
2647 def tags(ui, repo):
2647 """list repository tags
2648 """list repository tags
2648
2649
2649 List the repository tags.
2650 List the repository tags.
2650
2651
2651 This lists both regular and local tags.
2652 This lists both regular and local tags.
2652 """
2653 """
2653
2654
2654 l = repo.tagslist()
2655 l = repo.tagslist()
2655 l.reverse()
2656 l.reverse()
2656 for t, n in l:
2657 for t, n in l:
2657 try:
2658 try:
2658 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2659 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2659 except KeyError:
2660 except KeyError:
2660 r = " ?:?"
2661 r = " ?:?"
2661 if ui.quiet:
2662 if ui.quiet:
2662 ui.write("%s\n" % t)
2663 ui.write("%s\n" % t)
2663 else:
2664 else:
2664 ui.write("%-30s %s\n" % (t, r))
2665 ui.write("%-30s %s\n" % (t, r))
2665
2666
2666 def tip(ui, repo, **opts):
2667 def tip(ui, repo, **opts):
2667 """show the tip revision
2668 """show the tip revision
2668
2669
2669 Show the tip revision.
2670 Show the tip revision.
2670 """
2671 """
2671 n = repo.changelog.tip()
2672 n = repo.changelog.tip()
2672 br = None
2673 br = None
2673 if opts['branches']:
2674 if opts['branches']:
2674 br = repo.branchlookup([n])
2675 br = repo.branchlookup([n])
2675 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2676 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2676 if opts['patch']:
2677 if opts['patch']:
2677 patch.diff(repo, repo.changelog.parents(n)[0], n)
2678 patch.diff(repo, repo.changelog.parents(n)[0], n)
2678
2679
2679 def unbundle(ui, repo, fname, **opts):
2680 def unbundle(ui, repo, fname, **opts):
2680 """apply a changegroup file
2681 """apply a changegroup file
2681
2682
2682 Apply a compressed changegroup file generated by the bundle
2683 Apply a compressed changegroup file generated by the bundle
2683 command.
2684 command.
2684 """
2685 """
2685 f = urllib.urlopen(fname)
2686 f = urllib.urlopen(fname)
2686
2687
2687 header = f.read(6)
2688 header = f.read(6)
2688 if not header.startswith("HG"):
2689 if not header.startswith("HG"):
2689 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2690 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2690 elif not header.startswith("HG10"):
2691 elif not header.startswith("HG10"):
2691 raise util.Abort(_("%s: unknown bundle version") % fname)
2692 raise util.Abort(_("%s: unknown bundle version") % fname)
2692 elif header == "HG10BZ":
2693 elif header == "HG10BZ":
2693 def generator(f):
2694 def generator(f):
2694 zd = bz2.BZ2Decompressor()
2695 zd = bz2.BZ2Decompressor()
2695 zd.decompress("BZ")
2696 zd.decompress("BZ")
2696 for chunk in f:
2697 for chunk in f:
2697 yield zd.decompress(chunk)
2698 yield zd.decompress(chunk)
2698 elif header == "HG10UN":
2699 elif header == "HG10UN":
2699 def generator(f):
2700 def generator(f):
2700 for chunk in f:
2701 for chunk in f:
2701 yield chunk
2702 yield chunk
2702 else:
2703 else:
2703 raise util.Abort(_("%s: unknown bundle compression type")
2704 raise util.Abort(_("%s: unknown bundle compression type")
2704 % fname)
2705 % fname)
2705 gen = generator(util.filechunkiter(f, 4096))
2706 gen = generator(util.filechunkiter(f, 4096))
2706 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2707 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2707 'bundle:' + fname)
2708 'bundle:' + fname)
2708 return postincoming(ui, repo, modheads, opts['update'])
2709 return postincoming(ui, repo, modheads, opts['update'])
2709
2710
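# Illustrative sketch (not part of this changeset) of the header dispatch
# above: the first six bytes of a bundle select how its chunks are handled.
def _bundletype(header):
    if not header.startswith("HG"):
        return 'not a bundle'
    if not header.startswith("HG10"):
        return 'unknown version'
    if header == "HG10BZ":
        return 'bzip2 compressed'   # chunks go through bz2 decompression
    if header == "HG10UN":
        return 'uncompressed'       # chunks are passed through unchanged
    return 'unknown compression'

assert _bundletype("HG10BZ") == 'bzip2 compressed'
assert _bundletype("HG10UN") == 'uncompressed'
assert _bundletype("HG09XX") == 'unknown version'
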
2710 def undo(ui, repo):
2711 def undo(ui, repo):
2711 """undo the last commit or pull (DEPRECATED)
2712 """undo the last commit or pull (DEPRECATED)
2712
2713
2713 (DEPRECATED)
2714 (DEPRECATED)
2714 This command is now deprecated and will be removed in a future
2715 This command is now deprecated and will be removed in a future
2715 release. Please use the rollback command instead. For usage
2716 release. Please use the rollback command instead. For usage
2716 instructions, see the rollback command.
2717 instructions, see the rollback command.
2717 """
2718 """
2718 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2719 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2719 repo.rollback()
2720 repo.rollback()
2720
2721
def update(ui, repo, node=None, merge=False, clean=False, force=None,
           branch=None):
    """update or merge working directory

    Update the working directory to the specified revision.

    If there are no outstanding changes in the working directory and
    there is a linear relationship between the current version and the
    requested version, the result is the requested version.

    To merge the working directory with another revision, use the
    merge command.

    By default, update will refuse to run if doing so would require
    merging or discarding local changes.
    """
    node = _lookup(repo, node, branch)
    if merge:
        ui.warn(_('(the -m/--merge option is deprecated; '
                  'use the merge command instead)\n'))
        return hg.merge(repo, node, force=force)
    elif clean:
        return hg.clean(repo, node)
    else:
        return hg.update(repo, node)

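# Hedged example (u and repo are hypothetical ui/repository objects, not
# defined here): the three branches above map directly onto the hg helpers.
#
#   update(u, repo, 'tip')                 # -> hg.update(repo, node)
#   update(u, repo, 'tip', clean=True)     # -> hg.clean(repo, node), discarding local changes
#   update(u, repo, 'tip', merge=True)     # -> hg.merge(repo, node), plus a deprecation warning
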
def _lookup(repo, node, branch=None):
    if branch:
        br = repo.branchlookup(branch=branch)
        found = []
        for x in br:
            if branch in br[x]:
                found.append(x)
        if len(found) > 1:
            repo.ui.warn(_("Found multiple heads for %s\n") % branch)
            for x in found:
                show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
            raise util.Abort("")
        if len(found) == 1:
            node = found[0]
            repo.ui.warn(_("Using head %s for branch %s\n")
                         % (short(node), branch))
        else:
            raise util.Abort(_("branch %s not found") % branch)
    else:
        node = node and repo.lookup(node) or repo.changelog.tip()
    return node

def verify(ui, repo):
    """verify the integrity of the repository

    Verify the integrity of the current repository.

    This will perform an extensive check of the repository's
    integrity, validating the hashes and checksums of each entry in
    the changelog, manifest, and tracked files, as well as the
    integrity of their crosslinks and indices.
    """
    return hg.verify(repo)

# Command options and aliases are listed here, alphabetically

table = {
    "^add":
        (add,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg add [OPTION]... [FILE]...')),
    "debugaddremove|addremove":
        (addremove,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg addremove [OPTION]... [FILE]...')),
    "^annotate":
        (annotate,
         [('r', 'rev', '', _('annotate the specified revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('u', 'user', None, _('list the author')),
          ('d', 'date', None, _('list the date')),
          ('n', 'number', None, _('list the revision number (default)')),
          ('c', 'changeset', None, _('list the changeset')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
    "archive":
        (archive,
         [('', 'no-decode', None, _('do not pass files through decoders')),
          ('p', 'prefix', '', _('directory prefix for files in archive')),
          ('r', 'rev', '', _('revision to distribute')),
          ('t', 'type', '', _('type of distribution to create')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg archive [OPTION]... DEST')),
    "backout":
        (backout,
         [('', 'merge', None,
           _('merge with old dirstate parent after backout')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('', 'parent', '', _('parent to choose when backing out merge')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg backout [OPTION]... REV')),
    "bundle":
        (bundle,
         [('f', 'force', None,
           _('run even when remote repository is unrelated'))],
         _('hg bundle FILE DEST')),
    "cat":
        (cat,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('r', 'rev', '', _('print the given revision')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg cat [OPTION]... FILE...')),
    "^clone":
        (clone,
         [('U', 'noupdate', None, _('do not update the new working directory')),
          ('r', 'rev', [],
           _('a changeset you would like to have after cloning')),
          ('', 'pull', None, _('use pull protocol to copy metadata')),
          ('', 'uncompressed', None,
           _('use uncompressed transfer (fast over LAN)')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg clone [OPTION]... SOURCE [DEST]')),
    "^commit|ci":
        (commit,
         [('A', 'addremove', None,
           _('mark new/missing files as added/removed before committing')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('l', 'logfile', '', _('read the commit message from <file>')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg commit [OPTION]... [FILE]...')),
    "copy|cp":
        (copy,
         [('A', 'after', None, _('record a copy that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg copy [OPTION]... [SOURCE]... DEST')),
    "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
    "debugcomplete":
        (debugcomplete,
         [('o', 'options', None, _('show the command options'))],
         _('debugcomplete [-o] CMD')),
    "debugrebuildstate":
        (debugrebuildstate,
         [('r', 'rev', '', _('revision to rebuild to'))],
         _('debugrebuildstate [-r REV] [REV]')),
    "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
    "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
    "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
    "debugstate": (debugstate, [], _('debugstate')),
    "debugdata": (debugdata, [], _('debugdata FILE REV')),
    "debugindex": (debugindex, [], _('debugindex FILE')),
    "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
    "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
    "debugwalk":
        (debugwalk,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('debugwalk [OPTION]... [FILE]...')),
    "^diff":
        (diff,
         [('r', 'rev', [], _('revision')),
          ('a', 'text', None, _('treat all files as text')),
          ('p', 'show-function', None,
           _('show which function each change is in')),
          ('g', 'git', None, _('use git extended diff format')),
          ('w', 'ignore-all-space', None,
           _('ignore white space when comparing lines')),
          ('b', 'ignore-space-change', None,
           _('ignore changes in the amount of white space')),
          ('B', 'ignore-blank-lines', None,
           _('ignore changes whose lines are all blank')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
    "^export":
        (export,
         [('o', 'output', '', _('print output to file with formatted name')),
          ('a', 'text', None, _('treat all files as text')),
          ('', 'switch-parent', None, _('diff against the second parent'))],
         _('hg export [-a] [-o OUTFILESPEC] REV...')),
    "debugforget|forget":
        (forget,
         [('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg forget [OPTION]... FILE...')),
    "grep":
        (grep,
         [('0', 'print0', None, _('end fields with NUL')),
          ('', 'all', None, _('print all revisions that match')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('i', 'ignore-case', None, _('ignore case when matching')),
          ('l', 'files-with-matches', None,
           _('print only filenames and revs that match')),
          ('n', 'line-number', None, _('print matching line numbers')),
          ('r', 'rev', [], _('search in given revision range')),
          ('u', 'user', None, _('print user who committed change')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg grep [OPTION]... PATTERN [FILE]...')),
    "heads":
        (heads,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', '', _('show only heads which are descendants of rev')),
          ('', 'template', '', _('display with template'))],
         _('hg heads [-b] [-r <rev>]')),
    "help": (help_, [], _('hg help [COMMAND]')),
    "identify|id": (identify, [], _('hg identify')),
    "import|patch":
        (import_,
         [('p', 'strip', 1,
           _('directory strip option for patch. This has the same\n'
             'meaning as the corresponding patch option')),
          ('m', 'message', '', _('use <text> as commit message')),
          ('b', 'base', '', _('base path')),
          ('f', 'force', None,
           _('skip check for outstanding uncommitted changes'))],
         _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
    "incoming|in": (incoming,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('', 'style', '', _('display using template map file')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'bundle', '', _('file to store the bundles into')),
          ('p', 'patch', None, _('show patch')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg incoming [-p] [-n] [-M] [-r REV]...'
           ' [--bundle FILENAME] [SOURCE]')),
    "^init":
        (init,
         [('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
    "locate":
        (locate,
         [('r', 'rev', '', _('search the repository as it stood at rev')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('f', 'fullpath', None,
           _('print complete paths from the filesystem root')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg locate [OPTION]... [PATTERN]...')),
    "^log|history":
        (log,
         [('b', 'branches', None, _('show branches')),
          ('f', 'follow', None,
           _('follow changeset history, or file history across copies and renames')),
          ('', 'follow-first', None,
           _('only follow the first parent of merge changesets')),
          ('k', 'keyword', [], _('search for a keyword')),
          ('l', 'limit', '', _('limit number of changes displayed')),
          ('r', 'rev', [], _('show the specified revision or range')),
          ('M', 'no-merges', None, _('do not show merges')),
          ('', 'style', '', _('display using template map file')),
          ('m', 'only-merges', None, _('show only merges')),
          ('p', 'patch', None, _('show patch')),
          ('P', 'prune', [], _('do not display revision or any of its ancestors')),
          ('', 'template', '', _('display with template')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg log [OPTION]... [FILE]')),
    "manifest": (manifest, [], _('hg manifest [REV]')),
    "merge":
        (merge,
         [('b', 'branch', '', _('merge with head of a specific branch')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg merge [-b TAG] [-f] [REV]')),
    "outgoing|out": (outgoing,
         [('M', 'no-merges', None, _('do not show merges')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('p', 'patch', None, _('show patch')),
          ('', 'style', '', _('display using template map file')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('n', 'newest-first', None, _('show newest record first')),
          ('', 'template', '', _('display with template')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
    "^parents":
        (parents,
         [('b', 'branches', None, _('show branches')),
          ('r', 'rev', '', _('show parents from the specified rev')),
          ('', 'style', '', _('display using template map file')),
          ('', 'template', '', _('display with template'))],
         _('hg parents [-b] [-r REV] [FILE]')),
    "paths": (paths, [], _('hg paths [NAME]')),
    "^pull":
        (pull,
         [('u', 'update', None,
           _('update the working directory to tip after pull')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('f', 'force', None,
           _('run even when remote repository is unrelated')),
          ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
    "^push":
        (push,
         [('f', 'force', None, _('force push')),
          ('e', 'ssh', '', _('specify ssh command to use')),
          ('r', 'rev', [], _('a specific revision you would like to push')),
          ('', 'remotecmd', '',
           _('specify hg command to run on the remote side'))],
         _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
    "debugrawcommit|rawcommit":
        (rawcommit,
         [('p', 'parent', [], _('parent')),
          ('d', 'date', '', _('date code')),
          ('u', 'user', '', _('user')),
          ('F', 'files', '', _('file list')),
          ('m', 'message', '', _('commit message')),
          ('l', 'logfile', '', _('commit message file'))],
         _('hg debugrawcommit [OPTION]... [FILE]...')),
    "recover": (recover, [], _('hg recover')),
    "^remove|rm":
        (remove,
         [('A', 'after', None, _('record remove that has already occurred')),
          ('f', 'force', None, _('remove file even if modified')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg remove [OPTION]... FILE...')),
    "rename|mv":
        (rename,
         [('A', 'after', None, _('record a rename that has already occurred')),
          ('f', 'force', None,
           _('forcibly copy over an existing managed file')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg rename [OPTION]... SOURCE... DEST')),
    "^revert":
        (revert,
         [('r', 'rev', '', _('revision to revert to')),
          ('', 'no-backup', None, _('do not save backup copies of files')),
          ('I', 'include', [], _('include names matching given patterns')),
          ('X', 'exclude', [], _('exclude names matching given patterns')),
          ('n', 'dry-run', None, _('do not perform actions, just print output'))],
         _('hg revert [-r REV] [NAME]...')),
    "rollback": (rollback, [], _('hg rollback')),
    "root": (root, [], _('hg root')),
    "^serve":
        (serve,
         [('A', 'accesslog', '', _('name of access log file to write to')),
          ('d', 'daemon', None, _('run server in background')),
          ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
          ('E', 'errorlog', '', _('name of error log file to write to')),
          ('p', 'port', 0, _('port to use (default: 8000)')),
          ('a', 'address', '', _('address to use')),
          ('n', 'name', '',
           _('name to show in web pages (default: working dir)')),
          ('', 'webdir-conf', '', _('name of the webdir config file'
                                    ' (serve more than one repo)')),
          ('', 'pid-file', '', _('name of file to write process ID to')),
          ('', 'stdio', None, _('for remote clients')),
          ('t', 'templates', '', _('web templates to use')),
          ('', 'style', '', _('template style to use')),
          ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
         _('hg serve [OPTION]...')),
    "^status|st":
        (status,
         [('A', 'all', None, _('show status of all files')),
          ('m', 'modified', None, _('show only modified files')),
          ('a', 'added', None, _('show only added files')),
          ('r', 'removed', None, _('show only removed files')),
          ('d', 'deleted', None, _('show only deleted (but tracked) files')),
          ('c', 'clean', None, _('show only files without changes')),
          ('u', 'unknown', None, _('show only unknown (not tracked) files')),
          ('i', 'ignored', None, _('show ignored files')),
          ('n', 'no-status', None, _('hide status prefix')),
          ('C', 'copies', None, _('show source of copied files')),
          ('0', 'print0', None,
           _('end filenames with NUL, for use with xargs')),
          ('I', 'include', [], _('include names matching the given patterns')),
          ('X', 'exclude', [], _('exclude names matching the given patterns'))],
         _('hg status [OPTION]... [FILE]...')),
    "tag":
        (tag,
         [('l', 'local', None, _('make the tag local')),
          ('m', 'message', '', _('message for tag commit log entry')),
          ('d', 'date', '', _('record datecode as commit date')),
          ('u', 'user', '', _('record user as committer')),
          ('r', 'rev', '', _('revision to tag'))],
         _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
    "tags": (tags, [], _('hg tags')),
    "tip":
        (tip,
         [('b', 'branches', None, _('show branches')),
          ('', 'style', '', _('display using template map file')),
          ('p', 'patch', None, _('show patch')),
          ('', 'template', '', _('display with template'))],
         _('hg tip [-b] [-p]')),
    "unbundle":
        (unbundle,
         [('u', 'update', None,
           _('update the working directory to tip after unbundle'))],
         _('hg unbundle [-u] FILE')),
    "debugundo|undo": (undo, [], _('hg undo')),
    "^update|up|checkout|co":
        (update,
         [('b', 'branch', '', _('checkout the head of a specific branch')),
          ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
          ('C', 'clean', None, _('overwrite locally modified files')),
          ('f', 'force', None, _('force a merge with outstanding changes'))],
         _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
    "verify": (verify, [], _('hg verify')),
    "version": (show_version, [], _('hg version')),
}

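# Sketch of how a table entry is consumed elsewhere in this module (a hedged
# illustration, not original code): each value is a (function, options,
# synopsis) tuple, and each key is a "|"-separated alias list whose leading
# "^" marks commands listed in the short help.
#
#   aliases = "^commit|ci".lstrip("^").split("|")   # ['commit', 'ci']
#   func, opts, synopsis = table["^commit|ci"]
#   # func implements the command, opts is fed to fancyopts during parse(),
#   # and synopsis is the usage string shown by the help machinery.
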
globalopts = [
    ('R', 'repository', '',
     _('repository root directory or symbolic path name')),
    ('', 'cwd', '', _('change working directory')),
    ('y', 'noninteractive', None,
     _('do not prompt, assume \'yes\' for any required answers')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [], _('set/override config option')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'lsprof', None, _('print improved command execution profile')),
    ('', 'traceback', None, _('print traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
]

norepo = ("clone init version help debugancestor debugcomplete debugdata"
          " debugindex debugindexdot")
optionalrepo = ("paths serve debugconfig")

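# These whitespace-separated strings are membership-tested in dispatch();
# roughly (cmd is a hypothetical canonical command name):
#
#   needs_repo = cmd not in norepo.split()          # e.g. False for "version"
#   repo_optional = cmd in optionalrepo.split()     # e.g. True for "serve"
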
def findpossible(cmd):
    """
    Return cmd -> (aliases, command table entry)
    for each matching command.
    Return debug commands (or their aliases) only if no normal command matches.
    """
    choice = {}
    debugchoice = {}
    for e in table.keys():
        aliases = e.lstrip("^").split("|")
        found = None
        if cmd in aliases:
            found = cmd
        else:
            for a in aliases:
                if a.startswith(cmd):
                    found = a
                    break
        if found is not None:
            if aliases[0].startswith("debug"):
                debugchoice[found] = (aliases, table[e])
            else:
                choice[found] = (aliases, table[e])

    if not choice and debugchoice:
        choice = debugchoice

    return choice

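# Illustrative behaviour (hedged; the exact results depend on the command
# table above): an exact alias match wins, any other alias prefix also
# matches, and debug commands are offered only when nothing else matches.
#
#   findpossible("st")     # -> {'st': (['status', 'st'], table['^status|st'])}
#   findpossible("re")     # -> several entries (recover, remove, rename, revert, ...)
#   findpossible("debugd") # -> debug commands only; no normal command starts with it
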
def findcmd(cmd):
    """Return (aliases, command table entry) for command string."""
    choice = findpossible(cmd)

    if choice.has_key(cmd):
        return choice[cmd]

    if len(choice) > 1:
        clist = choice.keys()
        clist.sort()
        raise AmbiguousCommand(cmd, clist)

    if choice:
        return choice.values()[0]

    raise UnknownCommand(cmd)

def catchterm(*args):
    raise util.SignalInterrupt

def run():
    sys.exit(dispatch(sys.argv[1:]))

class ParseError(Exception):
    """Exception raised on errors in parsing the command line."""

def parse(ui, args):
    options = {}
    cmdoptions = {}

    try:
        args = fancyopts.fancyopts(args, globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, i = findcmd(cmd)
        cmd = aliases[0]
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = defaults.split() + args
        c = list(i[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions)
    except fancyopts.getopt.GetoptError, inst:
        raise ParseError(cmd, inst)

    # separate global options back out
    for o in globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and i[0] or None, args, options, cmdoptions)

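# Example of the [defaults] expansion above (a hedged sketch): with an hgrc
# containing
#
#   [defaults]
#   log = -v -l 10
#
# ui.config("defaults", "log") returns "-v -l 10", so "hg log myfile" is
# parsed as if it were "hg log -v -l 10 myfile" -- the defaults string is
# split on whitespace and prepended to the remaining arguments.
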
external = {}

def findext(name):
    '''return module with given extension name'''
    try:
        return sys.modules[external[name]]
    except KeyError:
        for k, v in external.iteritems():
            if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
                return sys.modules[v]
        raise KeyError(name)

def dispatch(args):
    for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
        num = getattr(signal, name, None)
        if num: signal.signal(num, catchterm)

    try:
        u = ui.ui(traceback='--traceback' in sys.argv[1:])
    except util.Abort, inst:
        sys.stderr.write(_("abort: %s\n") % inst)
        return -1

    for ext_name, load_from_name in u.extensions():
        try:
            if load_from_name:
                # the module will be loaded in sys.modules
                # choose a unique name so that it doesn't
                # conflict with other modules
                module_name = "hgext_%s" % ext_name.replace('.', '_')
                mod = imp.load_source(module_name, load_from_name)
            else:
                def importh(name):
                    mod = __import__(name)
                    components = name.split('.')
                    for comp in components[1:]:
                        mod = getattr(mod, comp)
                    return mod
                try:
                    mod = importh("hgext.%s" % ext_name)
                except ImportError:
                    mod = importh(ext_name)
            external[ext_name] = mod.__name__
        except (util.SignalInterrupt, KeyboardInterrupt):
            raise
        except Exception, inst:
            u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
            if u.print_exc():
                return 1

    for name in external.itervalues():
        mod = sys.modules[name]
        uisetup = getattr(mod, 'uisetup', None)
        if uisetup:
            uisetup(u)
        cmdtable = getattr(mod, 'cmdtable', {})
        for t in cmdtable:
            if t in table:
                u.warn(_("module %s overrides %s\n") % (name, t))
        table.update(cmdtable)

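    # Hedged sketch (not part of dispatch itself): the two loops above mean an
    # extension only has to define module-level hooks, roughly:
    #
    #   # hgext/example.py (hypothetical)
    #   def uisetup(ui):          # called once with the ui object
    #       ui.note("example extension loaded\n")
    #   def reposetup(ui, repo):  # called below, once a repository exists
    #       pass
    #   cmdtable = {}             # merged into the global command table
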
    try:
        cmd, func, args, options, cmdoptions = parse(u, args)
        if options["time"]:
            def get_times():
                t = os.times()
                if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                    t = (t[0], t[1], t[2], t[3], time.clock())
                return t
            s = get_times()
            def print_time():
                t = get_times()
                u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                       (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
            atexit.register(print_time)

        u.updateopts(options["verbose"], options["debug"], options["quiet"],
                     not options["noninteractive"], options["traceback"],
                     options["config"])

        # enter the debugger before command execution
        if options['debugger']:
            pdb.set_trace()

        try:
            if options['cwd']:
                try:
                    os.chdir(options['cwd'])
                except OSError, inst:
                    raise util.Abort('%s: %s' %
                                     (options['cwd'], inst.strerror))

            path = u.expandpath(options["repository"]) or ""
            repo = path and hg.repository(u, path=path) or None

            if options['help']:
                return help_(u, cmd, options['version'])
            elif options['version']:
                return show_version(u)
            elif not cmd:
                return help_(u, 'shortlist')

            if cmd not in norepo.split():
                try:
                    if not repo:
                        repo = hg.repository(u, path=path)
                    u = repo.ui
                    for name in external.itervalues():
                        mod = sys.modules[name]
                        if hasattr(mod, 'reposetup'):
                            mod.reposetup(u, repo)
                            hg.repo_setup_hooks.append(mod.reposetup)
                except hg.RepoError:
                    if cmd not in optionalrepo.split():
                        raise
                d = lambda: func(u, repo, *args, **cmdoptions)
            else:
                d = lambda: func(u, *args, **cmdoptions)

            # reupdate the options, repo/.hg/hgrc may have changed them
            u.updateopts(options["verbose"], options["debug"], options["quiet"],
                         not options["noninteractive"], options["traceback"],
                         options["config"])

            try:
                if options['profile']:
                    import hotshot, hotshot.stats
                    prof = hotshot.Profile("hg.prof")
                    try:
                        try:
                            return prof.runcall(d)
                        except:
                            try:
                                u.warn(_('exception raised - generating '
                                         'profile anyway\n'))
                            except:
                                pass
                            raise
                    finally:
                        prof.close()
                        stats = hotshot.stats.load("hg.prof")
                        stats.strip_dirs()
                        stats.sort_stats('time', 'calls')
                        stats.print_stats(40)
                elif options['lsprof']:
                    try:
                        from mercurial import lsprof
                    except ImportError:
                        raise util.Abort(_(
                            'lsprof not available - install from '
                            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
                    p = lsprof.Profiler()
                    p.enable(subcalls=True)
                    try:
                        return d()
                    finally:
                        p.disable()
                        stats = lsprof.Stats(p.getstats())
                        stats.sort()
                        stats.pprint(top=10, file=sys.stderr, climit=5)
                else:
                    return d()
            finally:
                u.flush()
        except:
            # enter the debugger when we hit an exception
            if options['debugger']:
                pdb.post_mortem(sys.exc_info()[2])
            u.print_exc()
            raise
3441 except ParseError, inst:
3442 except ParseError, inst:
3442 if inst.args[0]:
3443 if inst.args[0]:
3443 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3444 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3444 help_(u, inst.args[0])
3445 help_(u, inst.args[0])
3445 else:
3446 else:
3446 u.warn(_("hg: %s\n") % inst.args[1])
3447 u.warn(_("hg: %s\n") % inst.args[1])
3447 help_(u, 'shortlist')
3448 help_(u, 'shortlist')
3448 except AmbiguousCommand, inst:
3449 except AmbiguousCommand, inst:
3449 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3450 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3450 (inst.args[0], " ".join(inst.args[1])))
3451 (inst.args[0], " ".join(inst.args[1])))
3451 except UnknownCommand, inst:
3452 except UnknownCommand, inst:
3452 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3453 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3453 help_(u, 'shortlist')
3454 help_(u, 'shortlist')
3454 except hg.RepoError, inst:
3455 except hg.RepoError, inst:
3455 u.warn(_("abort: %s!\n") % inst)
3456 u.warn(_("abort: %s!\n") % inst)
3456 except lock.LockHeld, inst:
3457 except lock.LockHeld, inst:
3457 if inst.errno == errno.ETIMEDOUT:
3458 if inst.errno == errno.ETIMEDOUT:
3458 reason = _('timed out waiting for lock held by %s') % inst.locker
3459 reason = _('timed out waiting for lock held by %s') % inst.locker
3459 else:
3460 else:
3460 reason = _('lock held by %s') % inst.locker
3461 reason = _('lock held by %s') % inst.locker
3461 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3462 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3462 except lock.LockUnavailable, inst:
3463 except lock.LockUnavailable, inst:
3463 u.warn(_("abort: could not lock %s: %s\n") %
3464 u.warn(_("abort: could not lock %s: %s\n") %
3464 (inst.desc or inst.filename, inst.strerror))
3465 (inst.desc or inst.filename, inst.strerror))
3465 except revlog.RevlogError, inst:
3466 except revlog.RevlogError, inst:
3466 u.warn(_("abort: "), inst, "!\n")
3467 u.warn(_("abort: "), inst, "!\n")
3467 except util.SignalInterrupt:
3468 except util.SignalInterrupt:
3468 u.warn(_("killed!\n"))
3469 u.warn(_("killed!\n"))
3469 except KeyboardInterrupt:
3470 except KeyboardInterrupt:
3470 try:
3471 try:
3471 u.warn(_("interrupted!\n"))
3472 u.warn(_("interrupted!\n"))
3472 except IOError, inst:
3473 except IOError, inst:
3473 if inst.errno == errno.EPIPE:
3474 if inst.errno == errno.EPIPE:
3474 if u.debugflag:
3475 if u.debugflag:
3475 u.warn(_("\nbroken pipe\n"))
3476 u.warn(_("\nbroken pipe\n"))
3476 else:
3477 else:
3477 raise
3478 raise
3478 except IOError, inst:
3479 except IOError, inst:
3479 if hasattr(inst, "code"):
3480 if hasattr(inst, "code"):
3480 u.warn(_("abort: %s\n") % inst)
3481 u.warn(_("abort: %s\n") % inst)
3481 elif hasattr(inst, "reason"):
3482 elif hasattr(inst, "reason"):
3482 u.warn(_("abort: error: %s\n") % inst.reason[1])
3483 u.warn(_("abort: error: %s\n") % inst.reason[1])
3483 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3484 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3484 if u.debugflag:
3485 if u.debugflag:
3485 u.warn(_("broken pipe\n"))
3486 u.warn(_("broken pipe\n"))
3486 elif getattr(inst, "strerror", None):
3487 elif getattr(inst, "strerror", None):
3487 if getattr(inst, "filename", None):
3488 if getattr(inst, "filename", None):
3488 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3489 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3489 else:
3490 else:
3490 u.warn(_("abort: %s\n") % inst.strerror)
3491 u.warn(_("abort: %s\n") % inst.strerror)
3491 else:
3492 else:
3492 raise
3493 raise
3493 except OSError, inst:
3494 except OSError, inst:
3494 if hasattr(inst, "filename"):
3495 if hasattr(inst, "filename"):
3495 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3496 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3496 else:
3497 else:
3497 u.warn(_("abort: %s\n") % inst.strerror)
3498 u.warn(_("abort: %s\n") % inst.strerror)
3498 except util.Abort, inst:
3499 except util.Abort, inst:
3499 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3500 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3500 except TypeError, inst:
3501 except TypeError, inst:
3501 # was this an argument error?
3502 # was this an argument error?
3502 tb = traceback.extract_tb(sys.exc_info()[2])
3503 tb = traceback.extract_tb(sys.exc_info()[2])
3503 if len(tb) > 2: # no
3504 if len(tb) > 2: # no
3504 raise
3505 raise
3505 u.debug(inst, "\n")
3506 u.debug(inst, "\n")
3506 u.warn(_("%s: invalid arguments\n") % cmd)
3507 u.warn(_("%s: invalid arguments\n") % cmd)
3507 help_(u, cmd)
3508 help_(u, cmd)
3508 except SystemExit, inst:
3509 except SystemExit, inst:
3509 # Commands shouldn't sys.exit directly, but give a return code.
3510 # Commands shouldn't sys.exit directly, but give a return code.
3510 # Just in case catch this and pass exit code to caller.
3511 # Just in case catch this and pass exit code to caller.
3511 return inst.code
3512 return inst.code
3512 except:
3513 except:
3513 u.warn(_("** unknown exception encountered, details follow\n"))
3514 u.warn(_("** unknown exception encountered, details follow\n"))
3514 u.warn(_("** report bug details to "
3515 u.warn(_("** report bug details to "
3515 "http://www.selenic.com/mercurial/bts\n"))
3516 "http://www.selenic.com/mercurial/bts\n"))
3516 u.warn(_("** or mercurial@selenic.com\n"))
3517 u.warn(_("** or mercurial@selenic.com\n"))
3517 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3518 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3518 % version.get_version())
3519 % version.get_version())
3519 raise
3520 raise
3520
3521
3521 return -1
3522 return -1
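
The profiling branches above (the hotshot stats loaded from "hg.prof" and the options['lsprof'] path) all follow the same pattern: wrap the zero-argument command callable d() in a profiler, make sure the profiler is stopped in a finally block, then print the hottest entries sorted by time and call count. A minimal sketch of that wrapping pattern, assuming the standard-library cProfile/pstats modules instead of the bundled hotshot/lsprof support; the helper name profiled is hypothetical and not part of the source:

import cProfile, pstats, sys

def profiled(d):
    # hypothetical helper, not Mercurial code: wrap a zero-argument callable
    # the way dispatch() wraps d() above, then print the top 40 entries
    # (sorted by time and call count) to stderr
    prof = cProfile.Profile()
    try:
        return prof.runcall(d)
    finally:
        stats = pstats.Stats(prof, stream=sys.stderr)
        stats.strip_dirs()
        stats.sort_stats('time', 'calls')
        stats.print_stats(40)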
@@ -1,128 +1,127 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from demandload import *
9 from demandload import *
10 demandload(globals(), "bdiff os")
10 demandload(globals(), "bdiff os")
11
11
12 class filelog(revlog):
12 class filelog(revlog):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
14 revlog.__init__(self, opener,
14 revlog.__init__(self, opener,
15 os.path.join("data", self.encodedir(path + ".i")),
15 os.path.join("data", self.encodedir(path + ".i")),
16 os.path.join("data", self.encodedir(path + ".d")),
16 os.path.join("data", self.encodedir(path + ".d")),
17 defversion)
17 defversion)
18
18
19 # This avoids a collision between a file named foo and a dir named
19 # This avoids a collision between a file named foo and a dir named
20 # foo.i or foo.d
20 # foo.i or foo.d
21 def encodedir(self, path):
21 def encodedir(self, path):
22 return (path
22 return (path
23 .replace(".hg/", ".hg.hg/")
23 .replace(".hg/", ".hg.hg/")
24 .replace(".i/", ".i.hg/")
24 .replace(".i/", ".i.hg/")
25 .replace(".d/", ".d.hg/"))
25 .replace(".d/", ".d.hg/"))
26
26
27 def decodedir(self, path):
27 def decodedir(self, path):
28 return (path
28 return (path
29 .replace(".d.hg/", ".d/")
29 .replace(".d.hg/", ".d/")
30 .replace(".i.hg/", ".i/")
30 .replace(".i.hg/", ".i/")
31 .replace(".hg.hg/", ".hg/"))
31 .replace(".hg.hg/", ".hg/"))
32
32
33 def read(self, node):
33 def read(self, node):
34 t = self.revision(node)
34 t = self.revision(node)
35 if not t.startswith('\1\n'):
35 if not t.startswith('\1\n'):
36 return t
36 return t
37 s = t.index('\1\n', 2)
37 s = t.index('\1\n', 2)
38 return t[s+2:]
38 return t[s+2:]
39
39
40 def readmeta(self, node):
40 def readmeta(self, node):
41 t = self.revision(node)
41 t = self.revision(node)
42 if not t.startswith('\1\n'):
42 if not t.startswith('\1\n'):
43 return {}
43 return {}
44 s = t.index('\1\n', 2)
44 s = t.index('\1\n', 2)
45 mt = t[2:s]
45 mt = t[2:s]
46 m = {}
46 m = {}
47 for l in mt.splitlines():
47 for l in mt.splitlines():
48 k, v = l.split(": ", 1)
48 k, v = l.split(": ", 1)
49 m[k] = v
49 m[k] = v
50 return m
50 return m
51
51
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 if meta or text.startswith('\1\n'):
53 if meta or text.startswith('\1\n'):
54 mt = ""
54 mt = ""
55 if meta:
55 if meta:
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 return self.addrevision(text, transaction, link, p1, p2)
58 return self.addrevision(text, transaction, link, p1, p2)
59
59
60 def renamed(self, node):
60 def renamed(self, node):
61 if self.parents(node)[0] != nullid:
61 if self.parents(node)[0] != nullid:
62 return False
62 return False
63 m = self.readmeta(node)
63 m = self.readmeta(node)
64 if m and m.has_key("copy"):
64 if m and m.has_key("copy"):
65 return (m["copy"], bin(m["copyrev"]))
65 return (m["copy"], bin(m["copyrev"]))
66 return False
66 return False
67
67
68 def size(self, rev):
69 """return the size of a given revision"""
70
71 # for revisions with renames, we have to go the slow way
72 node = self.node(rev)
73 if self.renamed(node):
74 return len(self.read(node))
75
76 return revlog.size(self, rev)
77
68 def cmp(self, node, text):
78 def cmp(self, node, text):
69 """compare text with a given file revision"""
79 """compare text with a given file revision"""
70
80
71 # for renames, we have to go the slow way
81 # for renames, we have to go the slow way
72 if self.renamed(node):
82 if self.renamed(node):
73 t2 = self.read(node)
83 t2 = self.read(node)
74 return t2 == text
84 return t2 != text
75
76 p1, p2 = self.parents(node)
77 h = hash(text, p1, p2)
78
79 return h != node
80
85
81 def makenode(self, node, text):
86 return revlog.cmp(self, node, text)
82 """calculate a file nodeid for text, descended or possibly
83 unchanged from node"""
84
85 if self.cmp(node, text):
86 return hash(text, node, nullid)
87 return node
88
87
89 def annotate(self, node):
88 def annotate(self, node):
90
89
91 def decorate(text, rev):
90 def decorate(text, rev):
92 return ([rev] * len(text.splitlines()), text)
91 return ([rev] * len(text.splitlines()), text)
93
92
94 def pair(parent, child):
93 def pair(parent, child):
95 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
94 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
96 child[0][b1:b2] = parent[0][a1:a2]
95 child[0][b1:b2] = parent[0][a1:a2]
97 return child
96 return child
98
97
99 # find all ancestors
98 # find all ancestors
100 needed = {node:1}
99 needed = {node:1}
101 visit = [node]
100 visit = [node]
102 while visit:
101 while visit:
103 n = visit.pop(0)
102 n = visit.pop(0)
104 for p in self.parents(n):
103 for p in self.parents(n):
105 if p not in needed:
104 if p not in needed:
106 needed[p] = 1
105 needed[p] = 1
107 visit.append(p)
106 visit.append(p)
108 else:
107 else:
109 # count how many times we'll use this
108 # count how many times we'll use this
110 needed[p] += 1
109 needed[p] += 1
111
110
112 # sort by revision which is a topological order
111 # sort by revision which is a topological order
113 visit = [ (self.rev(n), n) for n in needed.keys() ]
112 visit = [ (self.rev(n), n) for n in needed.keys() ]
114 visit.sort()
113 visit.sort()
115 hist = {}
114 hist = {}
116
115
117 for r,n in visit:
116 for r,n in visit:
118 curr = decorate(self.read(n), self.linkrev(n))
117 curr = decorate(self.read(n), self.linkrev(n))
119 for p in self.parents(n):
118 for p in self.parents(n):
120 if p != nullid:
119 if p != nullid:
121 curr = pair(hist[p], curr)
120 curr = pair(hist[p], curr)
122 # trim the history of unneeded revs
121 # trim the history of unneeded revs
123 needed[p] -= 1
122 needed[p] -= 1
124 if not needed[p]:
123 if not needed[p]:
125 del hist[p]
124 del hist[p]
126 hist[n] = curr
125 hist[n] = curr
127
126
128 return zip(hist[n][0], hist[n][1].splitlines(1))
127 return zip(hist[n][0], hist[n][1].splitlines(1))
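
filelog.add(), read() and readmeta() above share one convention: a revision that carries copy/rename metadata is stored with a "\1\n" envelope - a "key: value" block terminated by a second "\1\n" - followed by the real file text, and renamed() looks for the "copy"/"copyrev" keys inside it. A minimal round-trip sketch of that envelope, assuming nothing beyond what the methods above show; packmeta and splitmeta are hypothetical names, not Mercurial APIs:

def packmeta(meta, text):
    # hypothetical helper: build the "\1\n" envelope the way filelog.add() does,
    # escaping plain text that happens to start with "\1\n"
    if not meta and not text.startswith('\1\n'):
        return text
    lines = "".join("%s: %s\n" % (k, v) for k, v in sorted(meta.items()))
    return "\1\n%s\1\n%s" % (lines, text)

def splitmeta(t):
    # hypothetical helper: invert the envelope the way read()/readmeta() do
    if not t.startswith('\1\n'):
        return {}, t
    end = t.index('\1\n', 2)
    meta = dict(l.split(": ", 1) for l in t[2:end].splitlines())
    return meta, t[end + 2:]

# round trip: copy metadata survives, plain text passes through untouched
meta, text = splitmeta(packmeta({"copy": "a", "copyrev": "0" * 40}, "data\n"))
assert meta["copy"] == "a" and text == "data\n"
assert splitmeta(packmeta({}, "plain\n")) == ({}, "plain\n")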
@@ -1,981 +1,981 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os
10 import os.path
10 import os.path
11 import mimetypes
11 import mimetypes
12 from mercurial.demandload import demandload
12 from mercurial.demandload import demandload
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone,patch")
15 demandload(globals(), "mercurial:templater")
15 demandload(globals(), "mercurial:templater")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
17 from mercurial.node import *
17 from mercurial.node import *
18 from mercurial.i18n import gettext as _
18 from mercurial.i18n import gettext as _
19
19
20 def _up(p):
20 def _up(p):
21 if p[0] != "/":
21 if p[0] != "/":
22 p = "/" + p
22 p = "/" + p
23 if p[-1] == "/":
23 if p[-1] == "/":
24 p = p[:-1]
24 p = p[:-1]
25 up = os.path.dirname(p)
25 up = os.path.dirname(p)
26 if up == "/":
26 if up == "/":
27 return "/"
27 return "/"
28 return up + "/"
28 return up + "/"
29
29
30 class hgweb(object):
30 class hgweb(object):
31 def __init__(self, repo, name=None):
31 def __init__(self, repo, name=None):
32 if type(repo) == type(""):
32 if type(repo) == type(""):
33 self.repo = hg.repository(ui.ui(), repo)
33 self.repo = hg.repository(ui.ui(), repo)
34 else:
34 else:
35 self.repo = repo
35 self.repo = repo
36
36
37 self.mtime = -1
37 self.mtime = -1
38 self.reponame = name
38 self.reponame = name
39 self.archives = 'zip', 'gz', 'bz2'
39 self.archives = 'zip', 'gz', 'bz2'
40 self.stripecount = 1
40 self.stripecount = 1
41 self.templatepath = self.repo.ui.config("web", "templates",
41 self.templatepath = self.repo.ui.config("web", "templates",
42 templater.templatepath())
42 templater.templatepath())
43
43
44 def refresh(self):
44 def refresh(self):
45 mtime = get_mtime(self.repo.root)
45 mtime = get_mtime(self.repo.root)
46 if mtime != self.mtime:
46 if mtime != self.mtime:
47 self.mtime = mtime
47 self.mtime = mtime
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
54
54
55 def archivelist(self, nodeid):
55 def archivelist(self, nodeid):
56 allowed = self.repo.ui.configlist("web", "allow_archive")
56 allowed = self.repo.ui.configlist("web", "allow_archive")
57 for i in self.archives:
57 for i in self.archives:
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
59 yield {"type" : i, "node" : nodeid, "url": ""}
59 yield {"type" : i, "node" : nodeid, "url": ""}
60
60
61 def listfiles(self, files, mf):
61 def listfiles(self, files, mf):
62 for f in files[:self.maxfiles]:
62 for f in files[:self.maxfiles]:
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
64 if len(files) > self.maxfiles:
64 if len(files) > self.maxfiles:
65 yield self.t("fileellipses")
65 yield self.t("fileellipses")
66
66
67 def listfilediffs(self, files, changeset):
67 def listfilediffs(self, files, changeset):
68 for f in files[:self.maxfiles]:
68 for f in files[:self.maxfiles]:
69 yield self.t("filedifflink", node=hex(changeset), file=f)
69 yield self.t("filedifflink", node=hex(changeset), file=f)
70 if len(files) > self.maxfiles:
70 if len(files) > self.maxfiles:
71 yield self.t("fileellipses")
71 yield self.t("fileellipses")
72
72
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
74 if not rev:
74 if not rev:
75 rev = lambda x: ""
75 rev = lambda x: ""
76 siblings = [s for s in siblings if s != nullid]
76 siblings = [s for s in siblings if s != nullid]
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
78 return
78 return
79 for s in siblings:
79 for s in siblings:
80 yield dict(node=hex(s), rev=rev(s), **args)
80 yield dict(node=hex(s), rev=rev(s), **args)
81
81
82 def renamelink(self, fl, node):
82 def renamelink(self, fl, node):
83 r = fl.renamed(node)
83 r = fl.renamed(node)
84 if r:
84 if r:
85 return [dict(file=r[0], node=hex(r[1]))]
85 return [dict(file=r[0], node=hex(r[1]))]
86 return []
86 return []
87
87
88 def showtag(self, t1, node=nullid, **args):
88 def showtag(self, t1, node=nullid, **args):
89 for t in self.repo.nodetags(node):
89 for t in self.repo.nodetags(node):
90 yield self.t(t1, tag=t, **args)
90 yield self.t(t1, tag=t, **args)
91
91
92 def diff(self, node1, node2, files):
92 def diff(self, node1, node2, files):
93 def filterfiles(filters, files):
93 def filterfiles(filters, files):
94 l = [x for x in files if x in filters]
94 l = [x for x in files if x in filters]
95
95
96 for t in filters:
96 for t in filters:
97 if t and t[-1] != os.sep:
97 if t and t[-1] != os.sep:
98 t += os.sep
98 t += os.sep
99 l += [x for x in files if x.startswith(t)]
99 l += [x for x in files if x.startswith(t)]
100 return l
100 return l
101
101
102 parity = [0]
102 parity = [0]
103 def diffblock(diff, f, fn):
103 def diffblock(diff, f, fn):
104 yield self.t("diffblock",
104 yield self.t("diffblock",
105 lines=prettyprintlines(diff),
105 lines=prettyprintlines(diff),
106 parity=parity[0],
106 parity=parity[0],
107 file=f,
107 file=f,
108 filenode=hex(fn or nullid))
108 filenode=hex(fn or nullid))
109 parity[0] = 1 - parity[0]
109 parity[0] = 1 - parity[0]
110
110
111 def prettyprintlines(diff):
111 def prettyprintlines(diff):
112 for l in diff.splitlines(1):
112 for l in diff.splitlines(1):
113 if l.startswith('+'):
113 if l.startswith('+'):
114 yield self.t("difflineplus", line=l)
114 yield self.t("difflineplus", line=l)
115 elif l.startswith('-'):
115 elif l.startswith('-'):
116 yield self.t("difflineminus", line=l)
116 yield self.t("difflineminus", line=l)
117 elif l.startswith('@'):
117 elif l.startswith('@'):
118 yield self.t("difflineat", line=l)
118 yield self.t("difflineat", line=l)
119 else:
119 else:
120 yield self.t("diffline", line=l)
120 yield self.t("diffline", line=l)
121
121
122 r = self.repo
122 r = self.repo
123 cl = r.changelog
123 cl = r.changelog
124 mf = r.manifest
124 mf = r.manifest
125 change1 = cl.read(node1)
125 change1 = cl.read(node1)
126 change2 = cl.read(node2)
126 change2 = cl.read(node2)
127 mmap1 = mf.read(change1[0])
127 mmap1 = mf.read(change1[0])
128 mmap2 = mf.read(change2[0])
128 mmap2 = mf.read(change2[0])
129 date1 = util.datestr(change1[2])
129 date1 = util.datestr(change1[2])
130 date2 = util.datestr(change2[2])
130 date2 = util.datestr(change2[2])
131
131
132 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
132 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
133 if files:
133 if files:
134 modified, added, removed = map(lambda x: filterfiles(files, x),
134 modified, added, removed = map(lambda x: filterfiles(files, x),
135 (modified, added, removed))
135 (modified, added, removed))
136
136
137 diffopts = self.repo.ui.diffopts()
137 diffopts = patch.diffopts(ui)
138 for f in modified:
138 for f in modified:
139 to = r.file(f).read(mmap1[f])
139 to = r.file(f).read(mmap1[f])
140 tn = r.file(f).read(mmap2[f])
140 tn = r.file(f).read(mmap2[f])
141 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
141 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
142 opts=diffopts), f, tn)
142 opts=diffopts), f, tn)
143 for f in added:
143 for f in added:
144 to = None
144 to = None
145 tn = r.file(f).read(mmap2[f])
145 tn = r.file(f).read(mmap2[f])
146 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
146 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
147 opts=diffopts), f, tn)
147 opts=diffopts), f, tn)
148 for f in removed:
148 for f in removed:
149 to = r.file(f).read(mmap1[f])
149 to = r.file(f).read(mmap1[f])
150 tn = None
150 tn = None
151 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
151 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
152 opts=diffopts), f, tn)
152 opts=diffopts), f, tn)
153
153
154 def changelog(self, pos, shortlog=False):
154 def changelog(self, pos, shortlog=False):
155 def changenav(**map):
155 def changenav(**map):
156 def seq(factor, maxchanges=None):
156 def seq(factor, maxchanges=None):
157 if maxchanges:
157 if maxchanges:
158 yield maxchanges
158 yield maxchanges
159 if maxchanges >= 20 and maxchanges <= 40:
159 if maxchanges >= 20 and maxchanges <= 40:
160 yield 50
160 yield 50
161 else:
161 else:
162 yield 1 * factor
162 yield 1 * factor
163 yield 3 * factor
163 yield 3 * factor
164 for f in seq(factor * 10):
164 for f in seq(factor * 10):
165 yield f
165 yield f
166
166
167 l = []
167 l = []
168 last = 0
168 last = 0
169 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
169 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
170 for f in seq(1, maxchanges):
170 for f in seq(1, maxchanges):
171 if f < maxchanges or f <= last:
171 if f < maxchanges or f <= last:
172 continue
172 continue
173 if f > count:
173 if f > count:
174 break
174 break
175 last = f
175 last = f
176 r = "%d" % f
176 r = "%d" % f
177 if pos + f < count:
177 if pos + f < count:
178 l.append(("+" + r, pos + f))
178 l.append(("+" + r, pos + f))
179 if pos - f >= 0:
179 if pos - f >= 0:
180 l.insert(0, ("-" + r, pos - f))
180 l.insert(0, ("-" + r, pos - f))
181
181
182 yield {"rev": 0, "label": "(0)"}
182 yield {"rev": 0, "label": "(0)"}
183
183
184 for label, rev in l:
184 for label, rev in l:
185 yield {"label": label, "rev": rev}
185 yield {"label": label, "rev": rev}
186
186
187 yield {"label": "tip", "rev": "tip"}
187 yield {"label": "tip", "rev": "tip"}
188
188
189 def changelist(**map):
189 def changelist(**map):
190 parity = (start - end) & 1
190 parity = (start - end) & 1
191 cl = self.repo.changelog
191 cl = self.repo.changelog
192 l = [] # build a list in forward order for efficiency
192 l = [] # build a list in forward order for efficiency
193 for i in range(start, end):
193 for i in range(start, end):
194 n = cl.node(i)
194 n = cl.node(i)
195 changes = cl.read(n)
195 changes = cl.read(n)
196 hn = hex(n)
196 hn = hex(n)
197
197
198 l.insert(0, {"parity": parity,
198 l.insert(0, {"parity": parity,
199 "author": changes[1],
199 "author": changes[1],
200 "parent": self.siblings(cl.parents(n), cl.rev,
200 "parent": self.siblings(cl.parents(n), cl.rev,
201 cl.rev(n) - 1),
201 cl.rev(n) - 1),
202 "child": self.siblings(cl.children(n), cl.rev,
202 "child": self.siblings(cl.children(n), cl.rev,
203 cl.rev(n) + 1),
203 cl.rev(n) + 1),
204 "changelogtag": self.showtag("changelogtag",n),
204 "changelogtag": self.showtag("changelogtag",n),
205 "manifest": hex(changes[0]),
205 "manifest": hex(changes[0]),
206 "desc": changes[4],
206 "desc": changes[4],
207 "date": changes[2],
207 "date": changes[2],
208 "files": self.listfilediffs(changes[3], n),
208 "files": self.listfilediffs(changes[3], n),
209 "rev": i,
209 "rev": i,
210 "node": hn})
210 "node": hn})
211 parity = 1 - parity
211 parity = 1 - parity
212
212
213 for e in l:
213 for e in l:
214 yield e
214 yield e
215
215
216 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
216 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
217 cl = self.repo.changelog
217 cl = self.repo.changelog
218 mf = cl.read(cl.tip())[0]
218 mf = cl.read(cl.tip())[0]
219 count = cl.count()
219 count = cl.count()
220 start = max(0, pos - maxchanges + 1)
220 start = max(0, pos - maxchanges + 1)
221 end = min(count, start + maxchanges)
221 end = min(count, start + maxchanges)
222 pos = end - 1
222 pos = end - 1
223
223
224 yield self.t(shortlog and 'shortlog' or 'changelog',
224 yield self.t(shortlog and 'shortlog' or 'changelog',
225 changenav=changenav,
225 changenav=changenav,
226 manifest=hex(mf),
226 manifest=hex(mf),
227 rev=pos, changesets=count, entries=changelist,
227 rev=pos, changesets=count, entries=changelist,
228 archives=self.archivelist("tip"))
228 archives=self.archivelist("tip"))
229
229
230 def search(self, query):
230 def search(self, query):
231
231
232 def changelist(**map):
232 def changelist(**map):
233 cl = self.repo.changelog
233 cl = self.repo.changelog
234 count = 0
234 count = 0
235 qw = query.lower().split()
235 qw = query.lower().split()
236
236
237 def revgen():
237 def revgen():
238 for i in range(cl.count() - 1, 0, -100):
238 for i in range(cl.count() - 1, 0, -100):
239 l = []
239 l = []
240 for j in range(max(0, i - 100), i):
240 for j in range(max(0, i - 100), i):
241 n = cl.node(j)
241 n = cl.node(j)
242 changes = cl.read(n)
242 changes = cl.read(n)
243 l.append((n, j, changes))
243 l.append((n, j, changes))
244 l.reverse()
244 l.reverse()
245 for e in l:
245 for e in l:
246 yield e
246 yield e
247
247
248 for n, i, changes in revgen():
248 for n, i, changes in revgen():
249 miss = 0
249 miss = 0
250 for q in qw:
250 for q in qw:
251 if not (q in changes[1].lower() or
251 if not (q in changes[1].lower() or
252 q in changes[4].lower() or
252 q in changes[4].lower() or
253 q in " ".join(changes[3][:20]).lower()):
253 q in " ".join(changes[3][:20]).lower()):
254 miss = 1
254 miss = 1
255 break
255 break
256 if miss:
256 if miss:
257 continue
257 continue
258
258
259 count += 1
259 count += 1
260 hn = hex(n)
260 hn = hex(n)
261
261
262 yield self.t('searchentry',
262 yield self.t('searchentry',
263 parity=self.stripes(count),
263 parity=self.stripes(count),
264 author=changes[1],
264 author=changes[1],
265 parent=self.siblings(cl.parents(n), cl.rev),
265 parent=self.siblings(cl.parents(n), cl.rev),
266 child=self.siblings(cl.children(n), cl.rev),
266 child=self.siblings(cl.children(n), cl.rev),
267 changelogtag=self.showtag("changelogtag",n),
267 changelogtag=self.showtag("changelogtag",n),
268 manifest=hex(changes[0]),
268 manifest=hex(changes[0]),
269 desc=changes[4],
269 desc=changes[4],
270 date=changes[2],
270 date=changes[2],
271 files=self.listfilediffs(changes[3], n),
271 files=self.listfilediffs(changes[3], n),
272 rev=i,
272 rev=i,
273 node=hn)
273 node=hn)
274
274
275 if count >= self.maxchanges:
275 if count >= self.maxchanges:
276 break
276 break
277
277
278 cl = self.repo.changelog
278 cl = self.repo.changelog
279 mf = cl.read(cl.tip())[0]
279 mf = cl.read(cl.tip())[0]
280
280
281 yield self.t('search',
281 yield self.t('search',
282 query=query,
282 query=query,
283 manifest=hex(mf),
283 manifest=hex(mf),
284 entries=changelist)
284 entries=changelist)
285
285
286 def changeset(self, nodeid):
286 def changeset(self, nodeid):
287 cl = self.repo.changelog
287 cl = self.repo.changelog
288 n = self.repo.lookup(nodeid)
288 n = self.repo.lookup(nodeid)
289 nodeid = hex(n)
289 nodeid = hex(n)
290 changes = cl.read(n)
290 changes = cl.read(n)
291 p1 = cl.parents(n)[0]
291 p1 = cl.parents(n)[0]
292
292
293 files = []
293 files = []
294 mf = self.repo.manifest.read(changes[0])
294 mf = self.repo.manifest.read(changes[0])
295 for f in changes[3]:
295 for f in changes[3]:
296 files.append(self.t("filenodelink",
296 files.append(self.t("filenodelink",
297 filenode=hex(mf.get(f, nullid)), file=f))
297 filenode=hex(mf.get(f, nullid)), file=f))
298
298
299 def diff(**map):
299 def diff(**map):
300 yield self.diff(p1, n, None)
300 yield self.diff(p1, n, None)
301
301
302 yield self.t('changeset',
302 yield self.t('changeset',
303 diff=diff,
303 diff=diff,
304 rev=cl.rev(n),
304 rev=cl.rev(n),
305 node=nodeid,
305 node=nodeid,
306 parent=self.siblings(cl.parents(n), cl.rev),
306 parent=self.siblings(cl.parents(n), cl.rev),
307 child=self.siblings(cl.children(n), cl.rev),
307 child=self.siblings(cl.children(n), cl.rev),
308 changesettag=self.showtag("changesettag",n),
308 changesettag=self.showtag("changesettag",n),
309 manifest=hex(changes[0]),
309 manifest=hex(changes[0]),
310 author=changes[1],
310 author=changes[1],
311 desc=changes[4],
311 desc=changes[4],
312 date=changes[2],
312 date=changes[2],
313 files=files,
313 files=files,
314 archives=self.archivelist(nodeid))
314 archives=self.archivelist(nodeid))
315
315
316 def filelog(self, f, filenode):
316 def filelog(self, f, filenode):
317 cl = self.repo.changelog
317 cl = self.repo.changelog
318 fl = self.repo.file(f)
318 fl = self.repo.file(f)
319 filenode = hex(fl.lookup(filenode))
319 filenode = hex(fl.lookup(filenode))
320 count = fl.count()
320 count = fl.count()
321
321
322 def entries(**map):
322 def entries(**map):
323 l = []
323 l = []
324 parity = (count - 1) & 1
324 parity = (count - 1) & 1
325
325
326 for i in range(count):
326 for i in range(count):
327 n = fl.node(i)
327 n = fl.node(i)
328 lr = fl.linkrev(n)
328 lr = fl.linkrev(n)
329 cn = cl.node(lr)
329 cn = cl.node(lr)
330 cs = cl.read(cl.node(lr))
330 cs = cl.read(cl.node(lr))
331
331
332 l.insert(0, {"parity": parity,
332 l.insert(0, {"parity": parity,
333 "filenode": hex(n),
333 "filenode": hex(n),
334 "filerev": i,
334 "filerev": i,
335 "file": f,
335 "file": f,
336 "node": hex(cn),
336 "node": hex(cn),
337 "author": cs[1],
337 "author": cs[1],
338 "date": cs[2],
338 "date": cs[2],
339 "rename": self.renamelink(fl, n),
339 "rename": self.renamelink(fl, n),
340 "parent": self.siblings(fl.parents(n),
340 "parent": self.siblings(fl.parents(n),
341 fl.rev, file=f),
341 fl.rev, file=f),
342 "child": self.siblings(fl.children(n),
342 "child": self.siblings(fl.children(n),
343 fl.rev, file=f),
343 fl.rev, file=f),
344 "desc": cs[4]})
344 "desc": cs[4]})
345 parity = 1 - parity
345 parity = 1 - parity
346
346
347 for e in l:
347 for e in l:
348 yield e
348 yield e
349
349
350 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
350 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
351
351
352 def filerevision(self, f, node):
352 def filerevision(self, f, node):
353 fl = self.repo.file(f)
353 fl = self.repo.file(f)
354 n = fl.lookup(node)
354 n = fl.lookup(node)
355 node = hex(n)
355 node = hex(n)
356 text = fl.read(n)
356 text = fl.read(n)
357 changerev = fl.linkrev(n)
357 changerev = fl.linkrev(n)
358 cl = self.repo.changelog
358 cl = self.repo.changelog
359 cn = cl.node(changerev)
359 cn = cl.node(changerev)
360 cs = cl.read(cn)
360 cs = cl.read(cn)
361 mfn = cs[0]
361 mfn = cs[0]
362
362
363 mt = mimetypes.guess_type(f)[0]
363 mt = mimetypes.guess_type(f)[0]
364 rawtext = text
364 rawtext = text
365 if util.binary(text):
365 if util.binary(text):
366 mt = mt or 'application/octet-stream'
366 mt = mt or 'application/octet-stream'
367 text = "(binary:%s)" % mt
367 text = "(binary:%s)" % mt
368 mt = mt or 'text/plain'
368 mt = mt or 'text/plain'
369
369
370 def lines():
370 def lines():
371 for l, t in enumerate(text.splitlines(1)):
371 for l, t in enumerate(text.splitlines(1)):
372 yield {"line": t,
372 yield {"line": t,
373 "linenumber": "% 6d" % (l + 1),
373 "linenumber": "% 6d" % (l + 1),
374 "parity": self.stripes(l)}
374 "parity": self.stripes(l)}
375
375
376 yield self.t("filerevision",
376 yield self.t("filerevision",
377 file=f,
377 file=f,
378 filenode=node,
378 filenode=node,
379 path=_up(f),
379 path=_up(f),
380 text=lines(),
380 text=lines(),
381 raw=rawtext,
381 raw=rawtext,
382 mimetype=mt,
382 mimetype=mt,
383 rev=changerev,
383 rev=changerev,
384 node=hex(cn),
384 node=hex(cn),
385 manifest=hex(mfn),
385 manifest=hex(mfn),
386 author=cs[1],
386 author=cs[1],
387 date=cs[2],
387 date=cs[2],
388 parent=self.siblings(fl.parents(n), fl.rev, file=f),
388 parent=self.siblings(fl.parents(n), fl.rev, file=f),
389 child=self.siblings(fl.children(n), fl.rev, file=f),
389 child=self.siblings(fl.children(n), fl.rev, file=f),
390 rename=self.renamelink(fl, n),
390 rename=self.renamelink(fl, n),
391 permissions=self.repo.manifest.read(mfn).execf(f))
391 permissions=self.repo.manifest.read(mfn).execf(f))
392
392
393 def fileannotate(self, f, node):
393 def fileannotate(self, f, node):
394 bcache = {}
394 bcache = {}
395 ncache = {}
395 ncache = {}
396 fl = self.repo.file(f)
396 fl = self.repo.file(f)
397 n = fl.lookup(node)
397 n = fl.lookup(node)
398 node = hex(n)
398 node = hex(n)
399 changerev = fl.linkrev(n)
399 changerev = fl.linkrev(n)
400
400
401 cl = self.repo.changelog
401 cl = self.repo.changelog
402 cn = cl.node(changerev)
402 cn = cl.node(changerev)
403 cs = cl.read(cn)
403 cs = cl.read(cn)
404 mfn = cs[0]
404 mfn = cs[0]
405
405
406 def annotate(**map):
406 def annotate(**map):
407 parity = 0
407 parity = 0
408 last = None
408 last = None
409 for r, l in fl.annotate(n):
409 for r, l in fl.annotate(n):
410 try:
410 try:
411 cnode = ncache[r]
411 cnode = ncache[r]
412 except KeyError:
412 except KeyError:
413 cnode = ncache[r] = self.repo.changelog.node(r)
413 cnode = ncache[r] = self.repo.changelog.node(r)
414
414
415 try:
415 try:
416 name = bcache[r]
416 name = bcache[r]
417 except KeyError:
417 except KeyError:
418 cl = self.repo.changelog.read(cnode)
418 cl = self.repo.changelog.read(cnode)
419 bcache[r] = name = self.repo.ui.shortuser(cl[1])
419 bcache[r] = name = self.repo.ui.shortuser(cl[1])
420
420
421 if last != cnode:
421 if last != cnode:
422 parity = 1 - parity
422 parity = 1 - parity
423 last = cnode
423 last = cnode
424
424
425 yield {"parity": parity,
425 yield {"parity": parity,
426 "node": hex(cnode),
426 "node": hex(cnode),
427 "rev": r,
427 "rev": r,
428 "author": name,
428 "author": name,
429 "file": f,
429 "file": f,
430 "line": l}
430 "line": l}
431
431
432 yield self.t("fileannotate",
432 yield self.t("fileannotate",
433 file=f,
433 file=f,
434 filenode=node,
434 filenode=node,
435 annotate=annotate,
435 annotate=annotate,
436 path=_up(f),
436 path=_up(f),
437 rev=changerev,
437 rev=changerev,
438 node=hex(cn),
438 node=hex(cn),
439 manifest=hex(mfn),
439 manifest=hex(mfn),
440 author=cs[1],
440 author=cs[1],
441 date=cs[2],
441 date=cs[2],
442 rename=self.renamelink(fl, n),
442 rename=self.renamelink(fl, n),
443 parent=self.siblings(fl.parents(n), fl.rev, file=f),
443 parent=self.siblings(fl.parents(n), fl.rev, file=f),
444 child=self.siblings(fl.children(n), fl.rev, file=f),
444 child=self.siblings(fl.children(n), fl.rev, file=f),
445 permissions=self.repo.manifest.read(mfn).execf(f))
445 permissions=self.repo.manifest.read(mfn).execf(f))
446
446
447 def manifest(self, mnode, path):
447 def manifest(self, mnode, path):
448 man = self.repo.manifest
448 man = self.repo.manifest
449 mn = man.lookup(mnode)
449 mn = man.lookup(mnode)
450 mnode = hex(mn)
450 mnode = hex(mn)
451 mf = man.read(mn)
451 mf = man.read(mn)
452 rev = man.rev(mn)
452 rev = man.rev(mn)
453 changerev = man.linkrev(mn)
453 changerev = man.linkrev(mn)
454 node = self.repo.changelog.node(changerev)
454 node = self.repo.changelog.node(changerev)
455
455
456 files = {}
456 files = {}
457
457
458 p = path[1:]
458 p = path[1:]
459 if p and p[-1] != "/":
459 if p and p[-1] != "/":
460 p += "/"
460 p += "/"
461 l = len(p)
461 l = len(p)
462
462
463 for f,n in mf.items():
463 for f,n in mf.items():
464 if f[:l] != p:
464 if f[:l] != p:
465 continue
465 continue
466 remain = f[l:]
466 remain = f[l:]
467 if "/" in remain:
467 if "/" in remain:
468 short = remain[:remain.index("/") + 1] # bleah
468 short = remain[:remain.index("/") + 1] # bleah
469 files[short] = (f, None)
469 files[short] = (f, None)
470 else:
470 else:
471 short = os.path.basename(remain)
471 short = os.path.basename(remain)
472 files[short] = (f, n)
472 files[short] = (f, n)
473
473
474 def filelist(**map):
474 def filelist(**map):
475 parity = 0
475 parity = 0
476 fl = files.keys()
476 fl = files.keys()
477 fl.sort()
477 fl.sort()
478 for f in fl:
478 for f in fl:
479 full, fnode = files[f]
479 full, fnode = files[f]
480 if not fnode:
480 if not fnode:
481 continue
481 continue
482
482
483 yield {"file": full,
483 yield {"file": full,
484 "manifest": mnode,
484 "manifest": mnode,
485 "filenode": hex(fnode),
485 "filenode": hex(fnode),
486 "parity": self.stripes(parity),
486 "parity": self.stripes(parity),
487 "basename": f,
487 "basename": f,
488 "permissions": mf.execf(full)}
488 "permissions": mf.execf(full)}
489 parity += 1
489 parity += 1
490
490
491 def dirlist(**map):
491 def dirlist(**map):
492 parity = 0
492 parity = 0
493 fl = files.keys()
493 fl = files.keys()
494 fl.sort()
494 fl.sort()
495 for f in fl:
495 for f in fl:
496 full, fnode = files[f]
496 full, fnode = files[f]
497 if fnode:
497 if fnode:
498 continue
498 continue
499
499
500 yield {"parity": self.stripes(parity),
500 yield {"parity": self.stripes(parity),
501 "path": os.path.join(path, f),
501 "path": os.path.join(path, f),
502 "manifest": mnode,
502 "manifest": mnode,
503 "basename": f[:-1]}
503 "basename": f[:-1]}
504 parity += 1
504 parity += 1
505
505
506 yield self.t("manifest",
506 yield self.t("manifest",
507 manifest=mnode,
507 manifest=mnode,
508 rev=rev,
508 rev=rev,
509 node=hex(node),
509 node=hex(node),
510 path=path,
510 path=path,
511 up=_up(path),
511 up=_up(path),
512 fentries=filelist,
512 fentries=filelist,
513 dentries=dirlist,
513 dentries=dirlist,
514 archives=self.archivelist(hex(node)))
514 archives=self.archivelist(hex(node)))
515
515
516 def tags(self):
516 def tags(self):
517 cl = self.repo.changelog
517 cl = self.repo.changelog
518 mf = cl.read(cl.tip())[0]
518 mf = cl.read(cl.tip())[0]
519
519
520 i = self.repo.tagslist()
520 i = self.repo.tagslist()
521 i.reverse()
521 i.reverse()
522
522
523 def entries(notip=False, **map):
523 def entries(notip=False, **map):
524 parity = 0
524 parity = 0
525 for k,n in i:
525 for k,n in i:
526 if notip and k == "tip": continue
526 if notip and k == "tip": continue
527 yield {"parity": self.stripes(parity),
527 yield {"parity": self.stripes(parity),
528 "tag": k,
528 "tag": k,
529 "tagmanifest": hex(cl.read(n)[0]),
529 "tagmanifest": hex(cl.read(n)[0]),
530 "date": cl.read(n)[2],
530 "date": cl.read(n)[2],
531 "node": hex(n)}
531 "node": hex(n)}
532 parity += 1
532 parity += 1
533
533
534 yield self.t("tags",
534 yield self.t("tags",
535 manifest=hex(mf),
535 manifest=hex(mf),
536 entries=lambda **x: entries(False, **x),
536 entries=lambda **x: entries(False, **x),
537 entriesnotip=lambda **x: entries(True, **x))
537 entriesnotip=lambda **x: entries(True, **x))
538
538
539 def summary(self):
539 def summary(self):
540 cl = self.repo.changelog
540 cl = self.repo.changelog
541 mf = cl.read(cl.tip())[0]
541 mf = cl.read(cl.tip())[0]
542
542
543 i = self.repo.tagslist()
543 i = self.repo.tagslist()
544 i.reverse()
544 i.reverse()
545
545
546 def tagentries(**map):
546 def tagentries(**map):
547 parity = 0
547 parity = 0
548 count = 0
548 count = 0
549 for k,n in i:
549 for k,n in i:
550 if k == "tip": # skip tip
550 if k == "tip": # skip tip
551 continue;
551 continue;
552
552
553 count += 1
553 count += 1
554 if count > 10: # limit to 10 tags
554 if count > 10: # limit to 10 tags
555 break;
555 break;
556
556
557 c = cl.read(n)
557 c = cl.read(n)
558 m = c[0]
558 m = c[0]
559 t = c[2]
559 t = c[2]
560
560
561 yield self.t("tagentry",
561 yield self.t("tagentry",
562 parity = self.stripes(parity),
562 parity = self.stripes(parity),
563 tag = k,
563 tag = k,
564 node = hex(n),
564 node = hex(n),
565 date = t,
565 date = t,
566 tagmanifest = hex(m))
566 tagmanifest = hex(m))
567 parity += 1
567 parity += 1
568
568
569 def changelist(**map):
569 def changelist(**map):
570 parity = 0
570 parity = 0
571 cl = self.repo.changelog
571 cl = self.repo.changelog
572 l = [] # build a list in forward order for efficiency
572 l = [] # build a list in forward order for efficiency
573 for i in range(start, end):
573 for i in range(start, end):
574 n = cl.node(i)
574 n = cl.node(i)
575 changes = cl.read(n)
575 changes = cl.read(n)
576 hn = hex(n)
576 hn = hex(n)
577 t = changes[2]
577 t = changes[2]
578
578
579 l.insert(0, self.t(
579 l.insert(0, self.t(
580 'shortlogentry',
580 'shortlogentry',
581 parity = parity,
581 parity = parity,
582 author = changes[1],
582 author = changes[1],
583 manifest = hex(changes[0]),
583 manifest = hex(changes[0]),
584 desc = changes[4],
584 desc = changes[4],
585 date = t,
585 date = t,
586 rev = i,
586 rev = i,
587 node = hn))
587 node = hn))
588 parity = 1 - parity
588 parity = 1 - parity
589
589
590 yield l
590 yield l
591
591
592 cl = self.repo.changelog
592 cl = self.repo.changelog
593 mf = cl.read(cl.tip())[0]
593 mf = cl.read(cl.tip())[0]
594 count = cl.count()
594 count = cl.count()
595 start = max(0, count - self.maxchanges)
595 start = max(0, count - self.maxchanges)
596 end = min(count, start + self.maxchanges)
596 end = min(count, start + self.maxchanges)
597
597
598 yield self.t("summary",
598 yield self.t("summary",
599 desc = self.repo.ui.config("web", "description", "unknown"),
599 desc = self.repo.ui.config("web", "description", "unknown"),
600 owner = (self.repo.ui.config("ui", "username") or # preferred
600 owner = (self.repo.ui.config("ui", "username") or # preferred
601 self.repo.ui.config("web", "contact") or # deprecated
601 self.repo.ui.config("web", "contact") or # deprecated
602 self.repo.ui.config("web", "author", "unknown")), # also
602 self.repo.ui.config("web", "author", "unknown")), # also
603 lastchange = (0, 0), # FIXME
603 lastchange = (0, 0), # FIXME
604 manifest = hex(mf),
604 manifest = hex(mf),
605 tags = tagentries,
605 tags = tagentries,
606 shortlog = changelist,
606 shortlog = changelist,
607 archives=self.archivelist("tip"))
607 archives=self.archivelist("tip"))
608
608
609 def filediff(self, file, changeset):
609 def filediff(self, file, changeset):
610 cl = self.repo.changelog
610 cl = self.repo.changelog
611 n = self.repo.lookup(changeset)
611 n = self.repo.lookup(changeset)
612 changeset = hex(n)
612 changeset = hex(n)
613 p1 = cl.parents(n)[0]
613 p1 = cl.parents(n)[0]
614 cs = cl.read(n)
614 cs = cl.read(n)
615 mf = self.repo.manifest.read(cs[0])
615 mf = self.repo.manifest.read(cs[0])
616
616
617 def diff(**map):
617 def diff(**map):
618 yield self.diff(p1, n, [file])
618 yield self.diff(p1, n, [file])
619
619
620 yield self.t("filediff",
620 yield self.t("filediff",
621 file=file,
621 file=file,
622 filenode=hex(mf.get(file, nullid)),
622 filenode=hex(mf.get(file, nullid)),
623 node=changeset,
623 node=changeset,
624 rev=self.repo.changelog.rev(n),
624 rev=self.repo.changelog.rev(n),
625 parent=self.siblings(cl.parents(n), cl.rev),
625 parent=self.siblings(cl.parents(n), cl.rev),
626 child=self.siblings(cl.children(n), cl.rev),
626 child=self.siblings(cl.children(n), cl.rev),
627 diff=diff)
627 diff=diff)
628
628
629 archive_specs = {
629 archive_specs = {
630 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
630 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
631 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
631 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
632 'zip': ('application/zip', 'zip', '.zip', None),
632 'zip': ('application/zip', 'zip', '.zip', None),
633 }
633 }
634
634
635 def archive(self, req, cnode, type_):
635 def archive(self, req, cnode, type_):
636 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
636 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
637 name = "%s-%s" % (reponame, short(cnode))
637 name = "%s-%s" % (reponame, short(cnode))
638 mimetype, artype, extension, encoding = self.archive_specs[type_]
638 mimetype, artype, extension, encoding = self.archive_specs[type_]
639 headers = [('Content-type', mimetype),
639 headers = [('Content-type', mimetype),
640 ('Content-disposition', 'attachment; filename=%s%s' %
640 ('Content-disposition', 'attachment; filename=%s%s' %
641 (name, extension))]
641 (name, extension))]
642 if encoding:
642 if encoding:
643 headers.append(('Content-encoding', encoding))
643 headers.append(('Content-encoding', encoding))
644 req.header(headers)
644 req.header(headers)
645 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
645 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
646
646
647 # add tags to things
647 # add tags to things
648 # tags -> list of changesets corresponding to tags
648 # tags -> list of changesets corresponding to tags
649 # find tag, changeset, file
649 # find tag, changeset, file
650
650
651 def cleanpath(self, path):
651 def cleanpath(self, path):
652 p = util.normpath(path)
652 p = util.normpath(path)
653 if p[:2] == "..":
653 if p[:2] == "..":
654 raise Exception("suspicious path")
654 raise Exception("suspicious path")
655 return p
655 return p
656
656
657 def run(self):
657 def run(self):
658 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
658 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
659 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
659 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
660 import mercurial.hgweb.wsgicgi as wsgicgi
660 import mercurial.hgweb.wsgicgi as wsgicgi
661 from request import wsgiapplication
661 from request import wsgiapplication
662 def make_web_app():
662 def make_web_app():
663 return self
663 return self
664 wsgicgi.launch(wsgiapplication(make_web_app))
664 wsgicgi.launch(wsgiapplication(make_web_app))
665
665
666 def run_wsgi(self, req):
666 def run_wsgi(self, req):
667 def header(**map):
667 def header(**map):
668 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
668 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
669 msg = mimetools.Message(header_file, 0)
669 msg = mimetools.Message(header_file, 0)
670 req.header(msg.items())
670 req.header(msg.items())
671 yield header_file.read()
671 yield header_file.read()
672
672
673 def rawfileheader(**map):
673 def rawfileheader(**map):
674 req.header([('Content-type', map['mimetype']),
674 req.header([('Content-type', map['mimetype']),
675 ('Content-disposition', 'filename=%s' % map['file']),
675 ('Content-disposition', 'filename=%s' % map['file']),
676 ('Content-length', str(len(map['raw'])))])
676 ('Content-length', str(len(map['raw'])))])
677 yield ''
677 yield ''
678
678
679 def footer(**map):
679 def footer(**map):
680 yield self.t("footer",
680 yield self.t("footer",
681 motd=self.repo.ui.config("web", "motd", ""),
681 motd=self.repo.ui.config("web", "motd", ""),
682 **map)
682 **map)
683
683
684 def expand_form(form):
684 def expand_form(form):
685 shortcuts = {
685 shortcuts = {
686 'cl': [('cmd', ['changelog']), ('rev', None)],
686 'cl': [('cmd', ['changelog']), ('rev', None)],
687 'sl': [('cmd', ['shortlog']), ('rev', None)],
687 'sl': [('cmd', ['shortlog']), ('rev', None)],
688 'cs': [('cmd', ['changeset']), ('node', None)],
688 'cs': [('cmd', ['changeset']), ('node', None)],
689 'f': [('cmd', ['file']), ('filenode', None)],
689 'f': [('cmd', ['file']), ('filenode', None)],
690 'fl': [('cmd', ['filelog']), ('filenode', None)],
690 'fl': [('cmd', ['filelog']), ('filenode', None)],
691 'fd': [('cmd', ['filediff']), ('node', None)],
691 'fd': [('cmd', ['filediff']), ('node', None)],
692 'fa': [('cmd', ['annotate']), ('filenode', None)],
692 'fa': [('cmd', ['annotate']), ('filenode', None)],
693 'mf': [('cmd', ['manifest']), ('manifest', None)],
693 'mf': [('cmd', ['manifest']), ('manifest', None)],
694 'ca': [('cmd', ['archive']), ('node', None)],
694 'ca': [('cmd', ['archive']), ('node', None)],
695 'tags': [('cmd', ['tags'])],
695 'tags': [('cmd', ['tags'])],
696 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
696 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
697 'static': [('cmd', ['static']), ('file', None)]
697 'static': [('cmd', ['static']), ('file', None)]
698 }
698 }
699
699
700 for k in shortcuts.iterkeys():
700 for k in shortcuts.iterkeys():
701 if form.has_key(k):
701 if form.has_key(k):
702 for name, value in shortcuts[k]:
702 for name, value in shortcuts[k]:
703 if value is None:
703 if value is None:
704 value = form[k]
704 value = form[k]
705 form[name] = value
705 form[name] = value
706 del form[k]
706 del form[k]
707
707
708 self.refresh()
708 self.refresh()
709
709
710 expand_form(req.form)
710 expand_form(req.form)
711
711
712 m = os.path.join(self.templatepath, "map")
712 m = os.path.join(self.templatepath, "map")
713 style = self.repo.ui.config("web", "style", "")
713 style = self.repo.ui.config("web", "style", "")
714 if req.form.has_key('style'):
714 if req.form.has_key('style'):
715 style = req.form['style'][0]
715 style = req.form['style'][0]
716 if style:
716 if style:
717 b = os.path.basename("map-" + style)
717 b = os.path.basename("map-" + style)
718 p = os.path.join(self.templatepath, b)
718 p = os.path.join(self.templatepath, b)
719 if os.path.isfile(p):
719 if os.path.isfile(p):
720 m = p
720 m = p
721
721
722 port = req.env["SERVER_PORT"]
722 port = req.env["SERVER_PORT"]
723 port = port != "80" and (":" + port) or ""
723 port = port != "80" and (":" + port) or ""
724 uri = req.env["REQUEST_URI"]
724 uri = req.env["REQUEST_URI"]
725 if "?" in uri:
725 if "?" in uri:
726 uri = uri.split("?")[0]
726 uri = uri.split("?")[0]
727 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
727 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
728 if not self.reponame:
728 if not self.reponame:
729 self.reponame = (self.repo.ui.config("web", "name")
729 self.reponame = (self.repo.ui.config("web", "name")
730 or uri.strip('/') or self.repo.root)
730 or uri.strip('/') or self.repo.root)
731
731
732 self.t = templater.templater(m, templater.common_filters,
732 self.t = templater.templater(m, templater.common_filters,
733 defaults={"url": url,
733 defaults={"url": url,
734 "repo": self.reponame,
734 "repo": self.reponame,
735 "header": header,
735 "header": header,
736 "footer": footer,
736 "footer": footer,
737 "rawfileheader": rawfileheader,
737 "rawfileheader": rawfileheader,
738 })
738 })
739
739
740 if not req.form.has_key('cmd'):
740 if not req.form.has_key('cmd'):
741 req.form['cmd'] = [self.t.cache['default'],]
741 req.form['cmd'] = [self.t.cache['default'],]
742
742
743 cmd = req.form['cmd'][0]
743 cmd = req.form['cmd'][0]
744
744
745 method = getattr(self, 'do_' + cmd, None)
745 method = getattr(self, 'do_' + cmd, None)
746 if method:
746 if method:
747 method(req)
747 method(req)
748 else:
748 else:
749 req.write(self.t("error"))
749 req.write(self.t("error"))
750
750
751 def stripes(self, parity):
751 def stripes(self, parity):
752 "make horizontal stripes for easier reading"
752 "make horizontal stripes for easier reading"
753 if self.stripecount:
753 if self.stripecount:
754 return (1 + parity / self.stripecount) & 1
754 return (1 + parity / self.stripecount) & 1
755 else:
755 else:
756 return 0
756 return 0
757
757
758 def do_changelog(self, req):
758 def do_changelog(self, req):
759 hi = self.repo.changelog.count() - 1
759 hi = self.repo.changelog.count() - 1
760 if req.form.has_key('rev'):
760 if req.form.has_key('rev'):
761 hi = req.form['rev'][0]
761 hi = req.form['rev'][0]
762 try:
762 try:
763 hi = self.repo.changelog.rev(self.repo.lookup(hi))
763 hi = self.repo.changelog.rev(self.repo.lookup(hi))
764 except hg.RepoError:
764 except hg.RepoError:
765 req.write(self.search(hi)) # XXX redirect to 404 page?
765 req.write(self.search(hi)) # XXX redirect to 404 page?
766 return
766 return
767
767
768 req.write(self.changelog(hi))
768 req.write(self.changelog(hi))
769
769
770 def do_shortlog(self, req):
770 def do_shortlog(self, req):
771 hi = self.repo.changelog.count() - 1
771 hi = self.repo.changelog.count() - 1
772 if req.form.has_key('rev'):
772 if req.form.has_key('rev'):
773 hi = req.form['rev'][0]
773 hi = req.form['rev'][0]
774 try:
774 try:
775 hi = self.repo.changelog.rev(self.repo.lookup(hi))
775 hi = self.repo.changelog.rev(self.repo.lookup(hi))
776 except hg.RepoError:
776 except hg.RepoError:
777 req.write(self.search(hi)) # XXX redirect to 404 page?
777 req.write(self.search(hi)) # XXX redirect to 404 page?
778 return
778 return
779
779
780 req.write(self.changelog(hi, shortlog = True))
780 req.write(self.changelog(hi, shortlog = True))
781
781
782 def do_changeset(self, req):
782 def do_changeset(self, req):
783 req.write(self.changeset(req.form['node'][0]))
783 req.write(self.changeset(req.form['node'][0]))
784
784
785 def do_manifest(self, req):
785 def do_manifest(self, req):
786 req.write(self.manifest(req.form['manifest'][0],
786 req.write(self.manifest(req.form['manifest'][0],
787 self.cleanpath(req.form['path'][0])))
787 self.cleanpath(req.form['path'][0])))
788
788
789 def do_tags(self, req):
789 def do_tags(self, req):
790 req.write(self.tags())
790 req.write(self.tags())
791
791
792 def do_summary(self, req):
792 def do_summary(self, req):
793 req.write(self.summary())
793 req.write(self.summary())
794
794
795 def do_filediff(self, req):
795 def do_filediff(self, req):
796 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
796 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
797 req.form['node'][0]))
797 req.form['node'][0]))
798
798
799 def do_file(self, req):
799 def do_file(self, req):
800 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
800 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
801 req.form['filenode'][0]))
801 req.form['filenode'][0]))
802
802
803 def do_annotate(self, req):
803 def do_annotate(self, req):
804 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
804 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
805 req.form['filenode'][0]))
805 req.form['filenode'][0]))
806
806
807 def do_filelog(self, req):
807 def do_filelog(self, req):
808 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
808 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
809 req.form['filenode'][0]))
809 req.form['filenode'][0]))
810
810
811 def do_heads(self, req):
811 def do_heads(self, req):
812 resp = " ".join(map(hex, self.repo.heads())) + "\n"
812 resp = " ".join(map(hex, self.repo.heads())) + "\n"
813 req.httphdr("application/mercurial-0.1", length=len(resp))
813 req.httphdr("application/mercurial-0.1", length=len(resp))
814 req.write(resp)
814 req.write(resp)
815
815
816 def do_branches(self, req):
816 def do_branches(self, req):
817 nodes = []
817 nodes = []
818 if req.form.has_key('nodes'):
818 if req.form.has_key('nodes'):
819 nodes = map(bin, req.form['nodes'][0].split(" "))
819 nodes = map(bin, req.form['nodes'][0].split(" "))
820 resp = cStringIO.StringIO()
820 resp = cStringIO.StringIO()
821 for b in self.repo.branches(nodes):
821 for b in self.repo.branches(nodes):
822 resp.write(" ".join(map(hex, b)) + "\n")
822 resp.write(" ".join(map(hex, b)) + "\n")
823 resp = resp.getvalue()
823 resp = resp.getvalue()
824 req.httphdr("application/mercurial-0.1", length=len(resp))
824 req.httphdr("application/mercurial-0.1", length=len(resp))
825 req.write(resp)
825 req.write(resp)
826
826
827 def do_between(self, req):
827 def do_between(self, req):
828 nodes = []
828 nodes = []
829 if req.form.has_key('pairs'):
829 if req.form.has_key('pairs'):
830 pairs = [map(bin, p.split("-"))
830 pairs = [map(bin, p.split("-"))
831 for p in req.form['pairs'][0].split(" ")]
831 for p in req.form['pairs'][0].split(" ")]
832 resp = cStringIO.StringIO()
832 resp = cStringIO.StringIO()
833 for b in self.repo.between(pairs):
833 for b in self.repo.between(pairs):
834 resp.write(" ".join(map(hex, b)) + "\n")
834 resp.write(" ".join(map(hex, b)) + "\n")
835 resp = resp.getvalue()
835 resp = resp.getvalue()
836 req.httphdr("application/mercurial-0.1", length=len(resp))
836 req.httphdr("application/mercurial-0.1", length=len(resp))
837 req.write(resp)
837 req.write(resp)
838
838
839 def do_changegroup(self, req):
839 def do_changegroup(self, req):
840 req.httphdr("application/mercurial-0.1")
840 req.httphdr("application/mercurial-0.1")
841 nodes = []
841 nodes = []
842 if not self.allowpull:
842 if not self.allowpull:
843 return
843 return
844
844
845 if req.form.has_key('roots'):
845 if req.form.has_key('roots'):
846 nodes = map(bin, req.form['roots'][0].split(" "))
846 nodes = map(bin, req.form['roots'][0].split(" "))
847
847
848 z = zlib.compressobj()
848 z = zlib.compressobj()
849 f = self.repo.changegroup(nodes, 'serve')
849 f = self.repo.changegroup(nodes, 'serve')
850 while 1:
850 while 1:
851 chunk = f.read(4096)
851 chunk = f.read(4096)
852 if not chunk:
852 if not chunk:
853 break
853 break
854 req.write(z.compress(chunk))
854 req.write(z.compress(chunk))
855
855
856 req.write(z.flush())
856 req.write(z.flush())
857
857
858 def do_archive(self, req):
858 def do_archive(self, req):
859 changeset = self.repo.lookup(req.form['node'][0])
859 changeset = self.repo.lookup(req.form['node'][0])
860 type_ = req.form['type'][0]
860 type_ = req.form['type'][0]
861 allowed = self.repo.ui.configlist("web", "allow_archive")
861 allowed = self.repo.ui.configlist("web", "allow_archive")
862 if (type_ in self.archives and (type_ in allowed or
862 if (type_ in self.archives and (type_ in allowed or
863 self.repo.ui.configbool("web", "allow" + type_, False))):
863 self.repo.ui.configbool("web", "allow" + type_, False))):
864 self.archive(req, changeset, type_)
864 self.archive(req, changeset, type_)
865 return
865 return
866
866
867 req.write(self.t("error"))
867 req.write(self.t("error"))
868
868
869 def do_static(self, req):
869 def do_static(self, req):
870 fname = req.form['file'][0]
870 fname = req.form['file'][0]
871 static = self.repo.ui.config("web", "static",
871 static = self.repo.ui.config("web", "static",
872 os.path.join(self.templatepath,
872 os.path.join(self.templatepath,
873 "static"))
873 "static"))
874 req.write(staticfile(static, fname, req)
874 req.write(staticfile(static, fname, req)
875 or self.t("error", error="%r not found" % fname))
875 or self.t("error", error="%r not found" % fname))
876
876
877 def do_capabilities(self, req):
877 def do_capabilities(self, req):
878 caps = ['unbundle']
878 caps = ['unbundle']
879 if self.repo.ui.configbool('server', 'uncompressed'):
879 if self.repo.ui.configbool('server', 'uncompressed'):
880 caps.append('stream=%d' % self.repo.revlogversion)
880 caps.append('stream=%d' % self.repo.revlogversion)
881 resp = ' '.join(caps)
881 resp = ' '.join(caps)
882 req.httphdr("application/mercurial-0.1", length=len(resp))
882 req.httphdr("application/mercurial-0.1", length=len(resp))
883 req.write(resp)
883 req.write(resp)
884
884
885 def check_perm(self, req, op, default):
885 def check_perm(self, req, op, default):
886 '''check permission for operation based on user auth.
886 '''check permission for operation based on user auth.
887 return true if op allowed, else false.
887 return true if op allowed, else false.
888 default is policy to use if no config given.'''
888 default is policy to use if no config given.'''
889
889
890 user = req.env.get('REMOTE_USER')
890 user = req.env.get('REMOTE_USER')
891
891
892 deny = self.repo.ui.configlist('web', 'deny_' + op)
892 deny = self.repo.ui.configlist('web', 'deny_' + op)
893 if deny and (not user or deny == ['*'] or user in deny):
893 if deny and (not user or deny == ['*'] or user in deny):
894 return False
894 return False
895
895
896 allow = self.repo.ui.configlist('web', 'allow_' + op)
896 allow = self.repo.ui.configlist('web', 'allow_' + op)
897 return (allow and (allow == ['*'] or user in allow)) or default
897 return (allow and (allow == ['*'] or user in allow)) or default
898
898
899 def do_unbundle(self, req):
899 def do_unbundle(self, req):
900 def bail(response, headers={}):
900 def bail(response, headers={}):
901 length = int(req.env['CONTENT_LENGTH'])
901 length = int(req.env['CONTENT_LENGTH'])
902 for s in util.filechunkiter(req, limit=length):
902 for s in util.filechunkiter(req, limit=length):
903 # drain incoming bundle, else client will not see
903 # drain incoming bundle, else client will not see
904 # response when run outside cgi script
904 # response when run outside cgi script
905 pass
905 pass
906 req.httphdr("application/mercurial-0.1", headers=headers)
906 req.httphdr("application/mercurial-0.1", headers=headers)
907 req.write('0\n')
907 req.write('0\n')
908 req.write(response)
908 req.write(response)
909
909
910 # require ssl by default, auth info cannot be sniffed and
910 # require ssl by default, auth info cannot be sniffed and
911 # replayed
911 # replayed
912 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
912 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
913 if ssl_req:
913 if ssl_req:
914 if not req.env.get('HTTPS'):
914 if not req.env.get('HTTPS'):
915 bail(_('ssl required\n'))
915 bail(_('ssl required\n'))
916 return
916 return
917 proto = 'https'
917 proto = 'https'
918 else:
918 else:
919 proto = 'http'
919 proto = 'http'
920
920
921 # do not allow push unless explicitly allowed
921 # do not allow push unless explicitly allowed
922 if not self.check_perm(req, 'push', False):
922 if not self.check_perm(req, 'push', False):
923 bail(_('push not authorized\n'),
923 bail(_('push not authorized\n'),
924 headers={'status': '401 Unauthorized'})
924 headers={'status': '401 Unauthorized'})
925 return
925 return
926
926
927 req.httphdr("application/mercurial-0.1")
927 req.httphdr("application/mercurial-0.1")
928
928
929 their_heads = req.form['heads'][0].split(' ')
929 their_heads = req.form['heads'][0].split(' ')
930
930
931 def check_heads():
931 def check_heads():
932 heads = map(hex, self.repo.heads())
932 heads = map(hex, self.repo.heads())
933 return their_heads == [hex('force')] or their_heads == heads
933 return their_heads == [hex('force')] or their_heads == heads
934
934
935 # fail early if possible
935 # fail early if possible
936 if not check_heads():
936 if not check_heads():
937 bail(_('unsynced changes\n'))
937 bail(_('unsynced changes\n'))
938 return
938 return
939
939
940 # do not lock repo until all changegroup data is
940 # do not lock repo until all changegroup data is
941 # streamed. save to temporary file.
941 # streamed. save to temporary file.
942
942
943 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
943 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
944 fp = os.fdopen(fd, 'wb+')
944 fp = os.fdopen(fd, 'wb+')
945 try:
945 try:
946 length = int(req.env['CONTENT_LENGTH'])
946 length = int(req.env['CONTENT_LENGTH'])
947 for s in util.filechunkiter(req, limit=length):
947 for s in util.filechunkiter(req, limit=length):
948 fp.write(s)
948 fp.write(s)
949
949
950 lock = self.repo.lock()
950 lock = self.repo.lock()
951 try:
951 try:
952 if not check_heads():
952 if not check_heads():
953 req.write('0\n')
953 req.write('0\n')
954 req.write(_('unsynced changes\n'))
954 req.write(_('unsynced changes\n'))
955 return
955 return
956
956
957 fp.seek(0)
957 fp.seek(0)
958
958
959 # send addchangegroup output to client
959 # send addchangegroup output to client
960
960
961 old_stdout = sys.stdout
961 old_stdout = sys.stdout
962 sys.stdout = cStringIO.StringIO()
962 sys.stdout = cStringIO.StringIO()
963
963
964 try:
964 try:
965 url = 'remote:%s:%s' % (proto,
965 url = 'remote:%s:%s' % (proto,
966 req.env.get('REMOTE_HOST', ''))
966 req.env.get('REMOTE_HOST', ''))
967 ret = self.repo.addchangegroup(fp, 'serve', url)
967 ret = self.repo.addchangegroup(fp, 'serve', url)
968 finally:
968 finally:
969 val = sys.stdout.getvalue()
969 val = sys.stdout.getvalue()
970 sys.stdout = old_stdout
970 sys.stdout = old_stdout
971 req.write('%d\n' % ret)
971 req.write('%d\n' % ret)
972 req.write(val)
972 req.write(val)
973 finally:
973 finally:
974 lock.release()
974 lock.release()
975 finally:
975 finally:
976 fp.close()
976 fp.close()
977 os.unlink(tempname)
977 os.unlink(tempname)
978
978
979 def do_stream_out(self, req):
979 def do_stream_out(self, req):
980 req.httphdr("application/mercurial-0.1")
980 req.httphdr("application/mercurial-0.1")
981 streamclone.stream_out(self.repo, req)
981 streamclone.stream_out(self.repo, req)
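
The check_perm helper above is what gates do_unbundle: the web.deny_<op> list is consulted first and wins whenever the request is anonymous, the list is the wildcard '*', or the user is named in it; only then do web.allow_<op> and the caller-supplied default get a say. Below is a minimal standalone sketch of that precedence, with plain lists standing in for the ui.configlist lookups; the function and its arguments are illustrative, not part of hgweb.

def check_perm(user, deny, allow, default):
    '''deny list first, then allow list, then the default policy.'''
    # a deny entry blocks access when the request is anonymous,
    # the list is a wildcard, or the user is listed explicitly
    if deny and (not user or deny == ['*'] or user in deny):
        return False
    # otherwise a wildcard or explicit allow entry grants access,
    # falling back to the default
    return bool(allow and (allow == ['*'] or user in allow)) or default

# pushes stay refused unless explicitly enabled, deny always wins:
assert not check_perm(None, deny=[], allow=[], default=False)
assert check_perm('alice', deny=[], allow=['alice'], default=False)
assert not check_perm('alice', deny=['*'], allow=['alice'], default=True)
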
@@ -1,334 +1,339 b''
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "util os tempfile")
11 demandload(globals(), "util os tempfile")
12
12
13 def fmerge(f, local, other, ancestor):
14 """merge executable flags"""
15 a, b, c = ancestor.execf(f), local.execf(f), other.execf(f)
16 return ((a^b) | (a^c)) ^ a
17
13 def merge3(repo, fn, my, other, p1, p2):
18 def merge3(repo, fn, my, other, p1, p2):
14 """perform a 3-way merge in the working directory"""
19 """perform a 3-way merge in the working directory"""
15
20
16 def temp(prefix, node):
21 def temp(prefix, node):
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
22 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
19 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
20 repo.wwrite(fn, fl.read(node), f)
25 repo.wwrite(fn, fl.read(node), f)
21 f.close()
26 f.close()
22 return name
27 return name
23
28
24 fl = repo.file(fn)
29 fl = repo.file(fn)
25 base = fl.ancestor(my, other)
30 base = fl.ancestor(my, other)
26 a = repo.wjoin(fn)
31 a = repo.wjoin(fn)
27 b = temp("base", base)
32 b = temp("base", base)
28 c = temp("other", other)
33 c = temp("other", other)
29
34
30 repo.ui.note(_("resolving %s\n") % fn)
35 repo.ui.note(_("resolving %s\n") % fn)
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
36 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 (fn, short(my), short(other), short(base)))
37 (fn, short(my), short(other), short(base)))
33
38
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
39 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 or "hgmerge")
40 or "hgmerge")
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
41 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 environ={'HG_FILE': fn,
42 environ={'HG_FILE': fn,
38 'HG_MY_NODE': p1,
43 'HG_MY_NODE': p1,
39 'HG_OTHER_NODE': p2,
44 'HG_OTHER_NODE': p2,
40 'HG_FILE_MY_NODE': hex(my),
45 'HG_FILE_MY_NODE': hex(my),
41 'HG_FILE_OTHER_NODE': hex(other),
46 'HG_FILE_OTHER_NODE': hex(other),
42 'HG_FILE_BASE_NODE': hex(base)})
47 'HG_FILE_BASE_NODE': hex(base)})
43 if r:
48 if r:
44 repo.ui.warn(_("merging %s failed!\n") % fn)
49 repo.ui.warn(_("merging %s failed!\n") % fn)
45
50
46 os.unlink(b)
51 os.unlink(b)
47 os.unlink(c)
52 os.unlink(c)
48 return r
53 return r
49
54
50 def update(repo, node, branchmerge=False, force=False, partial=None,
55 def update(repo, node, branchmerge=False, force=False, partial=None,
51 wlock=None, show_stats=True, remind=True):
56 wlock=None, show_stats=True, remind=True):
52
57
53 overwrite = force and not branchmerge
58 overwrite = force and not branchmerge
54 forcemerge = force and branchmerge
59 forcemerge = force and branchmerge
55
60
56 if not wlock:
61 if not wlock:
57 wlock = repo.wlock()
62 wlock = repo.wlock()
58
63
59 ### check phase
64 ### check phase
60
65
61 pl = repo.dirstate.parents()
66 pl = repo.dirstate.parents()
62 if not overwrite and pl[1] != nullid:
67 if not overwrite and pl[1] != nullid:
63 raise util.Abort(_("outstanding uncommitted merges"))
68 raise util.Abort(_("outstanding uncommitted merges"))
64
69
65 p1, p2 = pl[0], node
70 p1, p2 = pl[0], node
66 pa = repo.changelog.ancestor(p1, p2)
71 pa = repo.changelog.ancestor(p1, p2)
67
72
68 # is there a linear path from p1 to p2?
73 # is there a linear path from p1 to p2?
69 linear_path = (pa == p1 or pa == p2)
74 linear_path = (pa == p1 or pa == p2)
70 if branchmerge and linear_path:
75 if branchmerge and linear_path:
71 raise util.Abort(_("there is nothing to merge, just use "
76 raise util.Abort(_("there is nothing to merge, just use "
72 "'hg update' or look at 'hg heads'"))
77 "'hg update' or look at 'hg heads'"))
73
78
74 if not overwrite and not linear_path and not branchmerge:
79 if not overwrite and not linear_path and not branchmerge:
75 raise util.Abort(_("update spans branches, use 'hg merge' "
80 raise util.Abort(_("update spans branches, use 'hg merge' "
76 "or 'hg update -C' to lose changes"))
81 "or 'hg update -C' to lose changes"))
77
82
78 modified, added, removed, deleted, unknown = repo.status()[:5]
83 modified, added, removed, deleted, unknown = repo.status()[:5]
79 if branchmerge and not forcemerge:
84 if branchmerge and not forcemerge:
80 if modified or added or removed:
85 if modified or added or removed:
81 raise util.Abort(_("outstanding uncommitted changes"))
86 raise util.Abort(_("outstanding uncommitted changes"))
82
87
83 m1n = repo.changelog.read(p1)[0]
88 m1n = repo.changelog.read(p1)[0]
84 m2n = repo.changelog.read(p2)[0]
89 m2n = repo.changelog.read(p2)[0]
85 man = repo.manifest.ancestor(m1n, m2n)
90 man = repo.manifest.ancestor(m1n, m2n)
86 m1 = repo.manifest.read(m1n)
91 m1 = repo.manifest.read(m1n)
87 m2 = repo.manifest.read(m2n).copy()
92 m2 = repo.manifest.read(m2n).copy()
88 ma = repo.manifest.read(man)
93 ma = repo.manifest.read(man)
89
94
90 if not force:
95 if not force:
91 for f in unknown:
96 for f in unknown:
92 if f in m2:
97 if f in m2:
93 t1 = repo.wread(f)
98 if repo.file(f).cmp(m2[f], repo.wread(f)):
94 t2 = repo.file(f).read(m2[f])
95 if cmp(t1, t2) != 0:
96 raise util.Abort(_("'%s' already exists in the working"
99 raise util.Abort(_("'%s' already exists in the working"
97 " dir and differs from remote") % f)
100 " dir and differs from remote") % f)
98
101
99 # resolve the manifest to determine which files
102 # resolve the manifest to determine which files
100 # we care about merging
103 # we care about merging
101 repo.ui.note(_("resolving manifests\n"))
104 repo.ui.note(_("resolving manifests\n"))
102 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
103 (overwrite, branchmerge, partial and True or False, linear_path))
106 (overwrite, branchmerge, bool(partial), linear_path))
104 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
105 (short(man), short(m1n), short(m2n)))
108 (short(man), short(m1n), short(m2n)))
106
109
107 merge = {}
110 merge = {}
108 get = {}
111 get = {}
109 remove = []
112 remove = []
113 forget = []
110
114
111 # construct a working dir manifest
115 # construct a working dir manifest
112 mw = m1.copy()
116 mw = m1.copy()
113 umap = dict.fromkeys(unknown)
117 umap = dict.fromkeys(unknown)
114
118
115 for f in added + modified + unknown:
119 for f in added + modified + unknown:
116 mw[f] = ""
120 mw[f] = ""
121 # is the wfile new and matches m2?
122 if (f not in m1 and f in m2 and
123 not repo.file(f).cmp(m2[f], repo.wread(f))):
124 mw[f] = m2[f]
125
117 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
126 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
118
127
119 for f in deleted + removed:
128 for f in deleted + removed:
120 if f in mw:
129 if f in mw:
121 del mw[f]
130 del mw[f]
122
131
123 # If we're jumping between revisions (as opposed to merging),
132 # If we're jumping between revisions (as opposed to merging),
124 # and if neither the working directory nor the target rev has
133 # and if neither the working directory nor the target rev has
125 # the file, then we need to remove it from the dirstate, to
134 # the file, then we need to remove it from the dirstate, to
126 # prevent the dirstate from listing the file when it is no
135 # prevent the dirstate from listing the file when it is no
127 # longer in the manifest.
136 # longer in the manifest.
128 if not partial and linear_path and f not in m2:
137 if linear_path and f not in m2:
129 repo.dirstate.forget((f,))
138 forget.append(f)
130
139
131 # Compare manifests
140 # Compare manifests
132 for f, n in mw.iteritems():
141 for f, n in mw.iteritems():
133 if partial and not partial(f):
142 if partial and not partial(f):
134 continue
143 continue
135 if f in m2:
144 if f in m2:
136 s = 0
145 s = 0
137
146
138 # is the wfile new since m1, and match m2?
139 if f not in m1:
140 t1 = repo.wread(f)
141 t2 = repo.file(f).read(m2[f])
142 if cmp(t1, t2) == 0:
143 n = m2[f]
144 del t1, t2
145
146 # are files different?
147 # are files different?
147 if n != m2[f]:
148 if n != m2[f]:
148 a = ma.get(f, nullid)
149 a = ma.get(f, nullid)
149 # are both different from the ancestor?
150 # are both different from the ancestor?
150 if n != a and m2[f] != a:
151 if n != a and m2[f] != a:
151 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 # merge executable bits
153 merge[f] = (fmerge(f, mw, m2, ma), m1.get(f, nullid), m2[f])
153 # "if we changed or they changed, change in merge"
154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
155 mode = ((a^b) | (a^c)) ^ a
156 merge[f] = (mode, m1.get(f, nullid), m2[f])
157 s = 1
154 s = 1
158 # are we clobbering?
155 # are we clobbering?
159 # is remote's version newer?
156 # is remote's version newer?
160 # or are we going back in time?
157 # or are we going back in time?
161 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
158 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
162 repo.ui.debug(_(" remote %s is newer, get\n") % f)
159 repo.ui.debug(_(" remote %s is newer, get\n") % f)
163 get[f] = (m2.execf(f), m2[f])
160 get[f] = (m2.execf(f), m2[f])
164 s = 1
161 s = 1
165 elif f in umap or f in added:
162 elif f in umap or f in added:
166 # this unknown file is the same as the checkout
163 # this unknown file is the same as the checkout
167 # we need to reset the dirstate if the file was added
164 # we need to reset the dirstate if the file was added
168 get[f] = (m2.execf(f), m2[f])
165 get[f] = (m2.execf(f), m2[f])
169
166
170 if not s and mw.execf(f) != m2.execf(f):
167 if not s and mw.execf(f) != m2.execf(f):
171 if overwrite:
168 if overwrite:
172 repo.ui.debug(_(" updating permissions for %s\n") % f)
169 repo.ui.debug(_(" updating permissions for %s\n") % f)
173 util.set_exec(repo.wjoin(f), m2.execf(f))
170 util.set_exec(repo.wjoin(f), m2.execf(f))
174 else:
171 else:
175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
172 if fmerge(f, mw, m2, ma) != mw.execf(f):
176 mode = ((a^b) | (a^c)) ^ a
177 if mode != b:
178 repo.ui.debug(_(" updating permissions for %s\n")
173 repo.ui.debug(_(" updating permissions for %s\n")
179 % f)
174 % f)
180 util.set_exec(repo.wjoin(f), mode)
175 util.set_exec(repo.wjoin(f), mode)
181 del m2[f]
176 del m2[f]
182 elif f in ma:
177 elif f in ma:
183 if n != ma[f]:
178 if n != ma[f]:
184 r = _("d")
179 r = _("d")
185 if not overwrite and (linear_path or branchmerge):
180 if not overwrite and (linear_path or branchmerge):
186 r = repo.ui.prompt(
181 r = repo.ui.prompt(
187 (_(" local changed %s which remote deleted\n") % f) +
182 (_(" local changed %s which remote deleted\n") % f) +
188 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
183 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
189 if r == _("d"):
184 if r == _("d"):
190 remove.append(f)
185 remove.append(f)
191 else:
186 else:
192 repo.ui.debug(_("other deleted %s\n") % f)
187 repo.ui.debug(_("other deleted %s\n") % f)
193 remove.append(f) # other deleted it
188 remove.append(f) # other deleted it
194 else:
189 else:
195 # file is created on branch or in working directory
190 # file is created on branch or in working directory
196 if overwrite and f not in umap:
191 if overwrite and f not in umap:
197 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
192 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
198 remove.append(f)
193 remove.append(f)
199 elif n == m1.get(f, nullid): # same as parent
194 elif n == m1.get(f, nullid): # same as parent
200 if p2 == pa: # going backwards?
195 if p2 == pa: # going backwards?
201 repo.ui.debug(_("remote deleted %s\n") % f)
196 repo.ui.debug(_("remote deleted %s\n") % f)
202 remove.append(f)
197 remove.append(f)
203 else:
198 else:
204 repo.ui.debug(_("local modified %s, keeping\n") % f)
199 repo.ui.debug(_("local modified %s, keeping\n") % f)
205 else:
200 else:
206 repo.ui.debug(_("working dir created %s, keeping\n") % f)
201 repo.ui.debug(_("working dir created %s, keeping\n") % f)
207
202
208 for f, n in m2.iteritems():
203 for f, n in m2.iteritems():
209 if partial and not partial(f):
204 if partial and not partial(f):
210 continue
205 continue
211 if f[0] == "/":
206 if f[0] == "/":
212 continue
207 continue
213 if f in ma and n != ma[f]:
208 if f in ma and n != ma[f]:
214 r = _("k")
209 r = _("k")
215 if not overwrite and (linear_path or branchmerge):
210 if not overwrite and (linear_path or branchmerge):
216 r = repo.ui.prompt(
211 r = repo.ui.prompt(
217 (_("remote changed %s which local deleted\n") % f) +
212 (_("remote changed %s which local deleted\n") % f) +
218 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
213 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
219 if r == _("k"):
214 if r == _("k"):
220 get[f] = (m2.execf(f), n)
215 get[f] = (m2.execf(f), n)
221 elif f not in ma:
216 elif f not in ma:
222 repo.ui.debug(_("remote created %s\n") % f)
217 repo.ui.debug(_("remote created %s\n") % f)
223 get[f] = (m2.execf(f), n)
218 get[f] = (m2.execf(f), n)
224 else:
219 else:
225 if overwrite or p2 == pa: # going backwards?
220 if overwrite or p2 == pa: # going backwards?
226 repo.ui.debug(_("local deleted %s, recreating\n") % f)
221 repo.ui.debug(_("local deleted %s, recreating\n") % f)
227 get[f] = (m2.execf(f), n)
222 get[f] = (m2.execf(f), n)
228 else:
223 else:
229 repo.ui.debug(_("local deleted %s\n") % f)
224 repo.ui.debug(_("local deleted %s\n") % f)
230
225
231 del mw, m1, m2, ma
226 del mw, m1, m2, ma
232
227
228 ### apply phase
229
233 if overwrite:
230 if overwrite:
234 for f in merge:
231 for f in merge:
235 get[f] = merge[f][:2]
232 get[f] = merge[f][:2]
236 merge = {}
233 merge = {}
237
234
238 if linear_path or overwrite:
235 if linear_path or overwrite:
239 # we don't need to do any magic, just jump to the new rev
236 # we don't need to do any magic, just jump to the new rev
240 p1, p2 = p2, nullid
237 p1, p2 = p2, nullid
241
238
242 xp1 = hex(p1)
239 xp1 = hex(p1)
243 xp2 = hex(p2)
240 xp2 = hex(p2)
244 if p2 == nullid: xxp2 = ''
241 if p2 == nullid: xxp2 = ''
245 else: xxp2 = xp2
242 else: xxp2 = xp2
246
243
247 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
244 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
248
245
249 # get the files we don't need to change
246 # get the files we don't need to change
250 files = get.keys()
247 files = get.keys()
251 files.sort()
248 files.sort()
252 for f in files:
249 for f in files:
253 flag, node = get[f]
250 flag, node = get[f]
254 if f[0] == "/":
251 if f[0] == "/":
255 continue
252 continue
256 repo.ui.note(_("getting %s\n") % f)
253 repo.ui.note(_("getting %s\n") % f)
257 t = repo.file(f).read(node)
254 t = repo.file(f).read(node)
258 repo.wwrite(f, t)
255 repo.wwrite(f, t)
259 util.set_exec(repo.wjoin(f), flag)
256 util.set_exec(repo.wjoin(f), flag)
260 if not partial:
261 if branchmerge:
262 repo.dirstate.update([f], 'n', st_mtime=-1)
263 else:
264 repo.dirstate.update([f], 'n')
265
257
266 # merge the tricky bits
258 # merge the tricky bits
267 unresolved = []
259 unresolved = []
268 files = merge.keys()
260 files = merge.keys()
269 files.sort()
261 files.sort()
270 for f in files:
262 for f in files:
271 repo.ui.status(_("merging %s\n") % f)
263 repo.ui.status(_("merging %s\n") % f)
272 flag, my, other = merge[f]
264 flag, my, other = merge[f]
273 ret = merge3(repo, f, my, other, xp1, xp2)
265 ret = merge3(repo, f, my, other, xp1, xp2)
274 if ret:
266 if ret:
275 unresolved.append(f)
267 unresolved.append(f)
276 util.set_exec(repo.wjoin(f), flag)
268 util.set_exec(repo.wjoin(f), flag)
277 if not partial:
278 if branchmerge:
279 # We've done a branch merge, mark this file as merged
280 # so that we properly record the merger later
281 repo.dirstate.update([f], 'm')
282 else:
283 # We've update-merged a locally modified file, so
284 # we set the dirstate to emulate a normal checkout
285 # of that file some time in the past. Thus our
286 # merge will appear as a normal local file
287 # modification.
288 f_len = len(repo.file(f).read(other))
289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
290
269
291 remove.sort()
270 remove.sort()
292 for f in remove:
271 for f in remove:
293 repo.ui.note(_("removing %s\n") % f)
272 repo.ui.note(_("removing %s\n") % f)
294 util.audit_path(f)
273 util.audit_path(f)
295 try:
274 try:
296 util.unlink(repo.wjoin(f))
275 util.unlink(repo.wjoin(f))
297 except OSError, inst:
276 except OSError, inst:
298 if inst.errno != errno.ENOENT:
277 if inst.errno != errno.ENOENT:
299 repo.ui.warn(_("update failed to remove %s: %s!\n") %
278 repo.ui.warn(_("update failed to remove %s: %s!\n") %
300 (f, inst.strerror))
279 (f, inst.strerror))
280
281 # update dirstate
301 if not partial:
282 if not partial:
283 repo.dirstate.setparents(p1, p2)
284 repo.dirstate.forget(forget)
302 if branchmerge:
285 if branchmerge:
303 repo.dirstate.update(remove, 'r')
286 repo.dirstate.update(remove, 'r')
304 else:
287 else:
305 repo.dirstate.forget(remove)
288 repo.dirstate.forget(remove)
306
289
307 if not partial:
290 files = get.keys()
308 repo.dirstate.setparents(p1, p2)
291 files.sort()
292 for f in files:
293 if branchmerge:
294 repo.dirstate.update([f], 'n', st_mtime=-1)
295 else:
296 repo.dirstate.update([f], 'n')
297
298 files = merge.keys()
299 files.sort()
300 for f in files:
301 if branchmerge:
302 # We've done a branch merge, mark this file as merged
303 # so that we properly record the merger later
304 repo.dirstate.update([f], 'm')
305 else:
306 # We've update-merged a locally modified file, so
307 # we set the dirstate to emulate a normal checkout
308 # of that file some time in the past. Thus our
309 # merge will appear as a normal local file
310 # modification.
311 fl = repo.file(f)
312 f_len = fl.size(fl.rev(other))
313 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
309
314
310 if show_stats:
315 if show_stats:
311 stats = ((len(get), _("updated")),
316 stats = ((len(get), _("updated")),
312 (len(merge) - len(unresolved), _("merged")),
317 (len(merge) - len(unresolved), _("merged")),
313 (len(remove), _("removed")),
318 (len(remove), _("removed")),
314 (len(unresolved), _("unresolved")))
319 (len(unresolved), _("unresolved")))
315 note = ", ".join([_("%d files %s") % s for s in stats])
320 note = ", ".join([_("%d files %s") % s for s in stats])
316 repo.ui.status("%s\n" % note)
321 repo.ui.status("%s\n" % note)
317 if not partial:
322 if not partial:
318 if branchmerge:
323 if branchmerge:
319 if unresolved:
324 if unresolved:
320 repo.ui.status(_("There are unresolved merges,"
325 repo.ui.status(_("There are unresolved merges,"
321 " you can redo the full merge using:\n"
326 " you can redo the full merge using:\n"
322 " hg update -C %s\n"
327 " hg update -C %s\n"
323 " hg merge %s\n"
328 " hg merge %s\n"
324 % (repo.changelog.rev(p1),
329 % (repo.changelog.rev(p1),
325 repo.changelog.rev(p2))))
330 repo.changelog.rev(p2))))
326 elif remind:
331 elif remind:
327 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
332 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
328 elif unresolved:
333 elif unresolved:
329 repo.ui.status(_("There are unresolved merges with"
334 repo.ui.status(_("There are unresolved merges with"
330 " locally modified files.\n"))
335 " locally modified files.\n"))
331
336
332 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
337 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
333 return len(unresolved)
338 return len(unresolved)
334
339
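
The new fmerge helper introduced at the top of this file collapses the exec-bit handling that used to be spelled out inline into the single expression ((a^b) | (a^c)) ^ a, where a, b and c are the ancestor's, local's and remote's executable flags. The rule it encodes is the old comment's "if we changed or they changed, change in merge": whichever side diverged from the ancestor wins, and if neither did the ancestor value stands. A quick exhaustive check of that expression over plain 0/1 flags follows; it is standalone, uses no manifest objects, and merged_exec is an illustrative name.

from itertools import product

def merged_exec(a, b, c):
    # a: ancestor exec bit, b: local, c: remote
    return ((a ^ b) | (a ^ c)) ^ a

for a, b, c in product([0, 1], repeat=3):
    expected = b if b != a else c   # the side that changed the flag wins
    assert merged_exec(a, b, c) == expected, (a, b, c)
print("exec-bit merge rule holds for all eight combinations")
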
@@ -1,435 +1,449 b''
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from node import *
10 from node import *
11 demandload(globals(), "cmdutil mdiff util")
11 demandload(globals(), "cmdutil mdiff util")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
13
13
14 def extract(ui, fileobj):
14 def extract(ui, fileobj):
15 '''extract patch from data read from fileobj.
15 '''extract patch from data read from fileobj.
16
16
17 patch can be normal patch or contained in email message.
17 patch can be normal patch or contained in email message.
18
18
19 return tuple (filename, message, user, date). any item in returned
19 return tuple (filename, message, user, date). any item in returned
20 tuple can be None. if filename is None, fileobj did not contain
20 tuple can be None. if filename is None, fileobj did not contain
21 patch. caller must unlink filename when done.'''
21 patch. caller must unlink filename when done.'''
22
22
23 # attempt to detect the start of a patch
23 # attempt to detect the start of a patch
24 # (this heuristic is borrowed from quilt)
24 # (this heuristic is borrowed from quilt)
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
28
28
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
30 tmpfp = os.fdopen(fd, 'w')
30 tmpfp = os.fdopen(fd, 'w')
31 try:
31 try:
32 hgpatch = False
32 hgpatch = False
33
33
34 msg = email.Parser.Parser().parse(fileobj)
34 msg = email.Parser.Parser().parse(fileobj)
35
35
36 message = msg['Subject']
36 message = msg['Subject']
37 user = msg['From']
37 user = msg['From']
38 # should try to parse msg['Date']
38 # should try to parse msg['Date']
39 date = None
39 date = None
40
40
41 if message:
41 if message:
42 message = message.replace('\n\t', ' ')
42 message = message.replace('\n\t', ' ')
43 ui.debug('Subject: %s\n' % message)
43 ui.debug('Subject: %s\n' % message)
44 if user:
44 if user:
45 ui.debug('From: %s\n' % user)
45 ui.debug('From: %s\n' % user)
46 diffs_seen = 0
46 diffs_seen = 0
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
48
48
49 for part in msg.walk():
49 for part in msg.walk():
50 content_type = part.get_content_type()
50 content_type = part.get_content_type()
51 ui.debug('Content-Type: %s\n' % content_type)
51 ui.debug('Content-Type: %s\n' % content_type)
52 if content_type not in ok_types:
52 if content_type not in ok_types:
53 continue
53 continue
54 payload = part.get_payload(decode=True)
54 payload = part.get_payload(decode=True)
55 m = diffre.search(payload)
55 m = diffre.search(payload)
56 if m:
56 if m:
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
58 diffs_seen += 1
58 diffs_seen += 1
59 cfp = cStringIO.StringIO()
59 cfp = cStringIO.StringIO()
60 if message:
60 if message:
61 cfp.write(message)
61 cfp.write(message)
62 cfp.write('\n')
62 cfp.write('\n')
63 for line in payload[:m.start(0)].splitlines():
63 for line in payload[:m.start(0)].splitlines():
64 if line.startswith('# HG changeset patch'):
64 if line.startswith('# HG changeset patch'):
65 ui.debug(_('patch generated by hg export\n'))
65 ui.debug(_('patch generated by hg export\n'))
66 hgpatch = True
66 hgpatch = True
67 # drop earlier commit message content
67 # drop earlier commit message content
68 cfp.seek(0)
68 cfp.seek(0)
69 cfp.truncate()
69 cfp.truncate()
70 elif hgpatch:
70 elif hgpatch:
71 if line.startswith('# User '):
71 if line.startswith('# User '):
72 user = line[7:]
72 user = line[7:]
73 ui.debug('From: %s\n' % user)
73 ui.debug('From: %s\n' % user)
74 elif line.startswith("# Date "):
74 elif line.startswith("# Date "):
75 date = line[7:]
75 date = line[7:]
76 if not line.startswith('# '):
76 if not line.startswith('# '):
77 cfp.write(line)
77 cfp.write(line)
78 cfp.write('\n')
78 cfp.write('\n')
79 message = cfp.getvalue()
79 message = cfp.getvalue()
80 if tmpfp:
80 if tmpfp:
81 tmpfp.write(payload)
81 tmpfp.write(payload)
82 if not payload.endswith('\n'):
82 if not payload.endswith('\n'):
83 tmpfp.write('\n')
83 tmpfp.write('\n')
84 elif not diffs_seen and message and content_type == 'text/plain':
84 elif not diffs_seen and message and content_type == 'text/plain':
85 message += '\n' + payload
85 message += '\n' + payload
86 except:
86 except:
87 tmpfp.close()
87 tmpfp.close()
88 os.unlink(tmpname)
88 os.unlink(tmpname)
89 raise
89 raise
90
90
91 tmpfp.close()
91 tmpfp.close()
92 if not diffs_seen:
92 if not diffs_seen:
93 os.unlink(tmpname)
93 os.unlink(tmpname)
94 return None, message, user, date
94 return None, message, user, date
95 return tmpname, message, user, date
95 return tmpname, message, user, date
96
96
97 def readgitpatch(patchname):
97 def readgitpatch(patchname):
98 """extract git-style metadata about patches from <patchname>"""
98 """extract git-style metadata about patches from <patchname>"""
99 class gitpatch:
99 class gitpatch:
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
101 def __init__(self, path):
101 def __init__(self, path):
102 self.path = path
102 self.path = path
103 self.oldpath = None
103 self.oldpath = None
104 self.mode = None
104 self.mode = None
105 self.op = 'MODIFY'
105 self.op = 'MODIFY'
106 self.copymod = False
106 self.copymod = False
107 self.lineno = 0
107 self.lineno = 0
108
108
109 # Filter patch for git information
109 # Filter patch for git information
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
111 pf = file(patchname)
111 pf = file(patchname)
112 gp = None
112 gp = None
113 gitpatches = []
113 gitpatches = []
114 # Can have a git patch with only metadata, causing patch to complain
114 # Can have a git patch with only metadata, causing patch to complain
115 dopatch = False
115 dopatch = False
116
116
117 lineno = 0
117 lineno = 0
118 for line in pf:
118 for line in pf:
119 lineno += 1
119 lineno += 1
120 if line.startswith('diff --git'):
120 if line.startswith('diff --git'):
121 m = gitre.match(line)
121 m = gitre.match(line)
122 if m:
122 if m:
123 if gp:
123 if gp:
124 gitpatches.append(gp)
124 gitpatches.append(gp)
125 src, dst = m.group(1,2)
125 src, dst = m.group(1,2)
126 gp = gitpatch(dst)
126 gp = gitpatch(dst)
127 gp.lineno = lineno
127 gp.lineno = lineno
128 elif gp:
128 elif gp:
129 if line.startswith('--- '):
129 if line.startswith('--- '):
130 if gp.op in ('COPY', 'RENAME'):
130 if gp.op in ('COPY', 'RENAME'):
131 gp.copymod = True
131 gp.copymod = True
132 dopatch = 'filter'
132 dopatch = 'filter'
133 gitpatches.append(gp)
133 gitpatches.append(gp)
134 gp = None
134 gp = None
135 if not dopatch:
135 if not dopatch:
136 dopatch = True
136 dopatch = True
137 continue
137 continue
138 if line.startswith('rename from '):
138 if line.startswith('rename from '):
139 gp.op = 'RENAME'
139 gp.op = 'RENAME'
140 gp.oldpath = line[12:].rstrip()
140 gp.oldpath = line[12:].rstrip()
141 elif line.startswith('rename to '):
141 elif line.startswith('rename to '):
142 gp.path = line[10:].rstrip()
142 gp.path = line[10:].rstrip()
143 elif line.startswith('copy from '):
143 elif line.startswith('copy from '):
144 gp.op = 'COPY'
144 gp.op = 'COPY'
145 gp.oldpath = line[10:].rstrip()
145 gp.oldpath = line[10:].rstrip()
146 elif line.startswith('copy to '):
146 elif line.startswith('copy to '):
147 gp.path = line[8:].rstrip()
147 gp.path = line[8:].rstrip()
148 elif line.startswith('deleted file'):
148 elif line.startswith('deleted file'):
149 gp.op = 'DELETE'
149 gp.op = 'DELETE'
150 elif line.startswith('new file mode '):
150 elif line.startswith('new file mode '):
151 gp.op = 'ADD'
151 gp.op = 'ADD'
152 gp.mode = int(line.rstrip()[-3:], 8)
152 gp.mode = int(line.rstrip()[-3:], 8)
153 elif line.startswith('new mode '):
153 elif line.startswith('new mode '):
154 gp.mode = int(line.rstrip()[-3:], 8)
154 gp.mode = int(line.rstrip()[-3:], 8)
155 if gp:
155 if gp:
156 gitpatches.append(gp)
156 gitpatches.append(gp)
157
157
158 if not gitpatches:
158 if not gitpatches:
159 dopatch = True
159 dopatch = True
160
160
161 return (dopatch, gitpatches)
161 return (dopatch, gitpatches)
162
162
163 def dogitpatch(patchname, gitpatches):
163 def dogitpatch(patchname, gitpatches):
164 """Preprocess git patch so that vanilla patch can handle it"""
164 """Preprocess git patch so that vanilla patch can handle it"""
165 pf = file(patchname)
165 pf = file(patchname)
166 pfline = 1
166 pfline = 1
167
167
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
169 tmpfp = os.fdopen(fd, 'w')
169 tmpfp = os.fdopen(fd, 'w')
170
170
171 try:
171 try:
172 for i in range(len(gitpatches)):
172 for i in range(len(gitpatches)):
173 p = gitpatches[i]
173 p = gitpatches[i]
174 if not p.copymod:
174 if not p.copymod:
175 continue
175 continue
176
176
177 if os.path.exists(p.path):
177 if os.path.exists(p.path):
178 raise util.Abort(_("cannot create %s: destination already exists") %
178 raise util.Abort(_("cannot create %s: destination already exists") %
179 p.path)
179 p.path)
180
180
181 (src, dst) = [os.path.join(os.getcwd(), n)
181 (src, dst) = [os.path.join(os.getcwd(), n)
182 for n in (p.oldpath, p.path)]
182 for n in (p.oldpath, p.path)]
183
183
184 targetdir = os.path.dirname(dst)
184 targetdir = os.path.dirname(dst)
185 if not os.path.isdir(targetdir):
185 if not os.path.isdir(targetdir):
186 os.makedirs(targetdir)
186 os.makedirs(targetdir)
187 try:
187 try:
188 shutil.copyfile(src, dst)
188 shutil.copyfile(src, dst)
189 shutil.copymode(src, dst)
189 shutil.copymode(src, dst)
190 except shutil.Error, inst:
190 except shutil.Error, inst:
191 raise util.Abort(str(inst))
191 raise util.Abort(str(inst))
192
192
193 # rewrite patch hunk
193 # rewrite patch hunk
194 while pfline < p.lineno:
194 while pfline < p.lineno:
195 tmpfp.write(pf.readline())
195 tmpfp.write(pf.readline())
196 pfline += 1
196 pfline += 1
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
198 line = pf.readline()
198 line = pf.readline()
199 pfline += 1
199 pfline += 1
200 while not line.startswith('--- a/'):
200 while not line.startswith('--- a/'):
201 tmpfp.write(line)
201 tmpfp.write(line)
202 line = pf.readline()
202 line = pf.readline()
203 pfline += 1
203 pfline += 1
204 tmpfp.write('--- a/%s\n' % p.path)
204 tmpfp.write('--- a/%s\n' % p.path)
205
205
206 line = pf.readline()
206 line = pf.readline()
207 while line:
207 while line:
208 tmpfp.write(line)
208 tmpfp.write(line)
209 line = pf.readline()
209 line = pf.readline()
210 except:
210 except:
211 tmpfp.close()
211 tmpfp.close()
212 os.unlink(patchname)
212 os.unlink(patchname)
213 raise
213 raise
214
214
215 tmpfp.close()
215 tmpfp.close()
216 return patchname
216 return patchname
217
217
218 def patch(patchname, ui, strip=1, cwd=None):
218 def patch(patchname, ui, strip=1, cwd=None):
219 """apply the patch <patchname> to the working directory.
219 """apply the patch <patchname> to the working directory.
220 a list of patched files is returned"""
220 a list of patched files is returned"""
221
221
222 (dopatch, gitpatches) = readgitpatch(patchname)
222 (dopatch, gitpatches) = readgitpatch(patchname)
223
223
224 files = {}
224 files = {}
225 fuzz = False
225 fuzz = False
226 if dopatch:
226 if dopatch:
227 if dopatch == 'filter':
227 if dopatch == 'filter':
228 patchname = dogitpatch(patchname, gitpatches)
228 patchname = dogitpatch(patchname, gitpatches)
229 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
229 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
230 args = []
230 args = []
231 if cwd:
231 if cwd:
232 args.append('-d %s' % util.shellquote(cwd))
232 args.append('-d %s' % util.shellquote(cwd))
233 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
233 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
234 util.shellquote(patchname)))
234 util.shellquote(patchname)))
235
235
236 if dopatch == 'filter':
236 if dopatch == 'filter':
237 False and os.unlink(patchname)
237 False and os.unlink(patchname)
238
238
239 for line in fp:
239 for line in fp:
240 line = line.rstrip()
240 line = line.rstrip()
241 ui.note(line + '\n')
241 ui.note(line + '\n')
242 if line.startswith('patching file '):
242 if line.startswith('patching file '):
243 pf = util.parse_patch_output(line)
243 pf = util.parse_patch_output(line)
244 printed_file = False
244 printed_file = False
245 files.setdefault(pf, (None, None))
245 files.setdefault(pf, (None, None))
246 elif line.find('with fuzz') >= 0:
246 elif line.find('with fuzz') >= 0:
247 fuzz = True
247 fuzz = True
248 if not printed_file:
248 if not printed_file:
249 ui.warn(pf + '\n')
249 ui.warn(pf + '\n')
250 printed_file = True
250 printed_file = True
251 ui.warn(line + '\n')
251 ui.warn(line + '\n')
252 elif line.find('saving rejects to file') >= 0:
252 elif line.find('saving rejects to file') >= 0:
253 ui.warn(line + '\n')
253 ui.warn(line + '\n')
254 elif line.find('FAILED') >= 0:
254 elif line.find('FAILED') >= 0:
255 if not printed_file:
255 if not printed_file:
256 ui.warn(pf + '\n')
256 ui.warn(pf + '\n')
257 printed_file = True
257 printed_file = True
258 ui.warn(line + '\n')
258 ui.warn(line + '\n')
259
259
260 code = fp.close()
260 code = fp.close()
261 if code:
261 if code:
262 raise util.Abort(_("patch command failed: %s") %
262 raise util.Abort(_("patch command failed: %s") %
263 util.explain_exit(code)[0])
263 util.explain_exit(code)[0])
264
264
265 for gp in gitpatches:
265 for gp in gitpatches:
266 files[gp.path] = (gp.op, gp)
266 files[gp.path] = (gp.op, gp)
267
267
268 return (files, fuzz)
268 return (files, fuzz)
269
269
270 def diffopts(ui, opts={}):
271 return mdiff.diffopts(
272 text=opts.get('text'),
273 git=(opts.get('git') or
274 ui.configbool('diff', 'git', None)),
275 showfunc=(opts.get('show_function') or
276 ui.configbool('diff', 'showfunc', None)),
277 ignorews=(opts.get('ignore_all_space') or
278 ui.configbool('diff', 'ignorews', None)),
279 ignorewsamount=(opts.get('ignore_space_change') or
280 ui.configbool('diff', 'ignorewsamount', None)),
281 ignoreblanklines=(opts.get('ignore_blank_lines') or
282 ui.configbool('diff', 'ignoreblanklines', None)))
283
270 def diff(repo, node1=None, node2=None, files=None, match=util.always,
284 def diff(repo, node1=None, node2=None, files=None, match=util.always,
271 fp=None, changes=None, opts=None):
285 fp=None, changes=None, opts=None):
272 '''print diff of changes to files between two nodes, or node and
286 '''print diff of changes to files between two nodes, or node and
273 working directory.
287 working directory.
274
288
275 if node1 is None, use first dirstate parent instead.
289 if node1 is None, use first dirstate parent instead.
276 if node2 is None, compare node1 with working directory.'''
290 if node2 is None, compare node1 with working directory.'''
277
291
278 if opts is None:
292 if opts is None:
279 opts = mdiff.defaultopts
293 opts = mdiff.defaultopts
280 if fp is None:
294 if fp is None:
281 fp = repo.ui
295 fp = repo.ui
282
296
283 if not node1:
297 if not node1:
284 node1 = repo.dirstate.parents()[0]
298 node1 = repo.dirstate.parents()[0]
285 # reading the data for node1 early allows it to play nicely
299 # reading the data for node1 early allows it to play nicely
286 # with repo.status and the revlog cache.
300 # with repo.status and the revlog cache.
287 change = repo.changelog.read(node1)
301 change = repo.changelog.read(node1)
288 mmap = repo.manifest.read(change[0])
302 mmap = repo.manifest.read(change[0])
289 date1 = util.datestr(change[2])
303 date1 = util.datestr(change[2])
290
304
291 if not changes:
305 if not changes:
292 changes = repo.status(node1, node2, files, match=match)[:5]
306 changes = repo.status(node1, node2, files, match=match)[:5]
293 modified, added, removed, deleted, unknown = changes
307 modified, added, removed, deleted, unknown = changes
294 if files:
308 if files:
295 def filterfiles(filters):
309 def filterfiles(filters):
296 l = [x for x in filters if x in files]
310 l = [x for x in filters if x in files]
297
311
298 for t in files:
312 for t in files:
299 if not t.endswith("/"):
313 if not t.endswith("/"):
300 t += "/"
314 t += "/"
301 l += [x for x in filters if x.startswith(t)]
315 l += [x for x in filters if x.startswith(t)]
302 return l
316 return l
303
317
304 modified, added, removed = map(filterfiles, (modified, added, removed))
318 modified, added, removed = map(filterfiles, (modified, added, removed))
305
319
306 if not modified and not added and not removed:
320 if not modified and not added and not removed:
307 return
321 return
308
322
309 if node2:
323 if node2:
310 change = repo.changelog.read(node2)
324 change = repo.changelog.read(node2)
311 mmap2 = repo.manifest.read(change[0])
325 mmap2 = repo.manifest.read(change[0])
312 _date2 = util.datestr(change[2])
326 _date2 = util.datestr(change[2])
313 def date2(f):
327 def date2(f):
314 return _date2
328 return _date2
315 def read(f):
329 def read(f):
316 return repo.file(f).read(mmap2[f])
330 return repo.file(f).read(mmap2[f])
317 def renamed(f):
331 def renamed(f):
318 src = repo.file(f).renamed(mmap2[f])
332 src = repo.file(f).renamed(mmap2[f])
319 return src and src[0] or None
333 return src and src[0] or None
320 else:
334 else:
321 tz = util.makedate()[1]
335 tz = util.makedate()[1]
322 _date2 = util.datestr()
336 _date2 = util.datestr()
323 def date2(f):
337 def date2(f):
324 try:
338 try:
325 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
339 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
326 except OSError, err:
340 except OSError, err:
327 if err.errno != errno.ENOENT: raise
341 if err.errno != errno.ENOENT: raise
328 return _date2
342 return _date2
329 def read(f):
343 def read(f):
330 return repo.wread(f)
344 return repo.wread(f)
331 def renamed(f):
345 def renamed(f):
332 return repo.dirstate.copies.get(f)
346 return repo.dirstate.copies.get(f)
333
347
334 if repo.ui.quiet:
348 if repo.ui.quiet:
335 r = None
349 r = None
336 else:
350 else:
337 hexfunc = repo.ui.verbose and hex or short
351 hexfunc = repo.ui.verbose and hex or short
338 r = [hexfunc(node) for node in [node1, node2] if node]
352 r = [hexfunc(node) for node in [node1, node2] if node]
339
353
340 if opts.git:
354 if opts.git:
341 copied = {}
355 copied = {}
342 for f in added:
356 for f in added:
343 src = renamed(f)
357 src = renamed(f)
344 if src:
358 if src:
345 copied[f] = src
359 copied[f] = src
346 srcs = [x[1] for x in copied.items()]
360 srcs = [x[1] for x in copied.items()]
347
361
348 all = modified + added + removed
362 all = modified + added + removed
349 all.sort()
363 all.sort()
350 for f in all:
364 for f in all:
351 to = None
365 to = None
352 tn = None
366 tn = None
353 dodiff = True
367 dodiff = True
354 if f in mmap:
368 if f in mmap:
355 to = repo.file(f).read(mmap[f])
369 to = repo.file(f).read(mmap[f])
356 if f not in removed:
370 if f not in removed:
357 tn = read(f)
371 tn = read(f)
358 if opts.git:
372 if opts.git:
359 def gitmode(x):
373 def gitmode(x):
360 return x and '100755' or '100644'
374 return x and '100755' or '100644'
361 def addmodehdr(header, omode, nmode):
375 def addmodehdr(header, omode, nmode):
362 if omode != nmode:
376 if omode != nmode:
363 header.append('old mode %s\n' % omode)
377 header.append('old mode %s\n' % omode)
364 header.append('new mode %s\n' % nmode)
378 header.append('new mode %s\n' % nmode)
365
379
366 a, b = f, f
380 a, b = f, f
367 header = []
381 header = []
368 if f in added:
382 if f in added:
369 if node2:
383 if node2:
370 mode = gitmode(mmap2.execf(f))
384 mode = gitmode(mmap2.execf(f))
371 else:
385 else:
372 mode = gitmode(util.is_exec(repo.wjoin(f), None))
386 mode = gitmode(util.is_exec(repo.wjoin(f), None))
373 if f in copied:
387 if f in copied:
374 a = copied[f]
388 a = copied[f]
375 omode = gitmode(mmap.execf(a))
389 omode = gitmode(mmap.execf(a))
376 addmodehdr(header, omode, mode)
390 addmodehdr(header, omode, mode)
377 op = a in removed and 'rename' or 'copy'
391 op = a in removed and 'rename' or 'copy'
378 header.append('%s from %s\n' % (op, a))
392 header.append('%s from %s\n' % (op, a))
379 header.append('%s to %s\n' % (op, f))
393 header.append('%s to %s\n' % (op, f))
380 to = repo.file(a).read(mmap[a])
394 to = repo.file(a).read(mmap[a])
381 else:
395 else:
382 header.append('new file mode %s\n' % mode)
396 header.append('new file mode %s\n' % mode)
383 elif f in removed:
397 elif f in removed:
384 if f in srcs:
398 if f in srcs:
385 dodiff = False
399 dodiff = False
386 else:
400 else:
387 mode = gitmode(mmap.execf(f))
401 mode = gitmode(mmap.execf(f))
388 header.append('deleted file mode %s\n' % mode)
402 header.append('deleted file mode %s\n' % mode)
389 else:
403 else:
390 omode = gitmode(mmap.execf(f))
404 omode = gitmode(mmap.execf(f))
391 nmode = gitmode(util.is_exec(repo.wjoin(f), mmap.execf(f)))
405 nmode = gitmode(util.is_exec(repo.wjoin(f), mmap.execf(f)))
392 addmodehdr(header, omode, nmode)
406 addmodehdr(header, omode, nmode)
393 r = None
407 r = None
394 if dodiff:
408 if dodiff:
395 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
409 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
396 fp.write(''.join(header))
410 fp.write(''.join(header))
397 if dodiff:
411 if dodiff:
398 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
412 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
399
413
400 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
414 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
401 opts=None):
415 opts=None):
402 '''export changesets as hg patches.'''
416 '''export changesets as hg patches.'''
403
417
404 total = len(revs)
418 total = len(revs)
405 revwidth = max(map(len, revs))
419 revwidth = max(map(len, revs))
406
420
407 def single(node, seqno, fp):
421 def single(node, seqno, fp):
408 parents = [p for p in repo.changelog.parents(node) if p != nullid]
422 parents = [p for p in repo.changelog.parents(node) if p != nullid]
409 if switch_parent:
423 if switch_parent:
410 parents.reverse()
424 parents.reverse()
411 prev = (parents and parents[0]) or nullid
425 prev = (parents and parents[0]) or nullid
412 change = repo.changelog.read(node)
426 change = repo.changelog.read(node)
413
427
414 if not fp:
428 if not fp:
415 fp = cmdutil.make_file(repo, template, node, total=total,
429 fp = cmdutil.make_file(repo, template, node, total=total,
416 seqno=seqno, revwidth=revwidth)
430 seqno=seqno, revwidth=revwidth)
417 if fp not in (sys.stdout, repo.ui):
431 if fp not in (sys.stdout, repo.ui):
418 repo.ui.note("%s\n" % fp.name)
432 repo.ui.note("%s\n" % fp.name)
419
433
420 fp.write("# HG changeset patch\n")
434 fp.write("# HG changeset patch\n")
421 fp.write("# User %s\n" % change[1])
435 fp.write("# User %s\n" % change[1])
422 fp.write("# Date %d %d\n" % change[2])
436 fp.write("# Date %d %d\n" % change[2])
423 fp.write("# Node ID %s\n" % hex(node))
437 fp.write("# Node ID %s\n" % hex(node))
424 fp.write("# Parent %s\n" % hex(prev))
438 fp.write("# Parent %s\n" % hex(prev))
425 if len(parents) > 1:
439 if len(parents) > 1:
426 fp.write("# Parent %s\n" % hex(parents[1]))
440 fp.write("# Parent %s\n" % hex(parents[1]))
427 fp.write(change[4].rstrip())
441 fp.write(change[4].rstrip())
428 fp.write("\n\n")
442 fp.write("\n\n")
429
443
430 diff(repo, prev, node, fp=fp, opts=opts)
444 diff(repo, prev, node, fp=fp, opts=opts)
431 if fp not in (sys.stdout, repo.ui):
445 if fp not in (sys.stdout, repo.ui):
432 fp.close()
446 fp.close()
433
447
434 for seqno, cset in enumerate(revs):
448 for seqno, cset in enumerate(revs):
435 single(cset, seqno, fp)
449 single(cset, seqno, fp)
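
extract() near the top of this file locates the patch inside an email body with the quilt-derived diffre pattern and, roughly speaking, keeps everything before the first match as commit-message text (the real code also filters '# HG changeset patch' header lines out of that prefix). A small standalone demonstration of that split, reusing the same regular expression on a made-up message body; the sample payload is purely illustrative.

import re

# the start-of-patch heuristic extract() borrows from quilt
diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
                    r'retrieving revision [0-9]+(\.[0-9]+)*$|'
                    r'(---|\*\*\*)[ \t])', re.MULTILINE)

payload = """Here is the fix we talked about.

diff -r aaaaaaaaaaaa -r bbbbbbbbbbbb hello.py
--- a/hello.py
+++ b/hello.py
@@ -1,1 +1,1 @@
-print 'hello'
+print 'hello, world'
"""

m = diffre.search(payload)
assert m is not None
description = payload[:m.start(0)].rstrip()   # would become the changeset message
patch_text = payload[m.start(0):]             # would be written to the temp patch file
print(description)
print(patch_text.splitlines()[0])
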
@@ -1,1285 +1,1298 b''
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
17 demandload(globals(), "sha struct util zlib")
17 demandload(globals(), "sha struct util zlib")
18
18
19 # revlog version strings
19 # revlog version strings
20 REVLOGV0 = 0
20 REVLOGV0 = 0
21 REVLOGNG = 1
21 REVLOGNG = 1
22
22
23 # revlog flags
23 # revlog flags
24 REVLOGNGINLINEDATA = (1 << 16)
24 REVLOGNGINLINEDATA = (1 << 16)
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
26
26
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
29
29
30 def flagstr(flag):
30 def flagstr(flag):
31 if flag == "inline":
31 if flag == "inline":
32 return REVLOGNGINLINEDATA
32 return REVLOGNGINLINEDATA
33 raise RevlogError(_("unknown revlog flag %s" % flag))
33 raise RevlogError(_("unknown revlog flag %s" % flag))
34
34
35 def hash(text, p1, p2):
35 def hash(text, p1, p2):
36 """generate a hash from the given text and its parent hashes
36 """generate a hash from the given text and its parent hashes
37
37
38 This hash combines both the current file contents and its history
38 This hash combines both the current file contents and its history
39 in a manner that makes it easy to distinguish nodes with the same
39 in a manner that makes it easy to distinguish nodes with the same
40 content in the revision graph.
40 content in the revision graph.
41 """
41 """
42 l = [p1, p2]
42 l = [p1, p2]
43 l.sort()
43 l.sort()
44 s = sha.new(l[0])
44 s = sha.new(l[0])
45 s.update(l[1])
45 s.update(l[1])
46 s.update(text)
46 s.update(text)
47 return s.digest()
47 return s.digest()
48
48
49 def compress(text):
49 def compress(text):
50 """ generate a possibly-compressed representation of text """
50 """ generate a possibly-compressed representation of text """
51 if not text: return ("", text)
51 if not text: return ("", text)
52 if len(text) < 44:
52 if len(text) < 44:
53 if text[0] == '\0': return ("", text)
53 if text[0] == '\0': return ("", text)
54 return ('u', text)
54 return ('u', text)
55 bin = zlib.compress(text)
55 bin = zlib.compress(text)
56 if len(bin) > len(text):
56 if len(bin) > len(text):
57 if text[0] == '\0': return ("", text)
57 if text[0] == '\0': return ("", text)
58 return ('u', text)
58 return ('u', text)
59 return ("", bin)
59 return ("", bin)
60
60
61 def decompress(bin):
61 def decompress(bin):
62 """ decompress the given input """
62 """ decompress the given input """
63 if not bin: return bin
63 if not bin: return bin
64 t = bin[0]
64 t = bin[0]
65 if t == '\0': return bin
65 if t == '\0': return bin
66 if t == 'x': return zlib.decompress(bin)
66 if t == 'x': return zlib.decompress(bin)
67 if t == 'u': return bin[1:]
67 if t == 'u': return bin[1:]
68 raise RevlogError(_("unknown compression type %r") % t)
68 raise RevlogError(_("unknown compression type %r") % t)
69
69
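
Two properties of the helpers just above are worth noting: hash() feeds the two parent node ids through SHA-1 in sorted order, so swapping the parents yields the same node id, and compress()/decompress() either store a zlib stream (which begins with 'x') or a literal copy prefixed with 'u'. The sketch below checks both; it is standalone, written for a current Python with hashlib in place of the old sha module, skips the short-text and NUL-prefix special cases, and node_hash/roundtrip are illustrative names.

import hashlib, zlib

def node_hash(text, p1, p2):
    # sha1 over the sorted parent ids plus the text: parent order is irrelevant
    l = sorted([p1, p2])
    s = hashlib.sha1(l[0])
    s.update(l[1])
    s.update(text)
    return s.digest()

def roundtrip(text):
    # store zlib output when it is no larger, else a literal 'u'-prefixed copy
    bin = zlib.compress(text)
    stored = bin if len(bin) <= len(text) else b'u' + text
    if stored[:1] == b'x':          # zlib streams start with 0x78 ('x')
        return zlib.decompress(stored)
    if stored[:1] == b'u':
        return stored[1:]
    return stored

nullid = b'\0' * 20
p1 = hashlib.sha1(b'a parent').digest()
assert node_hash(b'text', p1, nullid) == node_hash(b'text', nullid, p1)
assert roundtrip(b'short') == b'short'
assert roundtrip(b'x' * 4096) == b'x' * 4096
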
70 indexformatv0 = ">4l20s20s20s"
70 indexformatv0 = ">4l20s20s20s"
71 v0shaoffset = 56
71 v0shaoffset = 56
72 # index ng:
72 # index ng:
73 # 6 bytes offset
73 # 6 bytes offset
74 # 2 bytes flags
74 # 2 bytes flags
75 # 4 bytes compressed length
75 # 4 bytes compressed length
76 # 4 bytes uncompressed length
76 # 4 bytes uncompressed length
77 # 4 bytes: base rev
77 # 4 bytes: base rev
78 # 4 bytes link rev
78 # 4 bytes link rev
79 # 4 bytes parent 1 rev
79 # 4 bytes parent 1 rev
80 # 4 bytes parent 2 rev
80 # 4 bytes parent 2 rev
81 # 32 bytes: nodeid
81 # 32 bytes: nodeid
82 indexformatng = ">Qiiiiii20s12x"
82 indexformatng = ">Qiiiiii20s12x"
83 ngshaoffset = 32
83 ngshaoffset = 32
84 versionformat = ">i"
84 versionformat = ">i"
85
85
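The record sizes implied by the two format strings above, checked with struct (self-contained):

import struct
assert struct.calcsize(">4l20s20s20s") == 76      # one v0 index record
assert struct.calcsize(">Qiiiiii20s12x") == 64    # one revlogng index record
# the nodeid is the last real field, which is where the sha offsets come from:
# v0: 4*4 bytes of ints + 2*20 bytes of parents = 56; ng: 8 + 6*4 = 32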
86 class lazyparser(object):
86 class lazyparser(object):
87 """
87 """
88 this class avoids the need to parse the entirety of large indices
88 this class avoids the need to parse the entirety of large indices
89 """
89 """
90
90
91 # lazyparser is not safe to use on windows if win32 extensions not
91 # lazyparser is not safe to use on windows if win32 extensions not
92 # available. it keeps the file handle open, which makes it impossible
92 # available. it keeps the file handle open, which makes it impossible
93 # to break hardlinks on local cloned repos.
93 # to break hardlinks on local cloned repos.
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
95 hasattr(util, 'win32api'))
95 hasattr(util, 'win32api'))
96
96
97 def __init__(self, dataf, size, indexformat, shaoffset):
97 def __init__(self, dataf, size, indexformat, shaoffset):
98 self.dataf = dataf
98 self.dataf = dataf
99 self.format = indexformat
99 self.format = indexformat
100 self.s = struct.calcsize(indexformat)
100 self.s = struct.calcsize(indexformat)
101 self.indexformat = indexformat
101 self.indexformat = indexformat
102 self.datasize = size
102 self.datasize = size
103 self.l = size/self.s
103 self.l = size/self.s
104 self.index = [None] * self.l
104 self.index = [None] * self.l
105 self.map = {nullid: -1}
105 self.map = {nullid: -1}
106 self.allmap = 0
106 self.allmap = 0
107 self.all = 0
107 self.all = 0
108 self.mapfind_count = 0
108 self.mapfind_count = 0
109 self.shaoffset = shaoffset
109 self.shaoffset = shaoffset
110
110
111 def loadmap(self):
111 def loadmap(self):
112 """
112 """
113 during a commit, we need to make sure the rev being added is
113 during a commit, we need to make sure the rev being added is
114 not a duplicate. This requires loading the entire index,
114 not a duplicate. This requires loading the entire index,
115 which is fairly slow. loadmap can load up just the node map,
115 which is fairly slow. loadmap can load up just the node map,
116 which takes much less time.
116 which takes much less time.
117 """
117 """
118 if self.allmap: return
118 if self.allmap: return
119 start = 0
119 start = 0
120 end = self.datasize
120 end = self.datasize
121 self.allmap = 1
121 self.allmap = 1
122 cur = 0
122 cur = 0
123 count = 0
123 count = 0
124 blocksize = self.s * 256
124 blocksize = self.s * 256
125 self.dataf.seek(0)
125 self.dataf.seek(0)
126 while cur < end:
126 while cur < end:
127 data = self.dataf.read(blocksize)
127 data = self.dataf.read(blocksize)
128 off = 0
128 off = 0
129 for x in xrange(256):
129 for x in xrange(256):
130 n = data[off + self.shaoffset:off + self.shaoffset + 20]
130 n = data[off + self.shaoffset:off + self.shaoffset + 20]
131 self.map[n] = count
131 self.map[n] = count
132 count += 1
132 count += 1
133 if count >= self.l:
133 if count >= self.l:
134 break
134 break
135 off += self.s
135 off += self.s
136 cur += blocksize
136 cur += blocksize
137
137
138 def loadblock(self, blockstart, blocksize, data=None):
138 def loadblock(self, blockstart, blocksize, data=None):
139 if self.all: return
139 if self.all: return
140 if data is None:
140 if data is None:
141 self.dataf.seek(blockstart)
141 self.dataf.seek(blockstart)
142 data = self.dataf.read(blocksize)
142 data = self.dataf.read(blocksize)
143 lend = len(data) / self.s
143 lend = len(data) / self.s
144 i = blockstart / self.s
144 i = blockstart / self.s
145 off = 0
145 off = 0
146 for x in xrange(lend):
146 for x in xrange(lend):
147 if self.index[i + x] == None:
147 if self.index[i + x] == None:
148 b = data[off : off + self.s]
148 b = data[off : off + self.s]
149 self.index[i + x] = b
149 self.index[i + x] = b
150 n = b[self.shaoffset:self.shaoffset + 20]
150 n = b[self.shaoffset:self.shaoffset + 20]
151 self.map[n] = i + x
151 self.map[n] = i + x
152 off += self.s
152 off += self.s
153
153
154 def findnode(self, node):
154 def findnode(self, node):
155 """search backwards through the index file for a specific node"""
155 """search backwards through the index file for a specific node"""
156 if self.allmap: return None
156 if self.allmap: return None
157
157
158 # hg log will cause many many searches for the manifest
158 # hg log will cause many many searches for the manifest
159 # nodes. After we get called a few times, just load the whole
159 # nodes. After we get called a few times, just load the whole
160 # thing.
160 # thing.
161 if self.mapfind_count > 8:
161 if self.mapfind_count > 8:
162 self.loadmap()
162 self.loadmap()
163 if node in self.map:
163 if node in self.map:
164 return node
164 return node
165 return None
165 return None
166 self.mapfind_count += 1
166 self.mapfind_count += 1
167 last = self.l - 1
167 last = self.l - 1
168 while self.index[last] != None:
168 while self.index[last] != None:
169 if last == 0:
169 if last == 0:
170 self.all = 1
170 self.all = 1
171 self.allmap = 1
171 self.allmap = 1
172 return None
172 return None
173 last -= 1
173 last -= 1
174 end = (last + 1) * self.s
174 end = (last + 1) * self.s
175 blocksize = self.s * 256
175 blocksize = self.s * 256
176 while end >= 0:
176 while end >= 0:
177 start = max(end - blocksize, 0)
177 start = max(end - blocksize, 0)
178 self.dataf.seek(start)
178 self.dataf.seek(start)
179 data = self.dataf.read(end - start)
179 data = self.dataf.read(end - start)
180 findend = end - start
180 findend = end - start
181 while True:
181 while True:
182 # we're searching backwards, so we have to make sure
182 # we're searching backwards, so we have to make sure
183 # we don't find a changeset where this node is a parent
183 # we don't find a changeset where this node is a parent
184 off = data.rfind(node, 0, findend)
184 off = data.rfind(node, 0, findend)
185 findend = off
185 findend = off
186 if off >= 0:
186 if off >= 0:
187 i = off / self.s
187 i = off / self.s
188 off = i * self.s
188 off = i * self.s
189 n = data[off + self.shaoffset:off + self.shaoffset + 20]
189 n = data[off + self.shaoffset:off + self.shaoffset + 20]
190 if n == node:
190 if n == node:
191 self.map[n] = i + start / self.s
191 self.map[n] = i + start / self.s
192 return node
192 return node
193 else:
193 else:
194 break
194 break
195 end -= blocksize
195 end -= blocksize
196 return None
196 return None
197
197
198 def loadindex(self, i=None, end=None):
198 def loadindex(self, i=None, end=None):
199 if self.all: return
199 if self.all: return
200 all = False
200 all = False
201 if i == None:
201 if i == None:
202 blockstart = 0
202 blockstart = 0
203 blocksize = (512 / self.s) * self.s
203 blocksize = (512 / self.s) * self.s
204 end = self.datasize
204 end = self.datasize
205 all = True
205 all = True
206 else:
206 else:
207 if end:
207 if end:
208 blockstart = i * self.s
208 blockstart = i * self.s
209 end = end * self.s
209 end = end * self.s
210 blocksize = end - blockstart
210 blocksize = end - blockstart
211 else:
211 else:
212 blockstart = (i & ~(32)) * self.s
212 blockstart = (i & ~(32)) * self.s
213 blocksize = self.s * 64
213 blocksize = self.s * 64
214 end = blockstart + blocksize
214 end = blockstart + blocksize
215 while blockstart < end:
215 while blockstart < end:
216 self.loadblock(blockstart, blocksize)
216 self.loadblock(blockstart, blocksize)
217 blockstart += blocksize
217 blockstart += blocksize
218 if all: self.all = True
218 if all: self.all = True
219
219
220 class lazyindex(object):
220 class lazyindex(object):
221 """a lazy version of the index array"""
221 """a lazy version of the index array"""
222 def __init__(self, parser):
222 def __init__(self, parser):
223 self.p = parser
223 self.p = parser
224 def __len__(self):
224 def __len__(self):
225 return len(self.p.index)
225 return len(self.p.index)
226 def load(self, pos):
226 def load(self, pos):
227 if pos < 0:
227 if pos < 0:
228 pos += len(self.p.index)
228 pos += len(self.p.index)
229 self.p.loadindex(pos)
229 self.p.loadindex(pos)
230 return self.p.index[pos]
230 return self.p.index[pos]
231 def __getitem__(self, pos):
231 def __getitem__(self, pos):
232 ret = self.p.index[pos] or self.load(pos)
232 ret = self.p.index[pos] or self.load(pos)
233 if isinstance(ret, str):
233 if isinstance(ret, str):
234 ret = struct.unpack(self.p.indexformat, ret)
234 ret = struct.unpack(self.p.indexformat, ret)
235 return ret
235 return ret
236 def __setitem__(self, pos, item):
236 def __setitem__(self, pos, item):
237 self.p.index[pos] = item
237 self.p.index[pos] = item
238 def __delitem__(self, pos):
238 def __delitem__(self, pos):
239 del self.p.index[pos]
239 del self.p.index[pos]
240 def append(self, e):
240 def append(self, e):
241 self.p.index.append(e)
241 self.p.index.append(e)
242
242
243 class lazymap(object):
243 class lazymap(object):
244 """a lazy version of the node map"""
244 """a lazy version of the node map"""
245 def __init__(self, parser):
245 def __init__(self, parser):
246 self.p = parser
246 self.p = parser
247 def load(self, key):
247 def load(self, key):
248 n = self.p.findnode(key)
248 n = self.p.findnode(key)
249 if n == None:
249 if n == None:
250 raise KeyError(key)
250 raise KeyError(key)
251 def __contains__(self, key):
251 def __contains__(self, key):
252 if key in self.p.map:
252 if key in self.p.map:
253 return True
253 return True
254 self.p.loadmap()
254 self.p.loadmap()
255 return key in self.p.map
255 return key in self.p.map
256 def __iter__(self):
256 def __iter__(self):
257 yield nullid
257 yield nullid
258 for i in xrange(self.p.l):
258 for i in xrange(self.p.l):
259 ret = self.p.index[i]
259 ret = self.p.index[i]
260 if not ret:
260 if not ret:
261 self.p.loadindex(i)
261 self.p.loadindex(i)
262 ret = self.p.index[i]
262 ret = self.p.index[i]
263 if isinstance(ret, str):
263 if isinstance(ret, str):
264 ret = struct.unpack(self.p.indexformat, ret)
264 ret = struct.unpack(self.p.indexformat, ret)
265 yield ret[-1]
265 yield ret[-1]
266 def __getitem__(self, key):
266 def __getitem__(self, key):
267 try:
267 try:
268 return self.p.map[key]
268 return self.p.map[key]
269 except KeyError:
269 except KeyError:
270 try:
270 try:
271 self.load(key)
271 self.load(key)
272 return self.p.map[key]
272 return self.p.map[key]
273 except KeyError:
273 except KeyError:
274 raise KeyError("node " + hex(key))
274 raise KeyError("node " + hex(key))
275 def __setitem__(self, key, val):
275 def __setitem__(self, key, val):
276 self.p.map[key] = val
276 self.p.map[key] = val
277 def __delitem__(self, key):
277 def __delitem__(self, key):
278 del self.p.map[key]
278 del self.p.map[key]
279
279
280 class RevlogError(Exception): pass
280 class RevlogError(Exception): pass
281
281
282 class revlog(object):
282 class revlog(object):
283 """
283 """
284 the underlying revision storage object
284 the underlying revision storage object
285
285
286 A revlog consists of two parts, an index and the revision data.
286 A revlog consists of two parts, an index and the revision data.
287
287
288 The index is a file with a fixed record size containing
288 The index is a file with a fixed record size containing
289 information on each revision, including its nodeid (hash), the
289 information on each revision, including its nodeid (hash), the
290 nodeids of its parents, the position and offset of its data within
290 nodeids of its parents, the position and offset of its data within
291 the data file, and the revision it's based on. Finally, each entry
291 the data file, and the revision it's based on. Finally, each entry
292 contains a linkrev entry that can serve as a pointer to external
292 contains a linkrev entry that can serve as a pointer to external
293 data.
293 data.
294
294
295 The revision data itself is a linear collection of data chunks.
295 The revision data itself is a linear collection of data chunks.
296 Each chunk represents a revision and is usually represented as a
296 Each chunk represents a revision and is usually represented as a
297 delta against the previous chunk. To bound lookup time, runs of
297 delta against the previous chunk. To bound lookup time, runs of
298 deltas are limited to about 2 times the length of the original
298 deltas are limited to about 2 times the length of the original
299 version data. This makes retrieval of a version proportional to
299 version data. This makes retrieval of a version proportional to
300 its size, or O(1) relative to the number of revisions.
300 its size, or O(1) relative to the number of revisions.
301
301
302 Both pieces of the revlog are written to in an append-only
302 Both pieces of the revlog are written to in an append-only
303 fashion, which means we never need to rewrite a file to insert or
303 fashion, which means we never need to rewrite a file to insert or
304 remove data, and can use some simple techniques to avoid the need
304 remove data, and can use some simple techniques to avoid the need
305 for locking while reading.
305 for locking while reading.
306 """
306 """
307 def __init__(self, opener, indexfile, datafile,
307 def __init__(self, opener, indexfile, datafile,
308 defversion=REVLOG_DEFAULT_VERSION):
308 defversion=REVLOG_DEFAULT_VERSION):
309 """
309 """
310 create a revlog object
310 create a revlog object
311
311
312 opener is a function that abstracts the file opening operation
312 opener is a function that abstracts the file opening operation
313 and can be used to implement COW semantics or the like.
313 and can be used to implement COW semantics or the like.
314 """
314 """
315 self.indexfile = indexfile
315 self.indexfile = indexfile
316 self.datafile = datafile
316 self.datafile = datafile
317 self.opener = opener
317 self.opener = opener
318
318
319 self.indexstat = None
319 self.indexstat = None
320 self.cache = None
320 self.cache = None
321 self.chunkcache = None
321 self.chunkcache = None
322 self.defversion = defversion
322 self.defversion = defversion
323 self.load()
323 self.load()
324
324
325 def load(self):
325 def load(self):
326 v = self.defversion
326 v = self.defversion
327 try:
327 try:
328 f = self.opener(self.indexfile)
328 f = self.opener(self.indexfile)
329 i = f.read(4)
329 i = f.read(4)
330 f.seek(0)
330 f.seek(0)
331 except IOError, inst:
331 except IOError, inst:
332 if inst.errno != errno.ENOENT:
332 if inst.errno != errno.ENOENT:
333 raise
333 raise
334 i = ""
334 i = ""
335 else:
335 else:
336 try:
336 try:
337 st = util.fstat(f)
337 st = util.fstat(f)
338 except AttributeError, inst:
338 except AttributeError, inst:
339 st = None
339 st = None
340 else:
340 else:
341 oldst = self.indexstat
341 oldst = self.indexstat
342 if (oldst and st.st_dev == oldst.st_dev
342 if (oldst and st.st_dev == oldst.st_dev
343 and st.st_ino == oldst.st_ino
343 and st.st_ino == oldst.st_ino
344 and st.st_mtime == oldst.st_mtime
344 and st.st_mtime == oldst.st_mtime
345 and st.st_ctime == oldst.st_ctime):
345 and st.st_ctime == oldst.st_ctime):
346 return
346 return
347 self.indexstat = st
347 self.indexstat = st
348 if len(i) > 0:
348 if len(i) > 0:
349 v = struct.unpack(versionformat, i)[0]
349 v = struct.unpack(versionformat, i)[0]
350 flags = v & ~0xFFFF
350 flags = v & ~0xFFFF
351 fmt = v & 0xFFFF
351 fmt = v & 0xFFFF
352 if fmt == REVLOGV0:
352 if fmt == REVLOGV0:
353 if flags:
353 if flags:
354 raise RevlogError(_("index %s invalid flags %x for format v0") %
354 raise RevlogError(_("index %s invalid flags %x for format v0") %
355 (self.indexfile, flags))
355 (self.indexfile, flags))
356 elif fmt == REVLOGNG:
356 elif fmt == REVLOGNG:
357 if flags & ~REVLOGNGINLINEDATA:
357 if flags & ~REVLOGNGINLINEDATA:
358 raise RevlogError(_("index %s invalid flags %x for revlogng") %
358 raise RevlogError(_("index %s invalid flags %x for revlogng") %
359 (self.indexfile, flags))
359 (self.indexfile, flags))
360 else:
360 else:
361 raise RevlogError(_("index %s invalid format %d") %
361 raise RevlogError(_("index %s invalid format %d") %
362 (self.indexfile, fmt))
362 (self.indexfile, fmt))
363 self.version = v
363 self.version = v
364 if v == REVLOGV0:
364 if v == REVLOGV0:
365 self.indexformat = indexformatv0
365 self.indexformat = indexformatv0
366 shaoffset = v0shaoffset
366 shaoffset = v0shaoffset
367 else:
367 else:
368 self.indexformat = indexformatng
368 self.indexformat = indexformatng
369 shaoffset = ngshaoffset
369 shaoffset = ngshaoffset
370
370
371 if i:
371 if i:
372 if (lazyparser.safe_to_use and not self.inlinedata() and
372 if (lazyparser.safe_to_use and not self.inlinedata() and
373 st and st.st_size > 10000):
373 st and st.st_size > 10000):
374 # big index, let's parse it on demand
374 # big index, let's parse it on demand
375 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
375 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
376 self.index = lazyindex(parser)
376 self.index = lazyindex(parser)
377 self.nodemap = lazymap(parser)
377 self.nodemap = lazymap(parser)
378 else:
378 else:
379 self.parseindex(f, st)
379 self.parseindex(f, st)
380 if self.version != REVLOGV0:
380 if self.version != REVLOGV0:
381 e = list(self.index[0])
381 e = list(self.index[0])
382 type = self.ngtype(e[0])
382 type = self.ngtype(e[0])
383 e[0] = self.offset_type(0, type)
383 e[0] = self.offset_type(0, type)
384 self.index[0] = e
384 self.index[0] = e
385 else:
385 else:
386 self.nodemap = { nullid: -1}
386 self.nodemap = { nullid: -1}
387 self.index = []
387 self.index = []
388
388
389
389
390 def parseindex(self, fp, st):
390 def parseindex(self, fp, st):
391 s = struct.calcsize(self.indexformat)
391 s = struct.calcsize(self.indexformat)
392 self.index = []
392 self.index = []
393 self.nodemap = {nullid: -1}
393 self.nodemap = {nullid: -1}
394 inline = self.inlinedata()
394 inline = self.inlinedata()
395 n = 0
395 n = 0
396 leftover = None
396 leftover = None
397 while True:
397 while True:
398 if st:
398 if st:
399 data = fp.read(65536)
399 data = fp.read(65536)
400 else:
400 else:
401 # hack for httprangereader, it doesn't do partial reads well
401 # hack for httprangereader, it doesn't do partial reads well
402 data = fp.read()
402 data = fp.read()
403 if not data:
403 if not data:
404 break
404 break
405 if n == 0 and self.inlinedata():
405 if n == 0 and self.inlinedata():
406 # cache the first chunk
406 # cache the first chunk
407 self.chunkcache = (0, data)
407 self.chunkcache = (0, data)
408 if leftover:
408 if leftover:
409 data = leftover + data
409 data = leftover + data
410 leftover = None
410 leftover = None
411 off = 0
411 off = 0
412 l = len(data)
412 l = len(data)
413 while off < l:
413 while off < l:
414 if l - off < s:
414 if l - off < s:
415 leftover = data[off:]
415 leftover = data[off:]
416 break
416 break
417 cur = data[off:off + s]
417 cur = data[off:off + s]
418 off += s
418 off += s
419 e = struct.unpack(self.indexformat, cur)
419 e = struct.unpack(self.indexformat, cur)
420 self.index.append(e)
420 self.index.append(e)
421 self.nodemap[e[-1]] = n
421 self.nodemap[e[-1]] = n
422 n += 1
422 n += 1
423 if inline:
423 if inline:
424 off += e[1]
424 off += e[1]
425 if off > l:
425 if off > l:
426 # some things don't seek well, just read it
426 # some things don't seek well, just read it
427 fp.read(off - l)
427 fp.read(off - l)
428 if not st:
428 if not st:
429 break
429 break
430
430
431
431
432 def ngoffset(self, q):
432 def ngoffset(self, q):
433 if q & 0xFFFF:
433 if q & 0xFFFF:
434 raise RevlogError(_('%s: incompatible revision flag %x') %
434 raise RevlogError(_('%s: incompatible revision flag %x') %
435 (self.indexfile, q))
435 (self.indexfile, q))
436 return long(q >> 16)
436 return long(q >> 16)
437
437
438 def ngtype(self, q):
438 def ngtype(self, q):
439 return int(q & 0xFFFF)
439 return int(q & 0xFFFF)
440
440
441 def offset_type(self, offset, type):
441 def offset_type(self, offset, type):
442 return long(long(offset) << 16 | type)
442 return long(long(offset) << 16 | type)
443
443
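A small self-contained sketch of the packed first index field handled by the three helpers above: the data-file offset lives in the high bits, the 16 type/flag bits in the low ones.

packed = (1234L << 16) | 0       # what offset_type(1234, 0) produces
assert packed >> 16 == 1234      # ngoffset() recovers the offset (flags must be 0)
assert packed & 0xFFFF == 0      # ngtype() recovers the type/flags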
444 def loadindex(self, start, end):
444 def loadindex(self, start, end):
445 """load a block of indexes all at once from the lazy parser"""
445 """load a block of indexes all at once from the lazy parser"""
446 if isinstance(self.index, lazyindex):
446 if isinstance(self.index, lazyindex):
447 self.index.p.loadindex(start, end)
447 self.index.p.loadindex(start, end)
448
448
449 def loadindexmap(self):
449 def loadindexmap(self):
450 """loads both the map and the index from the lazy parser"""
450 """loads both the map and the index from the lazy parser"""
451 if isinstance(self.index, lazyindex):
451 if isinstance(self.index, lazyindex):
452 p = self.index.p
452 p = self.index.p
453 p.loadindex()
453 p.loadindex()
454 self.nodemap = p.map
454 self.nodemap = p.map
455
455
456 def loadmap(self):
456 def loadmap(self):
457 """loads the map from the lazy parser"""
457 """loads the map from the lazy parser"""
458 if isinstance(self.nodemap, lazymap):
458 if isinstance(self.nodemap, lazymap):
459 self.nodemap.p.loadmap()
459 self.nodemap.p.loadmap()
460 self.nodemap = self.nodemap.p.map
460 self.nodemap = self.nodemap.p.map
461
461
462 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
462 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
463 def tip(self): return self.node(len(self.index) - 1)
463 def tip(self): return self.node(len(self.index) - 1)
464 def count(self): return len(self.index)
464 def count(self): return len(self.index)
465 def node(self, rev):
465 def node(self, rev):
466 return (rev < 0) and nullid or self.index[rev][-1]
466 return (rev < 0) and nullid or self.index[rev][-1]
467 def rev(self, node):
467 def rev(self, node):
468 try:
468 try:
469 return self.nodemap[node]
469 return self.nodemap[node]
470 except KeyError:
470 except KeyError:
471 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
471 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
472 def linkrev(self, node):
472 def linkrev(self, node):
473 return (node == nullid) and -1 or self.index[self.rev(node)][-4]
473 return (node == nullid) and -1 or self.index[self.rev(node)][-4]
474 def parents(self, node):
474 def parents(self, node):
475 if node == nullid: return (nullid, nullid)
475 if node == nullid: return (nullid, nullid)
476 r = self.rev(node)
476 r = self.rev(node)
477 d = self.index[r][-3:-1]
477 d = self.index[r][-3:-1]
478 if self.version == REVLOGV0:
478 if self.version == REVLOGV0:
479 return d
479 return d
480 return [ self.node(x) for x in d ]
480 return [ self.node(x) for x in d ]
481 def parentrevs(self, rev):
481 def parentrevs(self, rev):
482 if rev == -1:
482 if rev == -1:
483 return (-1, -1)
483 return (-1, -1)
484 d = self.index[rev][-3:-1]
484 d = self.index[rev][-3:-1]
485 if self.version == REVLOGV0:
485 if self.version == REVLOGV0:
486 return [ self.rev(x) for x in d ]
486 return [ self.rev(x) for x in d ]
487 return d
487 return d
488 def start(self, rev):
488 def start(self, rev):
489 if rev < 0:
489 if rev < 0:
490 return -1
490 return -1
491 if self.version != REVLOGV0:
491 if self.version != REVLOGV0:
492 return self.ngoffset(self.index[rev][0])
492 return self.ngoffset(self.index[rev][0])
493 return self.index[rev][0]
493 return self.index[rev][0]
494
494
495 def end(self, rev): return self.start(rev) + self.length(rev)
495 def end(self, rev): return self.start(rev) + self.length(rev)
496
496
497 def size(self, rev):
497 def size(self, rev):
498 """return the length of the uncompressed text for a given revision"""
498 """return the length of the uncompressed text for a given revision"""
499 l = -1
499 l = -1
500 if self.version != REVLOGV0:
500 if self.version != REVLOGV0:
501 l = self.index[rev][2]
501 l = self.index[rev][2]
502 if l >= 0:
502 if l >= 0:
503 return l
503 return l
504
504
505 t = self.revision(self.node(rev))
505 t = self.revision(self.node(rev))
506 return len(t)
506 return len(t)
507
507
508 # alternate implementation. The advantage of this code is that it
508 # alternate implementation. The advantage of this code is that it
509 # will be faster for a single revision. But, the results are not
509 # will be faster for a single revision. But, the results are not
510 # cached, so finding the size of every revision will be slower.
510 # cached, so finding the size of every revision will be slower.
511 """
511 """
512 if self.cache and self.cache[1] == rev:
512 if self.cache and self.cache[1] == rev:
513 return len(self.cache[2])
513 return len(self.cache[2])
514
514
515 base = self.base(rev)
515 base = self.base(rev)
516 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
516 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
517 base = self.cache[1]
517 base = self.cache[1]
518 text = self.cache[2]
518 text = self.cache[2]
519 else:
519 else:
520 text = self.revision(self.node(base))
520 text = self.revision(self.node(base))
521
521
522 l = len(text)
522 l = len(text)
523 for x in xrange(base + 1, rev + 1):
523 for x in xrange(base + 1, rev + 1):
524 l = mdiff.patchedsize(l, self.chunk(x))
524 l = mdiff.patchedsize(l, self.chunk(x))
525 return l
525 return l
526 """
526 """
527
527
528 def length(self, rev):
528 def length(self, rev):
529 if rev < 0:
529 if rev < 0:
530 return 0
530 return 0
531 else:
531 else:
532 return self.index[rev][1]
532 return self.index[rev][1]
533 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
533 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
534
534
535 def reachable(self, rev, stop=None):
535 def reachable(self, rev, stop=None):
536 reachable = {}
536 reachable = {}
537 visit = [rev]
537 visit = [rev]
538 reachable[rev] = 1
538 reachable[rev] = 1
539 if stop:
539 if stop:
540 stopn = self.rev(stop)
540 stopn = self.rev(stop)
541 else:
541 else:
542 stopn = 0
542 stopn = 0
543 while visit:
543 while visit:
544 n = visit.pop(0)
544 n = visit.pop(0)
545 if n == stop:
545 if n == stop:
546 continue
546 continue
547 if n == nullid:
547 if n == nullid:
548 continue
548 continue
549 for p in self.parents(n):
549 for p in self.parents(n):
550 if self.rev(p) < stopn:
550 if self.rev(p) < stopn:
551 continue
551 continue
552 if p not in reachable:
552 if p not in reachable:
553 reachable[p] = 1
553 reachable[p] = 1
554 visit.append(p)
554 visit.append(p)
555 return reachable
555 return reachable
556
556
557 def nodesbetween(self, roots=None, heads=None):
557 def nodesbetween(self, roots=None, heads=None):
558 """Return a tuple containing three elements. Elements 1 and 2 contain
558 """Return a tuple containing three elements. Elements 1 and 2 contain
559 the final lists of bases and heads after all the unreachable ones have been
559 the final lists of bases and heads after all the unreachable ones have been
560 pruned. Element 0 contains a topologically sorted list of all
560 pruned. Element 0 contains a topologically sorted list of all
562 nodes that satisfy these constraints:
562 nodes that satisfy these constraints:
563 1. All nodes must be descended from a node in roots (the nodes on
563 1. All nodes must be descended from a node in roots (the nodes on
564 roots are considered descended from themselves).
564 roots are considered descended from themselves).
565 2. All nodes must also be ancestors of a node in heads (the nodes in
565 2. All nodes must also be ancestors of a node in heads (the nodes in
566 heads are considered to be their own ancestors).
566 heads are considered to be their own ancestors).
567
567
568 If roots is unspecified, nullid is assumed as the only root.
568 If roots is unspecified, nullid is assumed as the only root.
569 If heads is unspecified, it is taken to be the output of the
569 If heads is unspecified, it is taken to be the output of the
570 heads method (i.e. a list of all nodes in the repository that
570 heads method (i.e. a list of all nodes in the repository that
571 have no children)."""
571 have no children)."""
572 nonodes = ([], [], [])
572 nonodes = ([], [], [])
573 if roots is not None:
573 if roots is not None:
574 roots = list(roots)
574 roots = list(roots)
575 if not roots:
575 if not roots:
576 return nonodes
576 return nonodes
577 lowestrev = min([self.rev(n) for n in roots])
577 lowestrev = min([self.rev(n) for n in roots])
578 else:
578 else:
579 roots = [nullid] # Everybody's a descendent of nullid
579 roots = [nullid] # Everybody's a descendent of nullid
580 lowestrev = -1
580 lowestrev = -1
581 if (lowestrev == -1) and (heads is None):
581 if (lowestrev == -1) and (heads is None):
582 # We want _all_ the nodes!
582 # We want _all_ the nodes!
583 return ([self.node(r) for r in xrange(0, self.count())],
583 return ([self.node(r) for r in xrange(0, self.count())],
584 [nullid], list(self.heads()))
584 [nullid], list(self.heads()))
585 if heads is None:
585 if heads is None:
586 # All nodes are ancestors, so the latest ancestor is the last
586 # All nodes are ancestors, so the latest ancestor is the last
587 # node.
587 # node.
588 highestrev = self.count() - 1
588 highestrev = self.count() - 1
589 # Set ancestors to None to signal that every node is an ancestor.
589 # Set ancestors to None to signal that every node is an ancestor.
590 ancestors = None
590 ancestors = None
591 # Set heads to an empty dictionary for later discovery of heads
591 # Set heads to an empty dictionary for later discovery of heads
592 heads = {}
592 heads = {}
593 else:
593 else:
594 heads = list(heads)
594 heads = list(heads)
595 if not heads:
595 if not heads:
596 return nonodes
596 return nonodes
597 ancestors = {}
597 ancestors = {}
598 # Start at the top and keep marking parents until we're done.
598 # Start at the top and keep marking parents until we're done.
599 nodestotag = heads[:]
599 nodestotag = heads[:]
600 # Turn heads into a dictionary so we can remove 'fake' heads.
600 # Turn heads into a dictionary so we can remove 'fake' heads.
601 # Also, later we will be using it to filter out the heads we can't
601 # Also, later we will be using it to filter out the heads we can't
602 # find from roots.
602 # find from roots.
603 heads = dict.fromkeys(heads, 0)
603 heads = dict.fromkeys(heads, 0)
604 # Remember where the top was so we can use it as a limit later.
604 # Remember where the top was so we can use it as a limit later.
605 highestrev = max([self.rev(n) for n in nodestotag])
605 highestrev = max([self.rev(n) for n in nodestotag])
606 while nodestotag:
606 while nodestotag:
607 # grab a node to tag
607 # grab a node to tag
608 n = nodestotag.pop()
608 n = nodestotag.pop()
609 # Never tag nullid
609 # Never tag nullid
610 if n == nullid:
610 if n == nullid:
611 continue
611 continue
612 # A node's revision number represents its place in a
612 # A node's revision number represents its place in a
613 # topologically sorted list of nodes.
613 # topologically sorted list of nodes.
614 r = self.rev(n)
614 r = self.rev(n)
615 if r >= lowestrev:
615 if r >= lowestrev:
616 if n not in ancestors:
616 if n not in ancestors:
617 # If we are possibly a descendent of one of the roots
617 # If we are possibly a descendent of one of the roots
618 # and we haven't already been marked as an ancestor
618 # and we haven't already been marked as an ancestor
619 ancestors[n] = 1 # Mark as ancestor
619 ancestors[n] = 1 # Mark as ancestor
620 # Add non-nullid parents to list of nodes to tag.
620 # Add non-nullid parents to list of nodes to tag.
621 nodestotag.extend([p for p in self.parents(n) if
621 nodestotag.extend([p for p in self.parents(n) if
622 p != nullid])
622 p != nullid])
623 elif n in heads: # We've seen it before, is it a fake head?
623 elif n in heads: # We've seen it before, is it a fake head?
624 # So it is, real heads should not be the ancestors of
624 # So it is, real heads should not be the ancestors of
625 # any other heads.
625 # any other heads.
626 heads.pop(n)
626 heads.pop(n)
627 if not ancestors:
627 if not ancestors:
628 return nonodes
628 return nonodes
629 # Now that we have our set of ancestors, we want to remove any
629 # Now that we have our set of ancestors, we want to remove any
630 # roots that are not ancestors.
630 # roots that are not ancestors.
631
631
632 # If one of the roots was nullid, everything is included anyway.
632 # If one of the roots was nullid, everything is included anyway.
633 if lowestrev > -1:
633 if lowestrev > -1:
634 # But, since we weren't, let's recompute the lowest rev to not
634 # But, since we weren't, let's recompute the lowest rev to not
635 # include roots that aren't ancestors.
635 # include roots that aren't ancestors.
636
636
637 # Filter out roots that aren't ancestors of heads
637 # Filter out roots that aren't ancestors of heads
638 roots = [n for n in roots if n in ancestors]
638 roots = [n for n in roots if n in ancestors]
639 # Recompute the lowest revision
639 # Recompute the lowest revision
640 if roots:
640 if roots:
641 lowestrev = min([self.rev(n) for n in roots])
641 lowestrev = min([self.rev(n) for n in roots])
642 else:
642 else:
643 # No more roots? Return empty list
643 # No more roots? Return empty list
644 return nonodes
644 return nonodes
645 else:
645 else:
646 # We are descending from nullid, and don't need to care about
646 # We are descending from nullid, and don't need to care about
647 # any other roots.
647 # any other roots.
648 lowestrev = -1
648 lowestrev = -1
649 roots = [nullid]
649 roots = [nullid]
650 # Transform our roots list into a 'set' (i.e. a dictionary where the
650 # Transform our roots list into a 'set' (i.e. a dictionary where the
651 # values don't matter.)
651 # values don't matter.)
652 descendents = dict.fromkeys(roots, 1)
652 descendents = dict.fromkeys(roots, 1)
653 # Also, keep the original roots so we can filter out roots that aren't
653 # Also, keep the original roots so we can filter out roots that aren't
654 # 'real' roots (i.e. are descended from other roots).
654 # 'real' roots (i.e. are descended from other roots).
655 roots = descendents.copy()
655 roots = descendents.copy()
656 # Our topologically sorted list of output nodes.
656 # Our topologically sorted list of output nodes.
657 orderedout = []
657 orderedout = []
658 # Don't start at nullid since we don't want nullid in our output list,
658 # Don't start at nullid since we don't want nullid in our output list,
659 # and if nullid shows up in descendents, empty parents will look like
659 # and if nullid shows up in descendents, empty parents will look like
660 # they're descendents.
660 # they're descendents.
661 for r in xrange(max(lowestrev, 0), highestrev + 1):
661 for r in xrange(max(lowestrev, 0), highestrev + 1):
662 n = self.node(r)
662 n = self.node(r)
663 isdescendent = False
663 isdescendent = False
664 if lowestrev == -1: # Everybody is a descendent of nullid
664 if lowestrev == -1: # Everybody is a descendent of nullid
665 isdescendent = True
665 isdescendent = True
666 elif n in descendents:
666 elif n in descendents:
667 # n is already a descendent
667 # n is already a descendent
668 isdescendent = True
668 isdescendent = True
669 # This check only needs to be done here because all the roots
669 # This check only needs to be done here because all the roots
670 # will start being marked as descendents before the loop.
670 # will start being marked as descendents before the loop.
671 if n in roots:
671 if n in roots:
672 # If n was a root, check if it's a 'real' root.
672 # If n was a root, check if it's a 'real' root.
673 p = tuple(self.parents(n))
673 p = tuple(self.parents(n))
674 # If any of its parents are descendents, it's not a root.
674 # If any of its parents are descendents, it's not a root.
675 if (p[0] in descendents) or (p[1] in descendents):
675 if (p[0] in descendents) or (p[1] in descendents):
676 roots.pop(n)
676 roots.pop(n)
677 else:
677 else:
678 p = tuple(self.parents(n))
678 p = tuple(self.parents(n))
679 # A node is a descendent if either of its parents are
679 # A node is a descendent if either of its parents are
680 # descendents. (We seeded the descendents dict with the roots
680 # descendents. (We seeded the descendents dict with the roots
681 # up there, remember?)
681 # up there, remember?)
682 if (p[0] in descendents) or (p[1] in descendents):
682 if (p[0] in descendents) or (p[1] in descendents):
683 descendents[n] = 1
683 descendents[n] = 1
684 isdescendent = True
684 isdescendent = True
685 if isdescendent and ((ancestors is None) or (n in ancestors)):
685 if isdescendent and ((ancestors is None) or (n in ancestors)):
686 # Only include nodes that are both descendents and ancestors.
686 # Only include nodes that are both descendents and ancestors.
687 orderedout.append(n)
687 orderedout.append(n)
688 if (ancestors is not None) and (n in heads):
688 if (ancestors is not None) and (n in heads):
689 # We're trying to figure out which heads are reachable
689 # We're trying to figure out which heads are reachable
690 # from roots.
690 # from roots.
691 # Mark this head as having been reached
691 # Mark this head as having been reached
692 heads[n] = 1
692 heads[n] = 1
693 elif ancestors is None:
693 elif ancestors is None:
694 # Otherwise, we're trying to discover the heads.
694 # Otherwise, we're trying to discover the heads.
695 # Assume this is a head because if it isn't, the next step
695 # Assume this is a head because if it isn't, the next step
696 # will eventually remove it.
696 # will eventually remove it.
697 heads[n] = 1
697 heads[n] = 1
698 # But, obviously its parents aren't.
698 # But, obviously its parents aren't.
699 for p in self.parents(n):
699 for p in self.parents(n):
700 heads.pop(p, None)
700 heads.pop(p, None)
701 heads = [n for n in heads.iterkeys() if heads[n] != 0]
701 heads = [n for n in heads.iterkeys() if heads[n] != 0]
702 roots = roots.keys()
702 roots = roots.keys()
703 assert orderedout
703 assert orderedout
704 assert roots
704 assert roots
705 assert heads
705 assert heads
706 return (orderedout, roots, heads)
706 return (orderedout, roots, heads)
707
707
708 def heads(self, start=None):
708 def heads(self, start=None):
709 """return the list of all nodes that have no children
709 """return the list of all nodes that have no children
710
710
711 if start is specified, only heads that are descendants of
711 if start is specified, only heads that are descendants of
712 start will be returned
712 start will be returned
713
713
714 """
714 """
715 if start is None:
715 if start is None:
716 start = nullid
716 start = nullid
717 startrev = self.rev(start)
717 startrev = self.rev(start)
718 reachable = {startrev: 1}
718 reachable = {startrev: 1}
719 heads = {startrev: 1}
719 heads = {startrev: 1}
720
720
721 parentrevs = self.parentrevs
721 parentrevs = self.parentrevs
722 for r in xrange(startrev + 1, self.count()):
722 for r in xrange(startrev + 1, self.count()):
723 for p in parentrevs(r):
723 for p in parentrevs(r):
724 if p in reachable:
724 if p in reachable:
725 reachable[r] = 1
725 reachable[r] = 1
726 heads[r] = 1
726 heads[r] = 1
727 if p in heads:
727 if p in heads:
728 del heads[p]
728 del heads[p]
729 return [self.node(r) for r in heads]
729 return [self.node(r) for r in heads]
730
730
731 def children(self, node):
731 def children(self, node):
732 """find the children of a given node"""
732 """find the children of a given node"""
733 c = []
733 c = []
734 p = self.rev(node)
734 p = self.rev(node)
735 for r in range(p + 1, self.count()):
735 for r in range(p + 1, self.count()):
736 n = self.node(r)
736 n = self.node(r)
737 for pn in self.parents(n):
737 for pn in self.parents(n):
738 if pn == node:
738 if pn == node:
739 c.append(n)
739 c.append(n)
740 continue
740 continue
741 elif pn == nullid:
741 elif pn == nullid:
742 continue
742 continue
743 return c
743 return c
744
744
745 def lookup(self, id):
745 def lookup(self, id):
746 """locate a node based on revision number or subset of hex nodeid"""
746 """locate a node based on revision number or subset of hex nodeid"""
747 if type(id) == type(0):
747 if type(id) == type(0):
748 return self.node(id)
748 return self.node(id)
749 try:
749 try:
750 rev = int(id)
750 rev = int(id)
751 if str(rev) != id: raise ValueError
751 if str(rev) != id: raise ValueError
752 if rev < 0: rev = self.count() + rev
752 if rev < 0: rev = self.count() + rev
753 if rev < 0 or rev >= self.count(): raise ValueError
753 if rev < 0 or rev >= self.count(): raise ValueError
754 return self.node(rev)
754 return self.node(rev)
755 except (ValueError, OverflowError):
755 except (ValueError, OverflowError):
756 c = []
756 c = []
757 for n in self.nodemap:
757 for n in self.nodemap:
758 if hex(n).startswith(id):
758 if hex(n).startswith(id):
759 c.append(n)
759 c.append(n)
760 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
760 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
761 if len(c) == 1: return c[0]
761 if len(c) == 1: return c[0]
762
762
763 # might need fixing if we change hash lengths
763 # might need fixing if we change hash lengths
764 if len(id) == 20 and id in self.nodemap:
764 if len(id) == 20 and id in self.nodemap:
765 return id
765 return id
766
766
767 raise RevlogError(_("No match found"))
767 raise RevlogError(_("No match found"))
768
768
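Hypothetical uses of lookup() above, for illustration only (the revlog instance r and the hex prefix are made up):

# r.lookup(0)           -> node of revision 0 (plain ints pass straight through)
# r.lookup("-1")        -> tip node; negative decimal strings count from the end
# r.lookup("d5a1c2")    -> unique hex-prefix match, else "Ambiguous identifier"
# r.lookup(r.tip())     -> a full 20-byte binary node is returned unchanged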
769 def cmp(self, node, text):
770 """compare text with a given file revision"""
771 p1, p2 = self.parents(node)
772 return hash(text, p1, p2) != node
773
774 def makenode(self, node, text):
775 """calculate a file nodeid for text, descended or possibly
776 unchanged from node"""
777
778 if self.cmp(node, text):
779 return hash(text, node, nullid)
780 return node
781
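A note on the two methods added above: cmp() follows the strcmp convention, returning a true value when text differs from what is stored under node, and makenode() builds on that. A hedged usage sketch (fl and n are hypothetical):

# if fl.cmp(n, text):                  # contents changed
#     newnode = fl.makenode(n, text)   # == hash(text, n, nullid)
# else:
#     newnode = n                      # unchanged, keep the existing nodeid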
769 def diff(self, a, b):
782 def diff(self, a, b):
770 """return a delta between two revisions"""
783 """return a delta between two revisions"""
771 return mdiff.textdiff(a, b)
784 return mdiff.textdiff(a, b)
772
785
773 def patches(self, t, pl):
786 def patches(self, t, pl):
774 """apply a list of patches to a string"""
787 """apply a list of patches to a string"""
775 return mdiff.patches(t, pl)
788 return mdiff.patches(t, pl)
776
789
777 def chunk(self, rev, df=None, cachelen=4096):
790 def chunk(self, rev, df=None, cachelen=4096):
778 start, length = self.start(rev), self.length(rev)
791 start, length = self.start(rev), self.length(rev)
779 inline = self.inlinedata()
792 inline = self.inlinedata()
780 if inline:
793 if inline:
781 start += (rev + 1) * struct.calcsize(self.indexformat)
794 start += (rev + 1) * struct.calcsize(self.indexformat)
782 end = start + length
795 end = start + length
783 def loadcache(df):
796 def loadcache(df):
784 cache_length = max(cachelen, length) # 4k
797 cache_length = max(cachelen, length) # 4k
785 if not df:
798 if not df:
786 if inline:
799 if inline:
787 df = self.opener(self.indexfile)
800 df = self.opener(self.indexfile)
788 else:
801 else:
789 df = self.opener(self.datafile)
802 df = self.opener(self.datafile)
790 df.seek(start)
803 df.seek(start)
791 self.chunkcache = (start, df.read(cache_length))
804 self.chunkcache = (start, df.read(cache_length))
792
805
793 if not self.chunkcache:
806 if not self.chunkcache:
794 loadcache(df)
807 loadcache(df)
795
808
796 cache_start = self.chunkcache[0]
809 cache_start = self.chunkcache[0]
797 cache_end = cache_start + len(self.chunkcache[1])
810 cache_end = cache_start + len(self.chunkcache[1])
798 if start >= cache_start and end <= cache_end:
811 if start >= cache_start and end <= cache_end:
799 # it is cached
812 # it is cached
800 offset = start - cache_start
813 offset = start - cache_start
801 else:
814 else:
802 loadcache(df)
815 loadcache(df)
803 offset = 0
816 offset = 0
804
817
805 #def checkchunk():
818 #def checkchunk():
806 # df = self.opener(self.datafile)
819 # df = self.opener(self.datafile)
807 # df.seek(start)
820 # df.seek(start)
808 # return df.read(length)
821 # return df.read(length)
809 #assert s == checkchunk()
822 #assert s == checkchunk()
810 return decompress(self.chunkcache[1][offset:offset + length])
823 return decompress(self.chunkcache[1][offset:offset + length])
811
824
812 def delta(self, node):
825 def delta(self, node):
813 """return or calculate a delta between a node and its predecessor"""
826 """return or calculate a delta between a node and its predecessor"""
814 r = self.rev(node)
827 r = self.rev(node)
815 return self.revdiff(r - 1, r)
828 return self.revdiff(r - 1, r)
816
829
817 def revdiff(self, rev1, rev2):
830 def revdiff(self, rev1, rev2):
818 """return or calculate a delta between two revisions"""
831 """return or calculate a delta between two revisions"""
819 b1 = self.base(rev1)
832 b1 = self.base(rev1)
820 b2 = self.base(rev2)
833 b2 = self.base(rev2)
821 if b1 == b2 and rev1 + 1 == rev2:
834 if b1 == b2 and rev1 + 1 == rev2:
822 return self.chunk(rev2)
835 return self.chunk(rev2)
823 else:
836 else:
824 return self.diff(self.revision(self.node(rev1)),
837 return self.diff(self.revision(self.node(rev1)),
825 self.revision(self.node(rev2)))
838 self.revision(self.node(rev2)))
826
839
827 def revision(self, node):
840 def revision(self, node):
828 """return an uncompressed revision of a given"""
841 """return an uncompressed revision of a given"""
829 if node == nullid: return ""
842 if node == nullid: return ""
830 if self.cache and self.cache[0] == node: return self.cache[2]
843 if self.cache and self.cache[0] == node: return self.cache[2]
831
844
832 # look up what we need to read
845 # look up what we need to read
833 text = None
846 text = None
834 rev = self.rev(node)
847 rev = self.rev(node)
835 base = self.base(rev)
848 base = self.base(rev)
836
849
837 if self.inlinedata():
850 if self.inlinedata():
838 # we probably have the whole chunk cached
851 # we probably have the whole chunk cached
839 df = None
852 df = None
840 else:
853 else:
841 df = self.opener(self.datafile)
854 df = self.opener(self.datafile)
842
855
843 # do we have useful data cached?
856 # do we have useful data cached?
844 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
857 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
845 base = self.cache[1]
858 base = self.cache[1]
846 text = self.cache[2]
859 text = self.cache[2]
847 self.loadindex(base, rev + 1)
860 self.loadindex(base, rev + 1)
848 else:
861 else:
849 self.loadindex(base, rev + 1)
862 self.loadindex(base, rev + 1)
850 text = self.chunk(base, df=df)
863 text = self.chunk(base, df=df)
851
864
852 bins = []
865 bins = []
853 for r in xrange(base + 1, rev + 1):
866 for r in xrange(base + 1, rev + 1):
854 bins.append(self.chunk(r, df=df))
867 bins.append(self.chunk(r, df=df))
855
868
856 text = self.patches(text, bins)
869 text = self.patches(text, bins)
857
870
858 p1, p2 = self.parents(node)
871 p1, p2 = self.parents(node)
859 if node != hash(text, p1, p2):
872 if node != hash(text, p1, p2):
860 raise RevlogError(_("integrity check failed on %s:%d")
873 raise RevlogError(_("integrity check failed on %s:%d")
861 % (self.datafile, rev))
874 % (self.datafile, rev))
862
875
863 self.cache = (node, rev, text)
876 self.cache = (node, rev, text)
864 return text
877 return text
865
878
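What revision() above boils down to, as a comment-level sketch of the delta chain walk:

# base = self.base(rev)                     # nearest full snapshot
# text = self.chunk(base)                   # decompressed snapshot text
# for r in base + 1 .. rev:
#     apply self.chunk(r) as a delta        # done in one mdiff.patches() call
# accept text only if hash(text, p1, p2) == node   # integrity check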
866 def checkinlinesize(self, tr, fp=None):
879 def checkinlinesize(self, tr, fp=None):
867 if not self.inlinedata():
880 if not self.inlinedata():
868 return
881 return
869 if not fp:
882 if not fp:
870 fp = self.opener(self.indexfile, 'r')
883 fp = self.opener(self.indexfile, 'r')
871 fp.seek(0, 2)
884 fp.seek(0, 2)
872 size = fp.tell()
885 size = fp.tell()
873 if size < 131072:
886 if size < 131072:
874 return
887 return
875 trinfo = tr.find(self.indexfile)
888 trinfo = tr.find(self.indexfile)
876 if trinfo == None:
889 if trinfo == None:
877 raise RevlogError(_("%s not found in the transaction") %
890 raise RevlogError(_("%s not found in the transaction") %
878 self.indexfile)
891 self.indexfile)
879
892
880 trindex = trinfo[2]
893 trindex = trinfo[2]
881 dataoff = self.start(trindex)
894 dataoff = self.start(trindex)
882
895
883 tr.add(self.datafile, dataoff)
896 tr.add(self.datafile, dataoff)
884 df = self.opener(self.datafile, 'w')
897 df = self.opener(self.datafile, 'w')
885 calc = struct.calcsize(self.indexformat)
898 calc = struct.calcsize(self.indexformat)
886 for r in xrange(self.count()):
899 for r in xrange(self.count()):
887 start = self.start(r) + (r + 1) * calc
900 start = self.start(r) + (r + 1) * calc
888 length = self.length(r)
901 length = self.length(r)
889 fp.seek(start)
902 fp.seek(start)
890 d = fp.read(length)
903 d = fp.read(length)
891 df.write(d)
904 df.write(d)
892 fp.close()
905 fp.close()
893 df.close()
906 df.close()
894 fp = self.opener(self.indexfile, 'w', atomictemp=True)
907 fp = self.opener(self.indexfile, 'w', atomictemp=True)
895 self.version &= ~(REVLOGNGINLINEDATA)
908 self.version &= ~(REVLOGNGINLINEDATA)
896 if self.count():
909 if self.count():
897 x = self.index[0]
910 x = self.index[0]
898 e = struct.pack(self.indexformat, *x)[4:]
911 e = struct.pack(self.indexformat, *x)[4:]
899 l = struct.pack(versionformat, self.version)
912 l = struct.pack(versionformat, self.version)
900 fp.write(l)
913 fp.write(l)
901 fp.write(e)
914 fp.write(e)
902
915
903 for i in xrange(1, self.count()):
916 for i in xrange(1, self.count()):
904 x = self.index[i]
917 x = self.index[i]
905 e = struct.pack(self.indexformat, *x)
918 e = struct.pack(self.indexformat, *x)
906 fp.write(e)
919 fp.write(e)
907
920
908 # if we don't call rename, the temp file will never replace the
921 # if we don't call rename, the temp file will never replace the
909 # real index
922 # real index
910 fp.rename()
923 fp.rename()
911
924
912 tr.replace(self.indexfile, trindex * calc)
925 tr.replace(self.indexfile, trindex * calc)
913 self.chunkcache = None
926 self.chunkcache = None
914
927
915 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
928 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
916 """add a revision to the log
929 """add a revision to the log
917
930
918 text - the revision data to add
931 text - the revision data to add
919 transaction - the transaction object used for rollback
932 transaction - the transaction object used for rollback
920 link - the linkrev data to add
933 link - the linkrev data to add
921 p1, p2 - the parent nodeids of the revision
934 p1, p2 - the parent nodeids of the revision
922 d - an optional precomputed delta
935 d - an optional precomputed delta
923 """
936 """
924 if text is None: text = ""
937 if text is None: text = ""
925 if p1 is None: p1 = self.tip()
938 if p1 is None: p1 = self.tip()
926 if p2 is None: p2 = nullid
939 if p2 is None: p2 = nullid
927
940
928 node = hash(text, p1, p2)
941 node = hash(text, p1, p2)
929
942
930 if node in self.nodemap:
943 if node in self.nodemap:
931 return node
944 return node
932
945
933 n = self.count()
946 n = self.count()
934 t = n - 1
947 t = n - 1
935
948
936 if n:
949 if n:
937 base = self.base(t)
950 base = self.base(t)
938 start = self.start(base)
951 start = self.start(base)
939 end = self.end(t)
952 end = self.end(t)
940 if not d:
953 if not d:
941 prev = self.revision(self.tip())
954 prev = self.revision(self.tip())
942 d = self.diff(prev, str(text))
955 d = self.diff(prev, str(text))
943 data = compress(d)
956 data = compress(d)
944 l = len(data[1]) + len(data[0])
957 l = len(data[1]) + len(data[0])
945 dist = end - start + l
958 dist = end - start + l
946
959
947 # full versions are inserted when the needed deltas
960 # full versions are inserted when the needed deltas
948 # become comparable to the uncompressed text
961 # become comparable to the uncompressed text
949 if not n or dist > len(text) * 2:
962 if not n or dist > len(text) * 2:
950 data = compress(text)
963 data = compress(text)
951 l = len(data[1]) + len(data[0])
964 l = len(data[1]) + len(data[0])
952 base = n
965 base = n
953 else:
966 else:
954 base = self.base(t)
967 base = self.base(t)
955
968
956 offset = 0
969 offset = 0
957 if t >= 0:
970 if t >= 0:
958 offset = self.end(t)
971 offset = self.end(t)
959
972
960 if self.version == REVLOGV0:
973 if self.version == REVLOGV0:
961 e = (offset, l, base, link, p1, p2, node)
974 e = (offset, l, base, link, p1, p2, node)
962 else:
975 else:
963 e = (self.offset_type(offset, 0), l, len(text),
976 e = (self.offset_type(offset, 0), l, len(text),
964 base, link, self.rev(p1), self.rev(p2), node)
977 base, link, self.rev(p1), self.rev(p2), node)
965
978
966 self.index.append(e)
979 self.index.append(e)
967 self.nodemap[node] = n
980 self.nodemap[node] = n
968 entry = struct.pack(self.indexformat, *e)
981 entry = struct.pack(self.indexformat, *e)
969
982
970 if not self.inlinedata():
983 if not self.inlinedata():
971 transaction.add(self.datafile, offset)
984 transaction.add(self.datafile, offset)
972 transaction.add(self.indexfile, n * len(entry))
985 transaction.add(self.indexfile, n * len(entry))
973 f = self.opener(self.datafile, "a")
986 f = self.opener(self.datafile, "a")
974 if data[0]:
987 if data[0]:
975 f.write(data[0])
988 f.write(data[0])
976 f.write(data[1])
989 f.write(data[1])
977 f.close()
990 f.close()
978 f = self.opener(self.indexfile, "a")
991 f = self.opener(self.indexfile, "a")
979 else:
992 else:
980 f = self.opener(self.indexfile, "a+")
993 f = self.opener(self.indexfile, "a+")
981 f.seek(0, 2)
994 f.seek(0, 2)
982 transaction.add(self.indexfile, f.tell(), self.count() - 1)
995 transaction.add(self.indexfile, f.tell(), self.count() - 1)
983
996
984 if len(self.index) == 1 and self.version != REVLOGV0:
997 if len(self.index) == 1 and self.version != REVLOGV0:
985 l = struct.pack(versionformat, self.version)
998 l = struct.pack(versionformat, self.version)
986 f.write(l)
999 f.write(l)
987 entry = entry[4:]
1000 entry = entry[4:]
988
1001
989 f.write(entry)
1002 f.write(entry)
990
1003
991 if self.inlinedata():
1004 if self.inlinedata():
992 f.write(data[0])
1005 f.write(data[0])
993 f.write(data[1])
1006 f.write(data[1])
994 self.checkinlinesize(transaction, f)
1007 self.checkinlinesize(transaction, f)
995
1008
996 self.cache = (node, n, text)
1009 self.cache = (node, n, text)
997 return node
1010 return node
998
1011
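A worked example, with made-up sizes, of the snapshot heuristic in addrevision() above:

# suppose the existing delta chain occupies end - start = 180 KB on disk and
# the new delta compresses to 30 KB, while the new full text is 90 KB:
#   dist        = 180*1024 + 30*1024 = 215040 bytes
#   2*len(text) = 2 * 90*1024        = 184320 bytes
# dist > 2*len(text), so a full compressed snapshot is written and base resets
# to this revision; otherwise only the 30 KB delta would have been appended.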
999 def ancestor(self, a, b):
1012 def ancestor(self, a, b):
1000 """calculate the least common ancestor of nodes a and b"""
1013 """calculate the least common ancestor of nodes a and b"""
1001
1014
1002 # start with some short cuts for the linear cases
1015 # start with some short cuts for the linear cases
1003 if a == b:
1016 if a == b:
1004 return a
1017 return a
1005 ra = self.rev(a)
1018 ra = self.rev(a)
1006 rb = self.rev(b)
1019 rb = self.rev(b)
1007 if ra < rb:
1020 if ra < rb:
1008 last = b
1021 last = b
1009 first = a
1022 first = a
1010 else:
1023 else:
1011 last = a
1024 last = a
1012 first = b
1025 first = b
1013
1026
1014 # reachable won't include stop in the list, so we have to use a parent
1027 # reachable won't include stop in the list, so we have to use a parent
1015 reachable = self.reachable(last, stop=self.parents(first)[0])
1028 reachable = self.reachable(last, stop=self.parents(first)[0])
1016 if first in reachable:
1029 if first in reachable:
1017 return first
1030 return first
1018
1031
1019 # calculate the distance of every node from root
1032 # calculate the distance of every node from root
1020 dist = {nullid: 0}
1033 dist = {nullid: 0}
1021 for i in xrange(self.count()):
1034 for i in xrange(self.count()):
1022 n = self.node(i)
1035 n = self.node(i)
1023 p1, p2 = self.parents(n)
1036 p1, p2 = self.parents(n)
1024 dist[n] = max(dist[p1], dist[p2]) + 1
1037 dist[n] = max(dist[p1], dist[p2]) + 1
1025
1038
1026 # traverse ancestors in order of decreasing distance from root
1039 # traverse ancestors in order of decreasing distance from root
1027 def ancestors(node):
1040 def ancestors(node):
1028 # we store negative distances because heap returns smallest member
1041 # we store negative distances because heap returns smallest member
1029 h = [(-dist[node], node)]
1042 h = [(-dist[node], node)]
1030 seen = {}
1043 seen = {}
1031 while h:
1044 while h:
1032 d, n = heapq.heappop(h)
1045 d, n = heapq.heappop(h)
1033 if n not in seen:
1046 if n not in seen:
1034 seen[n] = 1
1047 seen[n] = 1
1035 yield (-d, n)
1048 yield (-d, n)
1036 for p in self.parents(n):
1049 for p in self.parents(n):
1037 heapq.heappush(h, (-dist[p], p))
1050 heapq.heappush(h, (-dist[p], p))
1038
1051
1039 def generations(node):
1052 def generations(node):
1040 sg, s = None, {}
1053 sg, s = None, {}
1041 for g,n in ancestors(node):
1054 for g,n in ancestors(node):
1042 if g != sg:
1055 if g != sg:
1043 if sg:
1056 if sg:
1044 yield sg, s
1057 yield sg, s
1045 sg, s = g, {n:1}
1058 sg, s = g, {n:1}
1046 else:
1059 else:
1047 s[n] = 1
1060 s[n] = 1
1048 yield sg, s
1061 yield sg, s
1049
1062
1050 x = generations(a)
1063 x = generations(a)
1051 y = generations(b)
1064 y = generations(b)
1052 gx = x.next()
1065 gx = x.next()
1053 gy = y.next()
1066 gy = y.next()
1054
1067
1055 # increment each ancestor list until it is closer to root than
1068 # increment each ancestor list until it is closer to root than
1056 # the other, or they match
1069 # the other, or they match
1057 while 1:
1070 while 1:
1058 #print "ancestor gen %s %s" % (gx[0], gy[0])
1071 #print "ancestor gen %s %s" % (gx[0], gy[0])
1059 if gx[0] == gy[0]:
1072 if gx[0] == gy[0]:
1060 # find the intersection
1073 # find the intersection
1061 i = [ n for n in gx[1] if n in gy[1] ]
1074 i = [ n for n in gx[1] if n in gy[1] ]
1062 if i:
1075 if i:
1063 return i[0]
1076 return i[0]
1064 else:
1077 else:
1065 #print "next"
1078 #print "next"
1066 gy = y.next()
1079 gy = y.next()
1067 gx = x.next()
1080 gx = x.next()
1068 elif gx[0] < gy[0]:
1081 elif gx[0] < gy[0]:
1069 #print "next y"
1082 #print "next y"
1070 gy = y.next()
1083 gy = y.next()
1071 else:
1084 else:
1072 #print "next x"
1085 #print "next x"
1073 gx = x.next()
1086 gx = x.next()
1074
1087
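ancestor() above finds the least common ancestor by walking both nodes' ancestor sets in order of decreasing distance from the root and stopping at the first generation where the two sets intersect. The standalone sketch below shows the same distance-ordered walk on a made-up toy DAG; the parents table and node names are hypothetical, and the intersection is taken eagerly rather than generation by generation as in the code above:

    import heapq

    # hypothetical toy DAG: node -> (parent1, parent2); None means no parent
    parents = {'a': (None, None), 'b': ('a', None), 'c': ('a', None),
               'd': ('b', 'c'), 'e': ('b', None)}

    # distance of every node from the root, as in the dist[] loop above
    dist = {None: 0}
    for n in ('a', 'b', 'c', 'd', 'e'):     # topological order
        p1, p2 = parents[n]
        dist[n] = max(dist[p1], dist[p2]) + 1

    def ancestors(node):
        # negative distances because the heap pops the smallest member first
        h, seen = [(-dist[node], node)], set()
        while h:
            d, n = heapq.heappop(h)
            if n in seen:
                continue
            seen.add(n)
            yield -d, n
            for p in parents[n]:
                if p is not None:
                    heapq.heappush(h, (-dist[p], p))

    def lca(a, b):
        da = dict((n, d) for d, n in ancestors(a))
        db = dict((n, d) for d, n in ancestors(b))
        common = set(da) & set(db)
        return max(common, key=lambda n: da[n])  # deepest common ancestor

    assert lca('d', 'e') == 'b'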
1075 def group(self, nodelist, lookup, infocollect=None):
1088 def group(self, nodelist, lookup, infocollect=None):
1076 """calculate a delta group
1089 """calculate a delta group
1077
1090
1078 Given a list of changeset revs, return a set of deltas and
1091 Given a list of changeset revs, return a set of deltas and
1079 metadata corresponding to nodes. The first delta is
1092 metadata corresponding to nodes. The first delta is
1080 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1093 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1081 have this parent, as it has all history before these
1094 have this parent, as it has all history before these
1082 changesets. The parent is parent[0].
1095 changesets. The parent is parent[0].
1083 """
1096 """
1084 revs = [self.rev(n) for n in nodelist]
1097 revs = [self.rev(n) for n in nodelist]
1085
1098
1086 # if we don't have any revisions touched by these changesets, bail
1099 # if we don't have any revisions touched by these changesets, bail
1087 if not revs:
1100 if not revs:
1088 yield changegroup.closechunk()
1101 yield changegroup.closechunk()
1089 return
1102 return
1090
1103
1091 # add the parent of the first rev
1104 # add the parent of the first rev
1092 p = self.parents(self.node(revs[0]))[0]
1105 p = self.parents(self.node(revs[0]))[0]
1093 revs.insert(0, self.rev(p))
1106 revs.insert(0, self.rev(p))
1094
1107
1095 # build deltas
1108 # build deltas
1096 for d in xrange(0, len(revs) - 1):
1109 for d in xrange(0, len(revs) - 1):
1097 a, b = revs[d], revs[d + 1]
1110 a, b = revs[d], revs[d + 1]
1098 nb = self.node(b)
1111 nb = self.node(b)
1099
1112
1100 if infocollect is not None:
1113 if infocollect is not None:
1101 infocollect(nb)
1114 infocollect(nb)
1102
1115
1103 d = self.revdiff(a, b)
1116 d = self.revdiff(a, b)
1104 p = self.parents(nb)
1117 p = self.parents(nb)
1105 meta = nb + p[0] + p[1] + lookup(nb)
1118 meta = nb + p[0] + p[1] + lookup(nb)
1106 yield changegroup.genchunk("%s%s" % (meta, d))
1119 yield changegroup.genchunk("%s%s" % (meta, d))
1107
1120
1108 yield changegroup.closechunk()
1121 yield changegroup.closechunk()
1109
1122
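Each chunk yielded by group() is what addgroup() below consumes: 80 bytes of metadata, i.e. four 20-byte nodes (this node, its two parents and the linked changeset node), followed by the delta. Ignoring the length framing added by changegroup.genchunk(), the layout can be exercised like this; the node values are made up:

    import struct

    # hypothetical 20-byte node ids standing in for real SHA-1 hashes
    node, p1, p2, cs = (b'\x01' * 20, b'\x02' * 20, b'\x03' * 20, b'\x04' * 20)
    delta = b'example delta payload'

    # what group() emits for one revision
    chunk = node + p1 + p2 + cs + delta

    # what addgroup() unpacks on the receiving side
    n, pa, pb, link = struct.unpack("20s20s20s20s", chunk[:80])
    assert (n, pa, pb, link) == (node, p1, p2, cs)
    assert chunk[80:] == delta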
1110 def addgroup(self, revs, linkmapper, transaction, unique=0):
1123 def addgroup(self, revs, linkmapper, transaction, unique=0):
1111 """
1124 """
1112 add a delta group
1125 add a delta group
1113
1126
1114 given a set of deltas, add them to the revision log. the
1127 given a set of deltas, add them to the revision log. the
1115 first delta is against its parent, which should be in our
1128 first delta is against its parent, which should be in our
1116 log, the rest are against the previous delta.
1129 log, the rest are against the previous delta.
1117 """
1130 """
1118
1131
1119 #track the base of the current delta log
1132 #track the base of the current delta log
1120 r = self.count()
1133 r = self.count()
1121 t = r - 1
1134 t = r - 1
1122 node = None
1135 node = None
1123
1136
1124 base = prev = -1
1137 base = prev = -1
1125 start = end = textlen = 0
1138 start = end = textlen = 0
1126 if r:
1139 if r:
1127 end = self.end(t)
1140 end = self.end(t)
1128
1141
1129 ifh = self.opener(self.indexfile, "a+")
1142 ifh = self.opener(self.indexfile, "a+")
1130 ifh.seek(0, 2)
1143 ifh.seek(0, 2)
1131 transaction.add(self.indexfile, ifh.tell(), self.count())
1144 transaction.add(self.indexfile, ifh.tell(), self.count())
1132 if self.inlinedata():
1145 if self.inlinedata():
1133 dfh = None
1146 dfh = None
1134 else:
1147 else:
1135 transaction.add(self.datafile, end)
1148 transaction.add(self.datafile, end)
1136 dfh = self.opener(self.datafile, "a")
1149 dfh = self.opener(self.datafile, "a")
1137
1150
1138 # loop through our set of deltas
1151 # loop through our set of deltas
1139 chain = None
1152 chain = None
1140 for chunk in revs:
1153 for chunk in revs:
1141 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1154 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1142 link = linkmapper(cs)
1155 link = linkmapper(cs)
1143 if node in self.nodemap:
1156 if node in self.nodemap:
1144 # this can happen if two branches make the same change
1157 # this can happen if two branches make the same change
1145 # if unique:
1158 # if unique:
1146 # raise RevlogError(_("already have %s") % hex(node[:4]))
1159 # raise RevlogError(_("already have %s") % hex(node[:4]))
1147 chain = node
1160 chain = node
1148 continue
1161 continue
1149 delta = chunk[80:]
1162 delta = chunk[80:]
1150
1163
1151 for p in (p1, p2):
1164 for p in (p1, p2):
1152 if not p in self.nodemap:
1165 if not p in self.nodemap:
1153 raise RevlogError(_("unknown parent %s") % short(p))
1166 raise RevlogError(_("unknown parent %s") % short(p))
1154
1167
1155 if not chain:
1168 if not chain:
1156 # retrieve the parent revision of the delta chain
1169 # retrieve the parent revision of the delta chain
1157 chain = p1
1170 chain = p1
1158 if not chain in self.nodemap:
1171 if not chain in self.nodemap:
1159 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1172 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1160
1173
1161 # full versions are inserted when the needed deltas become
1174 # full versions are inserted when the needed deltas become
1162 # comparable to the uncompressed text or when the previous
1175 # comparable to the uncompressed text or when the previous
1163 # version is not the one we have a delta against. We use
1176 # version is not the one we have a delta against. We use
1164 # the size of the previous full rev as a proxy for the
1177 # the size of the previous full rev as a proxy for the
1165 # current size.
1178 # current size.
1166
1179
1167 if chain == prev:
1180 if chain == prev:
1168 tempd = compress(delta)
1181 tempd = compress(delta)
1169 cdelta = tempd[0] + tempd[1]
1182 cdelta = tempd[0] + tempd[1]
1170 textlen = mdiff.patchedsize(textlen, delta)
1183 textlen = mdiff.patchedsize(textlen, delta)
1171
1184
1172 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1185 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1173 # flush our writes here so revision() can read them back
1186 # flush our writes here so revision() can read them back
1174 if dfh:
1187 if dfh:
1175 dfh.flush()
1188 dfh.flush()
1176 ifh.flush()
1189 ifh.flush()
1177 text = self.revision(chain)
1190 text = self.revision(chain)
1178 text = self.patches(text, [delta])
1191 text = self.patches(text, [delta])
1179 chk = self.addrevision(text, transaction, link, p1, p2)
1192 chk = self.addrevision(text, transaction, link, p1, p2)
1180 if chk != node:
1193 if chk != node:
1181 raise RevlogError(_("consistency error adding group"))
1194 raise RevlogError(_("consistency error adding group"))
1182 textlen = len(text)
1195 textlen = len(text)
1183 else:
1196 else:
1184 if self.version == REVLOGV0:
1197 if self.version == REVLOGV0:
1185 e = (end, len(cdelta), base, link, p1, p2, node)
1198 e = (end, len(cdelta), base, link, p1, p2, node)
1186 else:
1199 else:
1187 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1200 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1188 link, self.rev(p1), self.rev(p2), node)
1201 link, self.rev(p1), self.rev(p2), node)
1189 self.index.append(e)
1202 self.index.append(e)
1190 self.nodemap[node] = r
1203 self.nodemap[node] = r
1191 if self.inlinedata():
1204 if self.inlinedata():
1192 ifh.write(struct.pack(self.indexformat, *e))
1205 ifh.write(struct.pack(self.indexformat, *e))
1193 ifh.write(cdelta)
1206 ifh.write(cdelta)
1194 self.checkinlinesize(transaction, ifh)
1207 self.checkinlinesize(transaction, ifh)
1195 if not self.inlinedata():
1208 if not self.inlinedata():
1196 dfh = self.opener(self.datafile, "a")
1209 dfh = self.opener(self.datafile, "a")
1197 ifh = self.opener(self.indexfile, "a")
1210 ifh = self.opener(self.indexfile, "a")
1198 else:
1211 else:
1199 if not dfh:
1212 if not dfh:
1200 # addrevision switched from inline to conventional
1213 # addrevision switched from inline to conventional
1201 # reopen the index
1214 # reopen the index
1202 dfh = self.opener(self.datafile, "a")
1215 dfh = self.opener(self.datafile, "a")
1203 ifh = self.opener(self.indexfile, "a")
1216 ifh = self.opener(self.indexfile, "a")
1204 dfh.write(cdelta)
1217 dfh.write(cdelta)
1205 ifh.write(struct.pack(self.indexformat, *e))
1218 ifh.write(struct.pack(self.indexformat, *e))
1206
1219
1207 t, r, chain, prev = r, r + 1, node, node
1220 t, r, chain, prev = r, r + 1, node, node
1208 base = self.base(t)
1221 base = self.base(t)
1209 start = self.start(base)
1222 start = self.start(base)
1210 end = self.end(t)
1223 end = self.end(t)
1211
1224
1212 return node
1225 return node
1213
1226
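The heuristic in addgroup() above appends each incoming delta as-is while the on-disk chain stays small, and rebuilds a full revision via addrevision() once the delta is not against the previous node or the chain would exceed roughly twice the reconstructed text size. A small sketch of just that decision; all names are hypothetical:

    def use_full_version(chain, prev, chain_start, chain_end, cdelta_len, textlen):
        # mirrors the test above: rebuild a full revision when the delta is
        # not against the previous node, or when the stored chain plus this
        # compressed delta would pass twice the size of the full text
        return chain != prev or (chain_end - chain_start + cdelta_len) > textlen * 2

    # small deltas against a 1000-byte text keep being appended to the chain
    assert not use_full_version('n1', 'n1', 0, 1500, 100, 1000)
    # once the chain grows past 2 * textlen, a full text is stored instead
    assert use_full_version('n1', 'n1', 0, 1950, 100, 1000)
    # a delta against anything but the previous node also forces a full text
    assert use_full_version('n0', 'n1', 0, 10, 10, 1000)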
1214 def strip(self, rev, minlink):
1227 def strip(self, rev, minlink):
1215 if self.count() == 0 or rev >= self.count():
1228 if self.count() == 0 or rev >= self.count():
1216 return
1229 return
1217
1230
1218 if isinstance(self.index, lazyindex):
1231 if isinstance(self.index, lazyindex):
1219 self.loadindexmap()
1232 self.loadindexmap()
1220
1233
1221 # When stripping away a revision, we need to make sure it
1234 # When stripping away a revision, we need to make sure it
1222 # does not actually belong to an older changeset.
1235 # does not actually belong to an older changeset.
1223 # The minlink parameter defines the oldest revision
1236 # The minlink parameter defines the oldest revision
1224 # we're allowed to strip away.
1237 # we're allowed to strip away.
1225 while minlink > self.index[rev][-4]:
1238 while minlink > self.index[rev][-4]:
1226 rev += 1
1239 rev += 1
1227 if rev >= self.count():
1240 if rev >= self.count():
1228 return
1241 return
1229
1242
1230 # first truncate the files on disk
1243 # first truncate the files on disk
1231 end = self.start(rev)
1244 end = self.start(rev)
1232 if not self.inlinedata():
1245 if not self.inlinedata():
1233 df = self.opener(self.datafile, "a")
1246 df = self.opener(self.datafile, "a")
1234 df.truncate(end)
1247 df.truncate(end)
1235 end = rev * struct.calcsize(self.indexformat)
1248 end = rev * struct.calcsize(self.indexformat)
1236 else:
1249 else:
1237 end += rev * struct.calcsize(self.indexformat)
1250 end += rev * struct.calcsize(self.indexformat)
1238
1251
1239 indexf = self.opener(self.indexfile, "a")
1252 indexf = self.opener(self.indexfile, "a")
1240 indexf.truncate(end)
1253 indexf.truncate(end)
1241
1254
1242 # then reset internal state in memory to forget those revisions
1255 # then reset internal state in memory to forget those revisions
1243 self.cache = None
1256 self.cache = None
1244 self.chunkcache = None
1257 self.chunkcache = None
1245 for x in xrange(rev, self.count()):
1258 for x in xrange(rev, self.count()):
1246 del self.nodemap[self.node(x)]
1259 del self.nodemap[self.node(x)]
1247
1260
1248 del self.index[rev:]
1261 del self.index[rev:]
1249
1262
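strip() truncates the files at byte offsets derived from the first stripped revision: the data file is cut where that revision's data starts, and the index file after rev fixed-size entries; with inline data both live in the same file, so the two offsets are added. A hypothetical helper showing only that arithmetic:

    def truncation_points(rev, data_start, entry_size, inline):
        # data_start: byte offset where revision `rev` begins in the data
        # entry_size: struct.calcsize(self.indexformat) in the revlog above
        if not inline:
            # separate data and index files
            return data_start, rev * entry_size
        # inline revlog: revision data and index entries share one file
        return None, data_start + rev * entry_size

    # e.g. stripping from revision 5 with 64-byte index entries
    assert truncation_points(5, 4096, 64, inline=False) == (4096, 320)
    assert truncation_points(5, 4096, 64, inline=True) == (None, 4416)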
1250 def checksize(self):
1263 def checksize(self):
1251 expected = 0
1264 expected = 0
1252 if self.count():
1265 if self.count():
1253 expected = self.end(self.count() - 1)
1266 expected = self.end(self.count() - 1)
1254
1267
1255 try:
1268 try:
1256 f = self.opener(self.datafile)
1269 f = self.opener(self.datafile)
1257 f.seek(0, 2)
1270 f.seek(0, 2)
1258 actual = f.tell()
1271 actual = f.tell()
1259 dd = actual - expected
1272 dd = actual - expected
1260 except IOError, inst:
1273 except IOError, inst:
1261 if inst.errno != errno.ENOENT:
1274 if inst.errno != errno.ENOENT:
1262 raise
1275 raise
1263 dd = 0
1276 dd = 0
1264
1277
1265 try:
1278 try:
1266 f = self.opener(self.indexfile)
1279 f = self.opener(self.indexfile)
1267 f.seek(0, 2)
1280 f.seek(0, 2)
1268 actual = f.tell()
1281 actual = f.tell()
1269 s = struct.calcsize(self.indexformat)
1282 s = struct.calcsize(self.indexformat)
1270 i = actual / s
1283 i = actual / s
1271 di = actual - (i * s)
1284 di = actual - (i * s)
1272 if self.inlinedata():
1285 if self.inlinedata():
1273 databytes = 0
1286 databytes = 0
1274 for r in xrange(self.count()):
1287 for r in xrange(self.count()):
1275 databytes += self.length(r)
1288 databytes += self.length(r)
1276 dd = 0
1289 dd = 0
1277 di = actual - self.count() * s - databytes
1290 di = actual - self.count() * s - databytes
1278 except IOError, inst:
1291 except IOError, inst:
1279 if inst.errno != errno.ENOENT:
1292 if inst.errno != errno.ENOENT:
1280 raise
1293 raise
1281 di = 0
1294 di = 0
1282
1295
1283 return (dd, di)
1296 return (dd, di)
1284
1297
1285
1298
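checksize() reports how many unexpected bytes trail the data file (dd) and the index file (di); non-zero values mean a truncated write or stray padding. A rough standalone equivalent for a non-inline revlog, using plain file paths rather than the real opener API:

    import os

    def excess_bytes(index_path, data_path, entry_size, expected_data_end):
        # dd: bytes in the data file beyond what the last index entry predicts
        dd = 0
        if os.path.exists(data_path):
            dd = os.path.getsize(data_path) - expected_data_end
        # di: bytes in the index file that do not form a whole entry
        di = 0
        if os.path.exists(index_path):
            actual = os.path.getsize(index_path)
            di = actual - (actual // entry_size) * entry_size
        return dd, di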
@@ -1,360 +1,290 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import gettext as _
8 from i18n import gettext as _
9 from demandload import *
9 from demandload import *
10 demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
10 demandload(globals(), "errno getpass os re socket sys tempfile")
11 demandload(globals(), "ConfigParser mdiff templater traceback util")
11 demandload(globals(), "ConfigParser mdiff templater traceback util")
12
12
13 class ui(object):
13 class ui(object):
14 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True, traceback=False, parentui=None):
15 interactive=True, traceback=False, parentui=None):
16 self.overlay = {}
16 self.overlay = {}
17 if parentui is None:
17 if parentui is None:
18 # this is the parent of all ui children
18 # this is the parent of all ui children
19 self.parentui = None
19 self.parentui = None
20 self.cdata = ConfigParser.SafeConfigParser()
20 self.cdata = ConfigParser.SafeConfigParser()
21 self.readconfig(util.rcpath())
21 self.readconfig(util.rcpath())
22
22
23 self.quiet = self.configbool("ui", "quiet")
23 self.quiet = self.configbool("ui", "quiet")
24 self.verbose = self.configbool("ui", "verbose")
24 self.verbose = self.configbool("ui", "verbose")
25 self.debugflag = self.configbool("ui", "debug")
25 self.debugflag = self.configbool("ui", "debug")
26 self.interactive = self.configbool("ui", "interactive", True)
26 self.interactive = self.configbool("ui", "interactive", True)
27 self.traceback = traceback
27 self.traceback = traceback
28
28
29 self.updateopts(verbose, debug, quiet, interactive)
29 self.updateopts(verbose, debug, quiet, interactive)
30 self.diffcache = None
30 self.diffcache = None
31 self.header = []
31 self.header = []
32 self.prev_header = []
32 self.prev_header = []
33 self.revlogopts = self.configrevlog()
33 self.revlogopts = self.configrevlog()
34 else:
34 else:
35 # parentui may point to an ui object which is already a child
35 # parentui may point to an ui object which is already a child
36 self.parentui = parentui.parentui or parentui
36 self.parentui = parentui.parentui or parentui
37 parent_cdata = self.parentui.cdata
37 parent_cdata = self.parentui.cdata
38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
39 # make interpolation work
39 # make interpolation work
40 for section in parent_cdata.sections():
40 for section in parent_cdata.sections():
41 self.cdata.add_section(section)
41 self.cdata.add_section(section)
42 for name, value in parent_cdata.items(section, raw=True):
42 for name, value in parent_cdata.items(section, raw=True):
43 self.cdata.set(section, name, value)
43 self.cdata.set(section, name, value)
44
44
45 def __getattr__(self, key):
45 def __getattr__(self, key):
46 return getattr(self.parentui, key)
46 return getattr(self.parentui, key)
47
47
48 def updateopts(self, verbose=False, debug=False, quiet=False,
48 def updateopts(self, verbose=False, debug=False, quiet=False,
49 interactive=True, traceback=False, config=[]):
49 interactive=True, traceback=False, config=[]):
50 self.quiet = (self.quiet or quiet) and not verbose and not debug
50 self.quiet = (self.quiet or quiet) and not verbose and not debug
51 self.verbose = (self.verbose or verbose) or debug
51 self.verbose = (self.verbose or verbose) or debug
52 self.debugflag = (self.debugflag or debug)
52 self.debugflag = (self.debugflag or debug)
53 self.interactive = (self.interactive and interactive)
53 self.interactive = (self.interactive and interactive)
54 self.traceback = self.traceback or traceback
54 self.traceback = self.traceback or traceback
55 for cfg in config:
55 for cfg in config:
56 try:
56 try:
57 name, value = cfg.split('=', 1)
57 name, value = cfg.split('=', 1)
58 section, name = name.split('.', 1)
58 section, name = name.split('.', 1)
59 if not self.cdata.has_section(section):
59 if not self.cdata.has_section(section):
60 self.cdata.add_section(section)
60 self.cdata.add_section(section)
61 if not section or not name:
61 if not section or not name:
62 raise IndexError
62 raise IndexError
63 self.cdata.set(section, name, value)
63 self.cdata.set(section, name, value)
64 except (IndexError, ValueError):
64 except (IndexError, ValueError):
65 raise util.Abort(_('malformed --config option: %s') % cfg)
65 raise util.Abort(_('malformed --config option: %s') % cfg)
66
66
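The --config handling above splits each override once on '=' and the key once on '.', so values may contain further '=' characters and option names may contain further dots. A small, self-contained illustration of that parsing:

    def parse_config_override(cfg):
        # same two splits as in updateopts() above
        name, value = cfg.split('=', 1)
        section, name = name.split('.', 1)
        if not section or not name:
            raise ValueError('malformed --config option: %s' % cfg)
        return section, name, value

    assert parse_config_override('ui.username=Jane <jane@example.com>') == \
        ('ui', 'username', 'Jane <jane@example.com>')
    assert parse_config_override('paths.default=http://host/repo?x=1') == \
        ('paths', 'default', 'http://host/repo?x=1')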
67 def readconfig(self, fn, root=None):
67 def readconfig(self, fn, root=None):
68 if isinstance(fn, basestring):
68 if isinstance(fn, basestring):
69 fn = [fn]
69 fn = [fn]
70 for f in fn:
70 for f in fn:
71 try:
71 try:
72 self.cdata.read(f)
72 self.cdata.read(f)
73 except ConfigParser.ParsingError, inst:
73 except ConfigParser.ParsingError, inst:
74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
75 # translate paths relative to root (or home) into absolute paths
75 # translate paths relative to root (or home) into absolute paths
76 if root is None:
76 if root is None:
77 root = os.path.expanduser('~')
77 root = os.path.expanduser('~')
78 for name, path in self.configitems("paths"):
78 for name, path in self.configitems("paths"):
79 if path and "://" not in path and not os.path.isabs(path):
79 if path and "://" not in path and not os.path.isabs(path):
80 self.cdata.set("paths", name, os.path.join(root, path))
80 self.cdata.set("paths", name, os.path.join(root, path))
81
81
82 def setconfig(self, section, name, val):
82 def setconfig(self, section, name, val):
83 self.overlay[(section, name)] = val
83 self.overlay[(section, name)] = val
84
84
85 def config(self, section, name, default=None):
85 def config(self, section, name, default=None):
86 if self.overlay.has_key((section, name)):
86 if self.overlay.has_key((section, name)):
87 return self.overlay[(section, name)]
87 return self.overlay[(section, name)]
88 if self.cdata.has_option(section, name):
88 if self.cdata.has_option(section, name):
89 try:
89 try:
90 return self.cdata.get(section, name)
90 return self.cdata.get(section, name)
91 except ConfigParser.InterpolationError, inst:
91 except ConfigParser.InterpolationError, inst:
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
93 if self.parentui is None:
93 if self.parentui is None:
94 return default
94 return default
95 else:
95 else:
96 return self.parentui.config(section, name, default)
96 return self.parentui.config(section, name, default)
97
97
98 def configlist(self, section, name, default=None):
98 def configlist(self, section, name, default=None):
99 """Return a list of comma/space separated strings"""
99 """Return a list of comma/space separated strings"""
100 result = self.config(section, name)
100 result = self.config(section, name)
101 if result is None:
101 if result is None:
102 result = default or []
102 result = default or []
103 if isinstance(result, basestring):
103 if isinstance(result, basestring):
104 result = result.replace(",", " ").split()
104 result = result.replace(",", " ").split()
105 return result
105 return result
106
106
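configlist() above accepts either commas or whitespace as separators by turning commas into spaces before splitting; for example:

    def split_configlist(value):
        # same normalization as configlist() above
        return value.replace(",", " ").split()

    assert split_configlist("foo,bar baz,  quux") == ['foo', 'bar', 'baz', 'quux']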
107 def configbool(self, section, name, default=False):
107 def configbool(self, section, name, default=False):
108 if self.overlay.has_key((section, name)):
108 if self.overlay.has_key((section, name)):
109 return self.overlay[(section, name)]
109 return self.overlay[(section, name)]
110 if self.cdata.has_option(section, name):
110 if self.cdata.has_option(section, name):
111 try:
111 try:
112 return self.cdata.getboolean(section, name)
112 return self.cdata.getboolean(section, name)
113 except ConfigParser.InterpolationError, inst:
113 except ConfigParser.InterpolationError, inst:
114 raise util.Abort(_("Error in configuration:\n%s") % inst)
114 raise util.Abort(_("Error in configuration:\n%s") % inst)
115 if self.parentui is None:
115 if self.parentui is None:
116 return default
116 return default
117 else:
117 else:
118 return self.parentui.configbool(section, name, default)
118 return self.parentui.configbool(section, name, default)
119
119
120 def has_config(self, section):
120 def has_config(self, section):
121 '''tell whether section exists in config.'''
121 '''tell whether section exists in config.'''
122 return self.cdata.has_section(section)
122 return self.cdata.has_section(section)
123
123
124 def configitems(self, section):
124 def configitems(self, section):
125 items = {}
125 items = {}
126 if self.parentui is not None:
126 if self.parentui is not None:
127 items = dict(self.parentui.configitems(section))
127 items = dict(self.parentui.configitems(section))
128 if self.cdata.has_section(section):
128 if self.cdata.has_section(section):
129 try:
129 try:
130 items.update(dict(self.cdata.items(section)))
130 items.update(dict(self.cdata.items(section)))
131 except ConfigParser.InterpolationError, inst:
131 except ConfigParser.InterpolationError, inst:
132 raise util.Abort(_("Error in configuration:\n%s") % inst)
132 raise util.Abort(_("Error in configuration:\n%s") % inst)
133 x = items.items()
133 x = items.items()
134 x.sort()
134 x.sort()
135 return x
135 return x
136
136
137 def walkconfig(self, seen=None):
137 def walkconfig(self, seen=None):
138 if seen is None:
138 if seen is None:
139 seen = {}
139 seen = {}
140 for (section, name), value in self.overlay.iteritems():
140 for (section, name), value in self.overlay.iteritems():
141 yield section, name, value
141 yield section, name, value
142 seen[section, name] = 1
142 seen[section, name] = 1
143 for section in self.cdata.sections():
143 for section in self.cdata.sections():
144 for name, value in self.cdata.items(section):
144 for name, value in self.cdata.items(section):
145 if (section, name) in seen: continue
145 if (section, name) in seen: continue
146 yield section, name, value.replace('\n', '\\n')
146 yield section, name, value.replace('\n', '\\n')
147 seen[section, name] = 1
147 seen[section, name] = 1
148 if self.parentui is not None:
148 if self.parentui is not None:
149 for parent in self.parentui.walkconfig(seen):
149 for parent in self.parentui.walkconfig(seen):
150 yield parent
150 yield parent
151
151
152 def extensions(self):
152 def extensions(self):
153 result = self.configitems("extensions")
153 result = self.configitems("extensions")
154 for i, (key, value) in enumerate(result):
154 for i, (key, value) in enumerate(result):
155 if value:
155 if value:
156 result[i] = (key, os.path.expanduser(value))
156 result[i] = (key, os.path.expanduser(value))
157 return result
157 return result
158
158
159 def hgignorefiles(self):
159 def hgignorefiles(self):
160 result = []
160 result = []
161 for key, value in self.configitems("ui"):
161 for key, value in self.configitems("ui"):
162 if key == 'ignore' or key.startswith('ignore.'):
162 if key == 'ignore' or key.startswith('ignore.'):
163 result.append(os.path.expanduser(value))
163 result.append(os.path.expanduser(value))
164 return result
164 return result
165
165
166 def configrevlog(self):
166 def configrevlog(self):
167 result = {}
167 result = {}
168 for key, value in self.configitems("revlog"):
168 for key, value in self.configitems("revlog"):
169 result[key.lower()] = value
169 result[key.lower()] = value
170 return result
170 return result
171
171
172 def diffopts(self, opts={}):
173 return mdiff.diffopts(
174 text=opts.get('text'),
175 showfunc=(opts.get('show_function') or
176 self.configbool('diff', 'showfunc', None)),
177 git=(opts.get('git') or
178 self.configbool('diff', 'git', None)),
179 ignorews=(opts.get('ignore_all_space') or
180 self.configbool('diff', 'ignorews', None)),
181 ignorewsamount=(opts.get('ignore_space_change') or
182 self.configbool('diff', 'ignorewsamount', None)),
183 ignoreblanklines=(opts.get('ignore_blank_lines') or
184 self.configbool('diff', 'ignoreblanklines', None)))
185
186 def username(self):
172 def username(self):
187 """Return default username to be used in commits.
173 """Return default username to be used in commits.
188
174
189 Searched in this order: $HGUSER, the [ui] section of hgrcs, $EMAIL;
175 Searched in this order: $HGUSER, the [ui] section of hgrcs, $EMAIL;
190 searching stops as soon as one of these is set.
176 searching stops as soon as one of these is set.
191 Abort if the username found is an empty string, to force specifying
177 Abort if the username found is an empty string, to force specifying
192 the commit user elsewhere, e.g. with a command line option or repo hgrc.
178 the commit user elsewhere, e.g. with a command line option or repo hgrc.
193 If none is found, use ($LOGNAME or $USER or $LNAME or
179 If none is found, use ($LOGNAME or $USER or $LNAME or
194 $USERNAME) + "@full.hostname".
180 $USERNAME) + "@full.hostname".
195 """
181 """
196 user = os.environ.get("HGUSER")
182 user = os.environ.get("HGUSER")
197 if user is None:
183 if user is None:
198 user = self.config("ui", "username")
184 user = self.config("ui", "username")
199 if user is None:
185 if user is None:
200 user = os.environ.get("EMAIL")
186 user = os.environ.get("EMAIL")
201 if user is None:
187 if user is None:
202 try:
188 try:
203 user = '%s@%s' % (util.getuser(), socket.getfqdn())
189 user = '%s@%s' % (util.getuser(), socket.getfqdn())
204 except KeyError:
190 except KeyError:
205 raise util.Abort(_("Please specify a username."))
191 raise util.Abort(_("Please specify a username."))
206 return user
192 return user
207
193
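username() resolves the committer identity in a fixed order: $HGUSER, then the configured [ui] username, then $EMAIL, and finally login@fully.qualified.hostname. A simplified sketch of that precedence; it ignores the empty-string abort and uses $LOGNAME alone where the real code goes through util.getuser():

    import os, socket

    def default_username(configured):
        for candidate in (os.environ.get("HGUSER"), configured,
                          os.environ.get("EMAIL")):
            if candidate is not None:
                return candidate
        # last resort: local login name at the fully qualified hostname
        return '%s@%s' % (os.environ.get("LOGNAME", "unknown"), socket.getfqdn())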
208 def shortuser(self, user):
194 def shortuser(self, user):
209 """Return a short representation of a user name or email address."""
195 """Return a short representation of a user name or email address."""
210 if not self.verbose: user = util.shortuser(user)
196 if not self.verbose: user = util.shortuser(user)
211 return user
197 return user
212
198
213 def expandpath(self, loc, default=None):
199 def expandpath(self, loc, default=None):
214 """Return repository location relative to cwd or from [paths]"""
200 """Return repository location relative to cwd or from [paths]"""
215 if "://" in loc or os.path.isdir(loc):
201 if "://" in loc or os.path.isdir(loc):
216 return loc
202 return loc
217
203
218 path = self.config("paths", loc)
204 path = self.config("paths", loc)
219 if not path and default is not None:
205 if not path and default is not None:
220 path = self.config("paths", default)
206 path = self.config("paths", default)
221 return path or loc
207 return path or loc
222
208
223 def write(self, *args):
209 def write(self, *args):
224 if self.header:
210 if self.header:
225 if self.header != self.prev_header:
211 if self.header != self.prev_header:
226 self.prev_header = self.header
212 self.prev_header = self.header
227 self.write(*self.header)
213 self.write(*self.header)
228 self.header = []
214 self.header = []
229 for a in args:
215 for a in args:
230 sys.stdout.write(str(a))
216 sys.stdout.write(str(a))
231
217
232 def write_header(self, *args):
218 def write_header(self, *args):
233 for a in args:
219 for a in args:
234 self.header.append(str(a))
220 self.header.append(str(a))
235
221
236 def write_err(self, *args):
222 def write_err(self, *args):
237 try:
223 try:
238 if not sys.stdout.closed: sys.stdout.flush()
224 if not sys.stdout.closed: sys.stdout.flush()
239 for a in args:
225 for a in args:
240 sys.stderr.write(str(a))
226 sys.stderr.write(str(a))
241 except IOError, inst:
227 except IOError, inst:
242 if inst.errno != errno.EPIPE:
228 if inst.errno != errno.EPIPE:
243 raise
229 raise
244
230
245 def flush(self):
231 def flush(self):
246 try: sys.stdout.flush()
232 try: sys.stdout.flush()
247 except: pass
233 except: pass
248 try: sys.stderr.flush()
234 try: sys.stderr.flush()
249 except: pass
235 except: pass
250
236
251 def readline(self):
237 def readline(self):
252 return sys.stdin.readline()[:-1]
238 return sys.stdin.readline()[:-1]
253 def prompt(self, msg, pat=None, default="y"):
239 def prompt(self, msg, pat=None, default="y"):
254 if not self.interactive: return default
240 if not self.interactive: return default
255 while 1:
241 while 1:
256 self.write(msg, " ")
242 self.write(msg, " ")
257 r = self.readline()
243 r = self.readline()
258 if not pat or re.match(pat, r):
244 if not pat or re.match(pat, r):
259 return r
245 return r
260 else:
246 else:
261 self.write(_("unrecognized response\n"))
247 self.write(_("unrecognized response\n"))
262 def getpass(self, prompt=None, default=None):
248 def getpass(self, prompt=None, default=None):
263 if not self.interactive: return default
249 if not self.interactive: return default
264 return getpass.getpass(prompt or _('password: '))
250 return getpass.getpass(prompt or _('password: '))
265 def status(self, *msg):
251 def status(self, *msg):
266 if not self.quiet: self.write(*msg)
252 if not self.quiet: self.write(*msg)
267 def warn(self, *msg):
253 def warn(self, *msg):
268 self.write_err(*msg)
254 self.write_err(*msg)
269 def note(self, *msg):
255 def note(self, *msg):
270 if self.verbose: self.write(*msg)
256 if self.verbose: self.write(*msg)
271 def debug(self, *msg):
257 def debug(self, *msg):
272 if self.debugflag: self.write(*msg)
258 if self.debugflag: self.write(*msg)
273 def edit(self, text, user):
259 def edit(self, text, user):
274 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
260 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
275 text=True)
261 text=True)
276 try:
262 try:
277 f = os.fdopen(fd, "w")
263 f = os.fdopen(fd, "w")
278 f.write(text)
264 f.write(text)
279 f.close()
265 f.close()
280
266
281 editor = (os.environ.get("HGEDITOR") or
267 editor = (os.environ.get("HGEDITOR") or
282 self.config("ui", "editor") or
268 self.config("ui", "editor") or
283 os.environ.get("EDITOR", "vi"))
269 os.environ.get("EDITOR", "vi"))
284
270
285 util.system("%s \"%s\"" % (editor, name),
271 util.system("%s \"%s\"" % (editor, name),
286 environ={'HGUSER': user},
272 environ={'HGUSER': user},
287 onerr=util.Abort, errprefix=_("edit failed"))
273 onerr=util.Abort, errprefix=_("edit failed"))
288
274
289 f = open(name)
275 f = open(name)
290 t = f.read()
276 t = f.read()
291 f.close()
277 f.close()
292 t = re.sub("(?m)^HG:.*\n", "", t)
278 t = re.sub("(?m)^HG:.*\n", "", t)
293 finally:
279 finally:
294 os.unlink(name)
280 os.unlink(name)
295
281
296 return t
282 return t
297
283
298 def sendmail(self):
299 '''return a mail connection. The object returned has one method, sendmail;
300 call it as sendmail(sender, list-of-recipients, msg).'''
301
302 def smtp():
303 '''send mail using smtp.'''
304
305 local_hostname = self.config('smtp', 'local_hostname')
306 s = smtplib.SMTP(local_hostname=local_hostname)
307 mailhost = self.config('smtp', 'host')
308 if not mailhost:
309 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
310 mailport = int(self.config('smtp', 'port', 25))
311 self.note(_('sending mail: smtp host %s, port %s\n') %
312 (mailhost, mailport))
313 s.connect(host=mailhost, port=mailport)
314 if self.configbool('smtp', 'tls'):
315 self.note(_('(using tls)\n'))
316 s.ehlo()
317 s.starttls()
318 s.ehlo()
319 username = self.config('smtp', 'username')
320 password = self.config('smtp', 'password')
321 if username and password:
322 self.note(_('(authenticating to mail server as %s)\n') %
323 (username))
324 s.login(username, password)
325 return s
326
327 class sendmail(object):
328 '''send mail using sendmail.'''
329
330 def __init__(self, ui, program):
331 self.ui = ui
332 self.program = program
333
334 def sendmail(self, sender, recipients, msg):
335 cmdline = '%s -f %s %s' % (
336 self.program, templater.email(sender),
337 ' '.join(map(templater.email, recipients)))
338 self.ui.note(_('sending mail: %s\n') % cmdline)
339 fp = os.popen(cmdline, 'w')
340 fp.write(msg)
341 ret = fp.close()
342 if ret:
343 raise util.Abort('%s %s' % (
344 os.path.basename(self.program.split(None, 1)[0]),
345 util.explain_exit(ret)[0]))
346
347 method = self.config('email', 'method', 'smtp')
348 if method == 'smtp':
349 mail = smtp()
350 else:
351 mail = sendmail(self, method)
352 return mail
353
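The block removed above (together with smtplib leaving the demandload list at the top of this diff) takes mail delivery out of the ui class. Against the pre-merge code on the left, callers used it roughly as below; the addresses and message body are made-up examples:

    from mercurial import ui as uimod

    u = uimod.ui()
    conn = u.sendmail()                   # SMTP connection or sendmail wrapper
    conn.sendmail('sender@example.com',              # envelope sender
                  ['dest@example.com'],              # list of recipients
                  'Subject: test\n\ntest message body\n')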
354 def print_exc(self):
284 def print_exc(self):
355 '''print the exception traceback if traceback printing is enabled.
285 '''print the exception traceback if traceback printing is enabled.
356 Only call this from an exception handler. Returns True if a traceback
286 Only call this from an exception handler. Returns True if a traceback
357 was printed.'''
287 was printed.'''
358 if self.traceback:
288 if self.traceback:
359 traceback.print_exc()
289 traceback.print_exc()
360 return self.traceback
290 return self.traceback