merge with mpm.
Vadim Gelfer
r2920:ef8ee447 merge default
@@ -0,0 +1,68 @@
1 # mail.py - mail sending bits for mercurial
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
7
8 from i18n import gettext as _
9 from demandload import *
10 demandload(globals(), "os re smtplib templater util")
11
12 def _smtp(ui):
13 '''send mail using smtp.'''
14
15 local_hostname = ui.config('smtp', 'local_hostname')
16 s = smtplib.SMTP(local_hostname=local_hostname)
17 mailhost = ui.config('smtp', 'host')
18 if not mailhost:
19 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
20 mailport = int(ui.config('smtp', 'port', 25))
21 ui.note(_('sending mail: smtp host %s, port %s\n') %
22 (mailhost, mailport))
23 s.connect(host=mailhost, port=mailport)
24 if ui.configbool('smtp', 'tls'):
25 ui.note(_('(using tls)\n'))
26 s.ehlo()
27 s.starttls()
28 s.ehlo()
29 username = ui.config('smtp', 'username')
30 password = ui.config('smtp', 'password')
31 if username and password:
32 ui.note(_('(authenticating to mail server as %s)\n') %
33 (username))
34 s.login(username, password)
35 return s
36
37 class _sendmail(object):
38 '''send mail using sendmail.'''
39
40 def __init__(self, ui, program):
41 self.ui = ui
42 self.program = program
43
44 def sendmail(self, sender, recipients, msg):
45 cmdline = '%s -f %s %s' % (
46 self.program, templater.email(sender),
47 ' '.join(map(templater.email, recipients)))
48 self.ui.note(_('sending mail: %s\n') % cmdline)
49 fp = os.popen(cmdline, 'w')
50 fp.write(msg)
51 ret = fp.close()
52 if ret:
53 raise util.Abort('%s %s' % (
54 os.path.basename(self.program.split(None, 1)[0]),
55 util.explain_exit(ret)[0]))
56
57 def connect(ui):
58 '''make a mail connection. object returned has one method, sendmail.
59 call as sendmail(sender, list-of-recipients, msg).'''
60
61 method = ui.config('email', 'method', 'smtp')
62 if method == 'smtp':
63 return _smtp(ui)
64
65 return _sendmail(ui, method)
66
67 def sendmail(ui, sender, recipients, msg):
68 return connect(ui).sendmail(sender, recipients, msg)
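
For context, a minimal usage sketch of the module added above; it is not part of the changeset, and the addresses are placeholders. connect() picks the transport from the [email] method setting (SMTP by default, otherwise the value is run as a sendmail-style program), and the SMTP path reads [smtp] host, port, tls, username, password and local_hostname from hgrc.

# usage sketch only -- assumes Mercurial is installed and hgrc is configured
from mercurial import ui as _ui
from mercurial import mail

u = _ui.ui()                  # reads the usual hgrc files
conn = mail.connect(u)        # an smtplib.SMTP connection or a _sendmail wrapper
conn.sendmail('sender@example.com',
              ['dest@example.com'],
              'Subject: test\n\nhello from mail.py\n')

The module-level sendmail(ui, sender, recipients, msg) helper at the end of the file performs the same connect-and-send in one call.
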
@@ -1,1980 +1,1997 @@
1 # queue.py - patch queues for mercurial
1 # queue.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.demandload import *
32 from mercurial.demandload import *
33 from mercurial.i18n import gettext as _
33 from mercurial.i18n import gettext as _
34 demandload(globals(), "os sys re struct traceback errno bz2")
34 demandload(globals(), "os sys re struct traceback errno bz2")
35 demandload(globals(), "mercurial:cmdutil,commands,hg,patch,revlog,ui,util")
35 demandload(globals(), "mercurial:cmdutil,commands,hg,patch,revlog,ui,util")
36
36
37 commands.norepo += " qclone qversion"
37 commands.norepo += " qclone qversion"
38
38
39 class statusentry:
39 class statusentry:
40 def __init__(self, rev, name=None):
40 def __init__(self, rev, name=None):
41 if not name:
41 if not name:
42 fields = rev.split(':')
42 fields = rev.split(':')
43 if len(fields) == 2:
43 if len(fields) == 2:
44 self.rev, self.name = fields
44 self.rev, self.name = fields
45 else:
45 else:
46 self.rev, self.name = None, None
46 self.rev, self.name = None, None
47 else:
47 else:
48 self.rev, self.name = rev, name
48 self.rev, self.name = rev, name
49
49
50 def __str__(self):
50 def __str__(self):
51 return self.rev + ':' + self.name
51 return self.rev + ':' + self.name
52
52
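
As a small aside (not part of the changeset), the class above round-trips one line of the .hg/patches/status file, which records a "changeset-hash:patchname" entry for each applied patch; with made-up values:

# illustration only -- hash and patch name are invented
line = 'a21d1eea923acb34f50803fd1cb9a02a5664b2b4:fix-frob.patch'
entry = statusentry(line)
print entry.rev     # the 40-character changeset hash
print entry.name    # 'fix-frob.patch'
print str(entry)    # round-trips to the original line
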
53 class queue:
53 class queue:
54 def __init__(self, ui, path, patchdir=None):
54 def __init__(self, ui, path, patchdir=None):
55 self.basepath = path
55 self.basepath = path
56 self.path = patchdir or os.path.join(path, "patches")
56 self.path = patchdir or os.path.join(path, "patches")
57 self.opener = util.opener(self.path)
57 self.opener = util.opener(self.path)
58 self.ui = ui
58 self.ui = ui
59 self.applied = []
59 self.applied = []
60 self.full_series = []
60 self.full_series = []
61 self.applied_dirty = 0
61 self.applied_dirty = 0
62 self.series_dirty = 0
62 self.series_dirty = 0
63 self.series_path = "series"
63 self.series_path = "series"
64 self.status_path = "status"
64 self.status_path = "status"
65 self.guards_path = "guards"
65 self.guards_path = "guards"
66 self.active_guards = None
66 self.active_guards = None
67 self.guards_dirty = False
67 self.guards_dirty = False
68 self._diffopts = None
68 self._diffopts = None
69
69
70 if os.path.exists(self.join(self.series_path)):
70 if os.path.exists(self.join(self.series_path)):
71 self.full_series = self.opener(self.series_path).read().splitlines()
71 self.full_series = self.opener(self.series_path).read().splitlines()
72 self.parse_series()
72 self.parse_series()
73
73
74 if os.path.exists(self.join(self.status_path)):
74 if os.path.exists(self.join(self.status_path)):
75 lines = self.opener(self.status_path).read().splitlines()
75 lines = self.opener(self.status_path).read().splitlines()
76 self.applied = [statusentry(l) for l in lines]
76 self.applied = [statusentry(l) for l in lines]
77
77
78 def diffopts(self):
78 def diffopts(self):
79 if self._diffopts is None:
79 if self._diffopts is None:
80 self._diffopts = self.ui.diffopts()
80 self._diffopts = patch.diffopts(self.ui)
81 return self._diffopts
81 return self._diffopts
82
82
83 def join(self, *p):
83 def join(self, *p):
84 return os.path.join(self.path, *p)
84 return os.path.join(self.path, *p)
85
85
86 def find_series(self, patch):
86 def find_series(self, patch):
87 pre = re.compile("(\s*)([^#]+)")
87 pre = re.compile("(\s*)([^#]+)")
88 index = 0
88 index = 0
89 for l in self.full_series:
89 for l in self.full_series:
90 m = pre.match(l)
90 m = pre.match(l)
91 if m:
91 if m:
92 s = m.group(2)
92 s = m.group(2)
93 s = s.rstrip()
93 s = s.rstrip()
94 if s == patch:
94 if s == patch:
95 return index
95 return index
96 index += 1
96 index += 1
97 return None
97 return None
98
98
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
99 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
100
100
101 def parse_series(self):
101 def parse_series(self):
102 self.series = []
102 self.series = []
103 self.series_guards = []
103 self.series_guards = []
104 for l in self.full_series:
104 for l in self.full_series:
105 h = l.find('#')
105 h = l.find('#')
106 if h == -1:
106 if h == -1:
107 patch = l
107 patch = l
108 comment = ''
108 comment = ''
109 elif h == 0:
109 elif h == 0:
110 continue
110 continue
111 else:
111 else:
112 patch = l[:h]
112 patch = l[:h]
113 comment = l[h:]
113 comment = l[h:]
114 patch = patch.strip()
114 patch = patch.strip()
115 if patch:
115 if patch:
116 self.series.append(patch)
116 self.series.append(patch)
117 self.series_guards.append(self.guard_re.findall(comment))
117 self.series_guards.append(self.guard_re.findall(comment))
118
118
119 def check_guard(self, guard):
119 def check_guard(self, guard):
120 bad_chars = '# \t\r\n\f'
120 bad_chars = '# \t\r\n\f'
121 first = guard[0]
121 first = guard[0]
122 for c in '-+':
122 for c in '-+':
123 if first == c:
123 if first == c:
124 return (_('guard %r starts with invalid character: %r') %
124 return (_('guard %r starts with invalid character: %r') %
125 (guard, c))
125 (guard, c))
126 for c in bad_chars:
126 for c in bad_chars:
127 if c in guard:
127 if c in guard:
128 return _('invalid character in guard %r: %r') % (guard, c)
128 return _('invalid character in guard %r: %r') % (guard, c)
129
129
130 def set_active(self, guards):
130 def set_active(self, guards):
131 for guard in guards:
131 for guard in guards:
132 bad = self.check_guard(guard)
132 bad = self.check_guard(guard)
133 if bad:
133 if bad:
134 raise util.Abort(bad)
134 raise util.Abort(bad)
135 guards = dict.fromkeys(guards).keys()
135 guards = dict.fromkeys(guards).keys()
136 guards.sort()
136 guards.sort()
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
137 self.ui.debug('active guards: %s\n' % ' '.join(guards))
138 self.active_guards = guards
138 self.active_guards = guards
139 self.guards_dirty = True
139 self.guards_dirty = True
140
140
141 def active(self):
141 def active(self):
142 if self.active_guards is None:
142 if self.active_guards is None:
143 self.active_guards = []
143 self.active_guards = []
144 try:
144 try:
145 guards = self.opener(self.guards_path).read().split()
145 guards = self.opener(self.guards_path).read().split()
146 except IOError, err:
146 except IOError, err:
147 if err.errno != errno.ENOENT: raise
147 if err.errno != errno.ENOENT: raise
148 guards = []
148 guards = []
149 for i, guard in enumerate(guards):
149 for i, guard in enumerate(guards):
150 bad = self.check_guard(guard)
150 bad = self.check_guard(guard)
151 if bad:
151 if bad:
152 self.ui.warn('%s:%d: %s\n' %
152 self.ui.warn('%s:%d: %s\n' %
153 (self.join(self.guards_path), i + 1, bad))
153 (self.join(self.guards_path), i + 1, bad))
154 else:
154 else:
155 self.active_guards.append(guard)
155 self.active_guards.append(guard)
156 return self.active_guards
156 return self.active_guards
157
157
158 def set_guards(self, idx, guards):
158 def set_guards(self, idx, guards):
159 for g in guards:
159 for g in guards:
160 if len(g) < 2:
160 if len(g) < 2:
161 raise util.Abort(_('guard %r too short') % g)
161 raise util.Abort(_('guard %r too short') % g)
162 if g[0] not in '-+':
162 if g[0] not in '-+':
163 raise util.Abort(_('guard %r starts with invalid char') % g)
163 raise util.Abort(_('guard %r starts with invalid char') % g)
164 bad = self.check_guard(g[1:])
164 bad = self.check_guard(g[1:])
165 if bad:
165 if bad:
166 raise util.Abort(bad)
166 raise util.Abort(bad)
167 drop = self.guard_re.sub('', self.full_series[idx])
167 drop = self.guard_re.sub('', self.full_series[idx])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
168 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
169 self.parse_series()
169 self.parse_series()
170 self.series_dirty = True
170 self.series_dirty = True
171
171
172 def pushable(self, idx):
172 def pushable(self, idx):
173 if isinstance(idx, str):
173 if isinstance(idx, str):
174 idx = self.series.index(idx)
174 idx = self.series.index(idx)
175 patchguards = self.series_guards[idx]
175 patchguards = self.series_guards[idx]
176 if not patchguards:
176 if not patchguards:
177 return True, None
177 return True, None
178 default = False
178 default = False
179 guards = self.active()
179 guards = self.active()
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
180 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
181 if exactneg:
181 if exactneg:
182 return False, exactneg[0]
182 return False, exactneg[0]
183 pos = [g for g in patchguards if g[0] == '+']
183 pos = [g for g in patchguards if g[0] == '+']
184 exactpos = [g for g in pos if g[1:] in guards]
184 exactpos = [g for g in pos if g[1:] in guards]
185 if pos:
185 if pos:
186 if exactpos:
186 if exactpos:
187 return True, exactpos[0]
187 return True, exactpos[0]
188 return False, pos
188 return False, pos
189 return True, ''
189 return True, ''
190
190
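
To make the selection rule above easier to follow, here is a condensed, self-contained restatement (an illustration, not part of the changeset). A series entry such as "fix.patch #+stable #-broken" is skipped when any of its negative guards is active and, if it carries positive guards, also when none of them is active:

# condensed restatement of pushable(), illustration only
def guards_allow(patchguards, active):
    if [g for g in patchguards if g[0] == '-' and g[1:] in active]:
        return False                  # a matching negative guard blocks the patch
    pos = [g for g in patchguards if g[0] == '+']
    if pos:
        return bool([g for g in pos if g[1:] in active])
    return True                       # unguarded, or only unmatched negative guards

print guards_allow(['+stable', '-broken'], ['stable'])   # True
print guards_allow(['+stable', '-broken'], ['broken'])   # False
print guards_allow(['+stable'], [])                      # False
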
191 def explain_pushable(self, idx, all_patches=False):
191 def explain_pushable(self, idx, all_patches=False):
192 write = all_patches and self.ui.write or self.ui.warn
192 write = all_patches and self.ui.write or self.ui.warn
193 if all_patches or self.ui.verbose:
193 if all_patches or self.ui.verbose:
194 if isinstance(idx, str):
194 if isinstance(idx, str):
195 idx = self.series.index(idx)
195 idx = self.series.index(idx)
196 pushable, why = self.pushable(idx)
196 pushable, why = self.pushable(idx)
197 if all_patches and pushable:
197 if all_patches and pushable:
198 if why is None:
198 if why is None:
199 write(_('allowing %s - no guards in effect\n') %
199 write(_('allowing %s - no guards in effect\n') %
200 self.series[idx])
200 self.series[idx])
201 else:
201 else:
202 if not why:
202 if not why:
203 write(_('allowing %s - no matching negative guards\n') %
203 write(_('allowing %s - no matching negative guards\n') %
204 self.series[idx])
204 self.series[idx])
205 else:
205 else:
206 write(_('allowing %s - guarded by %r\n') %
206 write(_('allowing %s - guarded by %r\n') %
207 (self.series[idx], why))
207 (self.series[idx], why))
208 if not pushable:
208 if not pushable:
209 if why:
209 if why:
210 write(_('skipping %s - guarded by %r\n') %
210 write(_('skipping %s - guarded by %r\n') %
211 (self.series[idx], ' '.join(why)))
211 (self.series[idx], ' '.join(why)))
212 else:
212 else:
213 write(_('skipping %s - no matching guards\n') %
213 write(_('skipping %s - no matching guards\n') %
214 self.series[idx])
214 self.series[idx])
215
215
216 def save_dirty(self):
216 def save_dirty(self):
217 def write_list(items, path):
217 def write_list(items, path):
218 fp = self.opener(path, 'w')
218 fp = self.opener(path, 'w')
219 for i in items:
219 for i in items:
220 print >> fp, i
220 print >> fp, i
221 fp.close()
221 fp.close()
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
222 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
223 if self.series_dirty: write_list(self.full_series, self.series_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
224 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
225
225
226 def readheaders(self, patch):
226 def readheaders(self, patch):
227 def eatdiff(lines):
227 def eatdiff(lines):
228 while lines:
228 while lines:
229 l = lines[-1]
229 l = lines[-1]
230 if (l.startswith("diff -") or
230 if (l.startswith("diff -") or
231 l.startswith("Index:") or
231 l.startswith("Index:") or
232 l.startswith("===========")):
232 l.startswith("===========")):
233 del lines[-1]
233 del lines[-1]
234 else:
234 else:
235 break
235 break
236 def eatempty(lines):
236 def eatempty(lines):
237 while lines:
237 while lines:
238 l = lines[-1]
238 l = lines[-1]
239 if re.match('\s*$', l):
239 if re.match('\s*$', l):
240 del lines[-1]
240 del lines[-1]
241 else:
241 else:
242 break
242 break
243
243
244 pf = self.join(patch)
244 pf = self.join(patch)
245 message = []
245 message = []
246 comments = []
246 comments = []
247 user = None
247 user = None
248 date = None
248 date = None
249 format = None
249 format = None
250 subject = None
250 subject = None
251 diffstart = 0
251 diffstart = 0
252
252
253 for line in file(pf):
253 for line in file(pf):
254 line = line.rstrip()
254 line = line.rstrip()
255 if diffstart:
255 if diffstart:
256 if line.startswith('+++ '):
256 if line.startswith('+++ '):
257 diffstart = 2
257 diffstart = 2
258 break
258 break
259 if line.startswith("--- "):
259 if line.startswith("--- "):
260 diffstart = 1
260 diffstart = 1
261 continue
261 continue
262 elif format == "hgpatch":
262 elif format == "hgpatch":
263 # parse values when importing the result of an hg export
263 # parse values when importing the result of an hg export
264 if line.startswith("# User "):
264 if line.startswith("# User "):
265 user = line[7:]
265 user = line[7:]
266 elif line.startswith("# Date "):
266 elif line.startswith("# Date "):
267 date = line[7:]
267 date = line[7:]
268 elif not line.startswith("# ") and line:
268 elif not line.startswith("# ") and line:
269 message.append(line)
269 message.append(line)
270 format = None
270 format = None
271 elif line == '# HG changeset patch':
271 elif line == '# HG changeset patch':
272 format = "hgpatch"
272 format = "hgpatch"
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
273 elif (format != "tagdone" and (line.startswith("Subject: ") or
274 line.startswith("subject: "))):
274 line.startswith("subject: "))):
275 subject = line[9:]
275 subject = line[9:]
276 format = "tag"
276 format = "tag"
277 elif (format != "tagdone" and (line.startswith("From: ") or
277 elif (format != "tagdone" and (line.startswith("From: ") or
278 line.startswith("from: "))):
278 line.startswith("from: "))):
279 user = line[6:]
279 user = line[6:]
280 format = "tag"
280 format = "tag"
281 elif format == "tag" and line == "":
281 elif format == "tag" and line == "":
282 # when looking for tags (subject: from: etc) they
282 # when looking for tags (subject: from: etc) they
283 # end once you find a blank line in the source
283 # end once you find a blank line in the source
284 format = "tagdone"
284 format = "tagdone"
285 elif message or line:
285 elif message or line:
286 message.append(line)
286 message.append(line)
287 comments.append(line)
287 comments.append(line)
288
288
289 eatdiff(message)
289 eatdiff(message)
290 eatdiff(comments)
290 eatdiff(comments)
291 eatempty(message)
291 eatempty(message)
292 eatempty(comments)
292 eatempty(comments)
293
293
294 # make sure message isn't empty
294 # make sure message isn't empty
295 if format and format.startswith("tag") and subject:
295 if format and format.startswith("tag") and subject:
296 message.insert(0, "")
296 message.insert(0, "")
297 message.insert(0, subject)
297 message.insert(0, subject)
298 return (message, comments, user, date, diffstart > 1)
298 return (message, comments, user, date, diffstart > 1)
299
299
300 def printdiff(self, repo, node1, node2=None, files=None,
300 def printdiff(self, repo, node1, node2=None, files=None,
301 fp=None, changes=None, opts=None):
301 fp=None, changes=None, opts=None):
302 patch.diff(repo, node1, node2, files,
302 patch.diff(repo, node1, node2, files,
303 fp=fp, changes=changes, opts=self.diffopts())
303 fp=fp, changes=changes, opts=self.diffopts())
304
304
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
305 def mergeone(self, repo, mergeq, head, patch, rev, wlock):
306 # first try just applying the patch
306 # first try just applying the patch
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
307 (err, n) = self.apply(repo, [ patch ], update_status=False,
308 strict=True, merge=rev, wlock=wlock)
308 strict=True, merge=rev, wlock=wlock)
309
309
310 if err == 0:
310 if err == 0:
311 return (err, n)
311 return (err, n)
312
312
313 if n is None:
313 if n is None:
314 raise util.Abort(_("apply failed for patch %s") % patch)
314 raise util.Abort(_("apply failed for patch %s") % patch)
315
315
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
316 self.ui.warn("patch didn't work out, merging %s\n" % patch)
317
317
318 # apply failed, strip away that rev and merge.
318 # apply failed, strip away that rev and merge.
319 hg.clean(repo, head, wlock=wlock)
319 hg.clean(repo, head, wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
320 self.strip(repo, n, update=False, backup='strip', wlock=wlock)
321
321
322 c = repo.changelog.read(rev)
322 c = repo.changelog.read(rev)
323 ret = hg.merge(repo, rev, wlock=wlock)
323 ret = hg.merge(repo, rev, wlock=wlock)
324 if ret:
324 if ret:
325 raise util.Abort(_("update returned %d") % ret)
325 raise util.Abort(_("update returned %d") % ret)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
326 n = repo.commit(None, c[4], c[1], force=1, wlock=wlock)
327 if n == None:
327 if n == None:
328 raise util.Abort(_("repo commit failed"))
328 raise util.Abort(_("repo commit failed"))
329 try:
329 try:
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
330 message, comments, user, date, patchfound = mergeq.readheaders(patch)
331 except:
331 except:
332 raise util.Abort(_("unable to read %s") % patch)
332 raise util.Abort(_("unable to read %s") % patch)
333
333
334 patchf = self.opener(patch, "w")
334 patchf = self.opener(patch, "w")
335 if comments:
335 if comments:
336 comments = "\n".join(comments) + '\n\n'
336 comments = "\n".join(comments) + '\n\n'
337 patchf.write(comments)
337 patchf.write(comments)
338 self.printdiff(repo, head, n, fp=patchf)
338 self.printdiff(repo, head, n, fp=patchf)
339 patchf.close()
339 patchf.close()
340 return (0, n)
340 return (0, n)
341
341
342 def qparents(self, repo, rev=None):
342 def qparents(self, repo, rev=None):
343 if rev is None:
343 if rev is None:
344 (p1, p2) = repo.dirstate.parents()
344 (p1, p2) = repo.dirstate.parents()
345 if p2 == revlog.nullid:
345 if p2 == revlog.nullid:
346 return p1
346 return p1
347 if len(self.applied) == 0:
347 if len(self.applied) == 0:
348 return None
348 return None
349 return revlog.bin(self.applied[-1].rev)
349 return revlog.bin(self.applied[-1].rev)
350 pp = repo.changelog.parents(rev)
350 pp = repo.changelog.parents(rev)
351 if pp[1] != revlog.nullid:
351 if pp[1] != revlog.nullid:
352 arevs = [ x.rev for x in self.applied ]
352 arevs = [ x.rev for x in self.applied ]
353 p0 = revlog.hex(pp[0])
353 p0 = revlog.hex(pp[0])
354 p1 = revlog.hex(pp[1])
354 p1 = revlog.hex(pp[1])
355 if p0 in arevs:
355 if p0 in arevs:
356 return pp[0]
356 return pp[0]
357 if p1 in arevs:
357 if p1 in arevs:
358 return pp[1]
358 return pp[1]
359 return pp[0]
359 return pp[0]
360
360
361 def mergepatch(self, repo, mergeq, series, wlock):
361 def mergepatch(self, repo, mergeq, series, wlock):
362 if len(self.applied) == 0:
362 if len(self.applied) == 0:
363 # each of the patches merged in will have two parents. This
363 # each of the patches merged in will have two parents. This
364 # can confuse the qrefresh, qdiff, and strip code because it
364 # can confuse the qrefresh, qdiff, and strip code because it
365 # needs to know which parent is actually in the patch queue.
365 # needs to know which parent is actually in the patch queue.
366 # so, we insert a merge marker with only one parent. This way
366 # so, we insert a merge marker with only one parent. This way
367 # the first patch in the queue is never a merge patch
367 # the first patch in the queue is never a merge patch
368 #
368 #
369 pname = ".hg.patches.merge.marker"
369 pname = ".hg.patches.merge.marker"
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
370 n = repo.commit(None, '[mq]: merge marker', user=None, force=1,
371 wlock=wlock)
371 wlock=wlock)
372 self.applied.append(statusentry(revlog.hex(n), pname))
372 self.applied.append(statusentry(revlog.hex(n), pname))
373 self.applied_dirty = 1
373 self.applied_dirty = 1
374
374
375 head = self.qparents(repo)
375 head = self.qparents(repo)
376
376
377 for patch in series:
377 for patch in series:
378 patch = mergeq.lookup(patch, strict=True)
378 patch = mergeq.lookup(patch, strict=True)
379 if not patch:
379 if not patch:
380 self.ui.warn("patch %s does not exist\n" % patch)
380 self.ui.warn("patch %s does not exist\n" % patch)
381 return (1, None)
381 return (1, None)
382 pushable, reason = self.pushable(patch)
382 pushable, reason = self.pushable(patch)
383 if not pushable:
383 if not pushable:
384 self.explain_pushable(patch, all_patches=True)
384 self.explain_pushable(patch, all_patches=True)
385 continue
385 continue
386 info = mergeq.isapplied(patch)
386 info = mergeq.isapplied(patch)
387 if not info:
387 if not info:
388 self.ui.warn("patch %s is not applied\n" % patch)
388 self.ui.warn("patch %s is not applied\n" % patch)
389 return (1, None)
389 return (1, None)
390 rev = revlog.bin(info[1])
390 rev = revlog.bin(info[1])
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
391 (err, head) = self.mergeone(repo, mergeq, head, patch, rev, wlock)
392 if head:
392 if head:
393 self.applied.append(statusentry(revlog.hex(head), patch))
393 self.applied.append(statusentry(revlog.hex(head), patch))
394 self.applied_dirty = 1
394 self.applied_dirty = 1
395 if err:
395 if err:
396 return (err, head)
396 return (err, head)
397 return (0, head)
397 return (0, head)
398
398
399 def patch(self, repo, patchfile):
399 def patch(self, repo, patchfile):
400 '''Apply patchfile to the working directory.
400 '''Apply patchfile to the working directory.
401 patchfile: file name of patch'''
401 patchfile: file name of patch'''
402 try:
402 try:
403 (files, fuzz) = patch.patch(patchfile, self.ui, strip=1,
404 cwd=repo.root)
405 except Exception, inst:
406 self.ui.note(str(inst) + '\n')
407 if not self.ui.verbose:
408 self.ui.warn("patch failed, unable to continue (try -v)\n")
409 return (False, [], False)
410
411 return (True, files.keys(), fuzz)
412
403 pp = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
404 f = os.popen("%s -d %s -p1 --no-backup-if-mismatch < %s" %
405 (pp, util.shellquote(repo.root), util.shellquote(patchfile)))
406 except:
407 self.ui.warn("patch failed, unable to continue (try -v)\n")
408 return (None, [], False)
409 files = []
410 fuzz = False
411 for l in f:
412 l = l.rstrip('\r\n');
413 if self.ui.verbose:
414 self.ui.warn(l + "\n")
415 if l[:14] == 'patching file ':
416 pf = os.path.normpath(util.parse_patch_output(l))
417 if pf not in files:
418 files.append(pf)
419 printed_file = False
420 file_str = l
421 elif l.find('with fuzz') >= 0:
422 if not printed_file:
423 self.ui.warn(file_str + '\n')
424 printed_file = True
425 self.ui.warn(l + '\n')
426 fuzz = True
427 elif l.find('saving rejects to file') >= 0:
428 self.ui.warn(l + '\n')
429 elif l.find('FAILED') >= 0:
430 if not printed_file:
431 self.ui.warn(file_str + '\n')
432 printed_file = True
433 self.ui.warn(l + '\n')
434
435 return (not f.close(), files, fuzz)
436
413 def apply(self, repo, series, list=False, update_status=True,
437 def apply(self, repo, series, list=False, update_status=True,
414 strict=False, patchdir=None, merge=None, wlock=None):
438 strict=False, patchdir=None, merge=None, wlock=None):
415 # TODO unify with commands.py
439 # TODO unify with commands.py
416 if not patchdir:
440 if not patchdir:
417 patchdir = self.path
441 patchdir = self.path
418 err = 0
442 err = 0
419 if not wlock:
443 if not wlock:
420 wlock = repo.wlock()
444 wlock = repo.wlock()
421 lock = repo.lock()
445 lock = repo.lock()
422 tr = repo.transaction()
446 tr = repo.transaction()
423 n = None
447 n = None
424 for patch in series:
448 for patch in series:
425 pushable, reason = self.pushable(patch)
449 pushable, reason = self.pushable(patch)
426 if not pushable:
450 if not pushable:
427 self.explain_pushable(patch, all_patches=True)
451 self.explain_pushable(patch, all_patches=True)
428 continue
452 continue
429 self.ui.warn("applying %s\n" % patch)
453 self.ui.warn("applying %s\n" % patch)
430 pf = os.path.join(patchdir, patch)
454 pf = os.path.join(patchdir, patch)
431
455
432 try:
456 try:
433 message, comments, user, date, patchfound = self.readheaders(patch)
457 message, comments, user, date, patchfound = self.readheaders(patch)
434 except:
458 except:
435 self.ui.warn("Unable to read %s\n" % pf)
459 self.ui.warn("Unable to read %s\n" % pf)
436 err = 1
460 err = 1
437 break
461 break
438
462
439 if not message:
463 if not message:
440 message = "imported patch %s\n" % patch
464 message = "imported patch %s\n" % patch
441 else:
465 else:
442 if list:
466 if list:
443 message.append("\nimported patch %s" % patch)
467 message.append("\nimported patch %s" % patch)
444 message = '\n'.join(message)
468 message = '\n'.join(message)
445
469
446 (patcherr, files, fuzz) = self.patch(repo, pf)
470 (patcherr, files, fuzz) = self.patch(repo, pf)
447 patcherr = not patcherr
471 patcherr = not patcherr
448
472
449 if merge and len(files) > 0:
473 if merge and len(files) > 0:
450 # Mark as merged and update dirstate parent info
474 # Mark as merged and update dirstate parent info
451 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
475 repo.dirstate.update(repo.dirstate.filterfiles(files), 'm')
452 p1, p2 = repo.dirstate.parents()
476 p1, p2 = repo.dirstate.parents()
453 repo.dirstate.setparents(p1, merge)
477 repo.dirstate.setparents(p1, merge)
454 if len(files) > 0:
478 if len(files) > 0:
455 cwd = repo.getcwd()
479 cwd = repo.getcwd()
456 cfiles = files
480 cfiles = files
457 if cwd:
481 if cwd:
458 cfiles = [util.pathto(cwd, f) for f in files]
482 cfiles = [util.pathto(cwd, f) for f in files]
459 cmdutil.addremove(repo, cfiles, wlock=wlock)
483 cmdutil.addremove(repo, cfiles, wlock=wlock)
460 n = repo.commit(files, message, user, date, force=1, lock=lock,
484 n = repo.commit(files, message, user, date, force=1, lock=lock,
461 wlock=wlock)
485 wlock=wlock)
462
486
463 if n == None:
487 if n == None:
464 raise util.Abort(_("repo commit failed"))
488 raise util.Abort(_("repo commit failed"))
465
489
466 if update_status:
490 if update_status:
467 self.applied.append(statusentry(revlog.hex(n), patch))
491 self.applied.append(statusentry(revlog.hex(n), patch))
468
492
469 if patcherr:
493 if patcherr:
470 if not patchfound:
494 if not patchfound:
471 self.ui.warn("patch %s is empty\n" % patch)
495 self.ui.warn("patch %s is empty\n" % patch)
472 err = 0
496 err = 0
473 else:
497 else:
474 self.ui.warn("patch failed, rejects left in working dir\n")
498 self.ui.warn("patch failed, rejects left in working dir\n")
475 err = 1
499 err = 1
476 break
500 break
477
501
478 if fuzz and strict:
502 if fuzz and strict:
479 self.ui.warn("fuzz found when applying patch, stopping\n")
503 self.ui.warn("fuzz found when applying patch, stopping\n")
480 err = 1
504 err = 1
481 break
505 break
482 tr.close()
506 tr.close()
483 return (err, n)
507 return (err, n)
484
508
485 def delete(self, repo, patches, keep=False):
486 realpatches = []
487 for patch in patches:
488 patch = self.lookup(patch, strict=True)
489 info = self.isapplied(patch)
490 if info:
491 raise util.Abort(_("cannot delete applied patch %s") % patch)
492 if patch not in self.series:
493 raise util.Abort(_("patch %s not in series file") % patch)
494 realpatches.append(patch)
495
496 if not keep:
497 r = self.qrepo()
498 if r:
499 r.remove(realpatches, True)
500 else:
501 os.unlink(self.join(patch))
502
503 indices = [self.find_series(p) for p in realpatches]
504 indices.sort()
505 for i in indices[-1::-1]:
506 del self.full_series[i]
507 self.parse_series()
508 self.series_dirty = 1
509
509 def delete(self, repo, patch, force=False):
510 patch = self.lookup(patch, strict=True)
511 info = self.isapplied(patch)
512 if info:
513 raise util.Abort(_("cannot delete applied patch %s") % patch)
514 if patch not in self.series:
515 raise util.Abort(_("patch %s not in series file") % patch)
516 if force:
517 r = self.qrepo()
518 if r:
519 r.remove([patch], True)
520 else:
521 os.unlink(self.join(patch))
522 i = self.find_series(patch)
523 del self.full_series[i]
524 self.parse_series()
525 self.series_dirty = 1
526
510 def check_toppatch(self, repo):
527 def check_toppatch(self, repo):
511 if len(self.applied) > 0:
528 if len(self.applied) > 0:
512 top = revlog.bin(self.applied[-1].rev)
529 top = revlog.bin(self.applied[-1].rev)
513 pp = repo.dirstate.parents()
530 pp = repo.dirstate.parents()
514 if top not in pp:
531 if top not in pp:
515 raise util.Abort(_("queue top not at same revision as working directory"))
532 raise util.Abort(_("queue top not at same revision as working directory"))
516 return top
533 return top
517 return None
534 return None
518 def check_localchanges(self, repo, force=False, refresh=True):
535 def check_localchanges(self, repo, force=False, refresh=True):
519 m, a, r, d = repo.status()[:4]
536 m, a, r, d = repo.status()[:4]
520 if m or a or r or d:
537 if m or a or r or d:
521 if not force:
538 if not force:
522 if refresh:
539 if refresh:
523 raise util.Abort(_("local changes found, refresh first"))
540 raise util.Abort(_("local changes found, refresh first"))
524 else:
541 else:
525 raise util.Abort(_("local changes found"))
542 raise util.Abort(_("local changes found"))
526 return m, a, r, d
543 return m, a, r, d
527 def new(self, repo, patch, msg=None, force=None):
544 def new(self, repo, patch, msg=None, force=None):
528 if os.path.exists(self.join(patch)):
545 if os.path.exists(self.join(patch)):
529 raise util.Abort(_('patch "%s" already exists') % patch)
546 raise util.Abort(_('patch "%s" already exists') % patch)
530 m, a, r, d = self.check_localchanges(repo, force)
547 m, a, r, d = self.check_localchanges(repo, force)
531 commitfiles = m + a + r
548 commitfiles = m + a + r
532 self.check_toppatch(repo)
549 self.check_toppatch(repo)
533 wlock = repo.wlock()
550 wlock = repo.wlock()
534 insert = self.full_series_end()
551 insert = self.full_series_end()
535 if msg:
552 if msg:
536 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
553 n = repo.commit(commitfiles, "[mq]: %s" % msg, force=True,
537 wlock=wlock)
554 wlock=wlock)
538 else:
555 else:
539 n = repo.commit(commitfiles,
556 n = repo.commit(commitfiles,
540 "New patch: %s" % patch, force=True, wlock=wlock)
557 "New patch: %s" % patch, force=True, wlock=wlock)
541 if n == None:
558 if n == None:
542 raise util.Abort(_("repo commit failed"))
559 raise util.Abort(_("repo commit failed"))
543 self.full_series[insert:insert] = [patch]
560 self.full_series[insert:insert] = [patch]
544 self.applied.append(statusentry(revlog.hex(n), patch))
561 self.applied.append(statusentry(revlog.hex(n), patch))
545 self.parse_series()
562 self.parse_series()
546 self.series_dirty = 1
563 self.series_dirty = 1
547 self.applied_dirty = 1
564 self.applied_dirty = 1
548 p = self.opener(patch, "w")
565 p = self.opener(patch, "w")
549 if msg:
566 if msg:
550 msg = msg + "\n"
567 msg = msg + "\n"
551 p.write(msg)
568 p.write(msg)
552 p.close()
569 p.close()
553 wlock = None
570 wlock = None
554 r = self.qrepo()
571 r = self.qrepo()
555 if r: r.add([patch])
572 if r: r.add([patch])
556 if commitfiles:
573 if commitfiles:
557 self.refresh(repo, short=True)
574 self.refresh(repo, short=True)
558
575
559 def strip(self, repo, rev, update=True, backup="all", wlock=None):
576 def strip(self, repo, rev, update=True, backup="all", wlock=None):
560 def limitheads(chlog, stop):
577 def limitheads(chlog, stop):
561 """return the list of all nodes that have no children"""
578 """return the list of all nodes that have no children"""
562 p = {}
579 p = {}
563 h = []
580 h = []
564 stoprev = 0
581 stoprev = 0
565 if stop in chlog.nodemap:
582 if stop in chlog.nodemap:
566 stoprev = chlog.rev(stop)
583 stoprev = chlog.rev(stop)
567
584
568 for r in range(chlog.count() - 1, -1, -1):
585 for r in range(chlog.count() - 1, -1, -1):
569 n = chlog.node(r)
586 n = chlog.node(r)
570 if n not in p:
587 if n not in p:
571 h.append(n)
588 h.append(n)
572 if n == stop:
589 if n == stop:
573 break
590 break
574 if r < stoprev:
591 if r < stoprev:
575 break
592 break
576 for pn in chlog.parents(n):
593 for pn in chlog.parents(n):
577 p[pn] = 1
594 p[pn] = 1
578 return h
595 return h
579
596
580 def bundle(cg):
597 def bundle(cg):
581 backupdir = repo.join("strip-backup")
598 backupdir = repo.join("strip-backup")
582 if not os.path.isdir(backupdir):
599 if not os.path.isdir(backupdir):
583 os.mkdir(backupdir)
600 os.mkdir(backupdir)
584 name = os.path.join(backupdir, "%s" % revlog.short(rev))
601 name = os.path.join(backupdir, "%s" % revlog.short(rev))
585 name = savename(name)
602 name = savename(name)
586 self.ui.warn("saving bundle to %s\n" % name)
603 self.ui.warn("saving bundle to %s\n" % name)
587 # TODO, exclusive open
604 # TODO, exclusive open
588 f = open(name, "wb")
605 f = open(name, "wb")
589 try:
606 try:
590 f.write("HG10")
607 f.write("HG10")
591 z = bz2.BZ2Compressor(9)
608 z = bz2.BZ2Compressor(9)
592 while 1:
609 while 1:
593 chunk = cg.read(4096)
610 chunk = cg.read(4096)
594 if not chunk:
611 if not chunk:
595 break
612 break
596 f.write(z.compress(chunk))
613 f.write(z.compress(chunk))
597 f.write(z.flush())
614 f.write(z.flush())
598 except:
615 except:
599 os.unlink(name)
616 os.unlink(name)
600 raise
617 raise
601 f.close()
618 f.close()
602 return name
619 return name
603
620
604 def stripall(rev, revnum):
621 def stripall(rev, revnum):
605 cl = repo.changelog
622 cl = repo.changelog
606 c = cl.read(rev)
623 c = cl.read(rev)
607 mm = repo.manifest.read(c[0])
624 mm = repo.manifest.read(c[0])
608 seen = {}
625 seen = {}
609
626
610 for x in xrange(revnum, cl.count()):
627 for x in xrange(revnum, cl.count()):
611 c = cl.read(cl.node(x))
628 c = cl.read(cl.node(x))
612 for f in c[3]:
629 for f in c[3]:
613 if f in seen:
630 if f in seen:
614 continue
631 continue
615 seen[f] = 1
632 seen[f] = 1
616 if f in mm:
633 if f in mm:
617 filerev = mm[f]
634 filerev = mm[f]
618 else:
635 else:
619 filerev = 0
636 filerev = 0
620 seen[f] = filerev
637 seen[f] = filerev
621 # we go in two steps here so the strip loop happens in a
638 # we go in two steps here so the strip loop happens in a
622 # sensible order. When stripping many files, this helps keep
639 # sensible order. When stripping many files, this helps keep
623 # our disk access patterns under control.
640 # our disk access patterns under control.
624 seen_list = seen.keys()
641 seen_list = seen.keys()
625 seen_list.sort()
642 seen_list.sort()
626 for f in seen_list:
643 for f in seen_list:
627 ff = repo.file(f)
644 ff = repo.file(f)
628 filerev = seen[f]
645 filerev = seen[f]
629 if filerev != 0:
646 if filerev != 0:
630 if filerev in ff.nodemap:
647 if filerev in ff.nodemap:
631 filerev = ff.rev(filerev)
648 filerev = ff.rev(filerev)
632 else:
649 else:
633 filerev = 0
650 filerev = 0
634 ff.strip(filerev, revnum)
651 ff.strip(filerev, revnum)
635
652
636 if not wlock:
653 if not wlock:
637 wlock = repo.wlock()
654 wlock = repo.wlock()
638 lock = repo.lock()
655 lock = repo.lock()
639 chlog = repo.changelog
656 chlog = repo.changelog
640 # TODO delete the undo files, and handle undo of merge sets
657 # TODO delete the undo files, and handle undo of merge sets
641 pp = chlog.parents(rev)
658 pp = chlog.parents(rev)
642 revnum = chlog.rev(rev)
659 revnum = chlog.rev(rev)
643
660
644 if update:
661 if update:
645 self.check_localchanges(repo, refresh=False)
662 self.check_localchanges(repo, refresh=False)
646 urev = self.qparents(repo, rev)
663 urev = self.qparents(repo, rev)
647 hg.clean(repo, urev, wlock=wlock)
664 hg.clean(repo, urev, wlock=wlock)
648 repo.dirstate.write()
665 repo.dirstate.write()
649
666
650 # save is a list of all the branches we are truncating away
667 # save is a list of all the branches we are truncating away
651 # that we actually want to keep. changegroup will be used
668 # that we actually want to keep. changegroup will be used
652 # to preserve them and add them back after the truncate
669 # to preserve them and add them back after the truncate
653 saveheads = []
670 saveheads = []
654 savebases = {}
671 savebases = {}
655
672
656 heads = limitheads(chlog, rev)
673 heads = limitheads(chlog, rev)
657 seen = {}
674 seen = {}
658
675
659 # search through all the heads, finding those where the revision
676 # search through all the heads, finding those where the revision
660 # we want to strip away is an ancestor. Also look for merges
677 # we want to strip away is an ancestor. Also look for merges
661 # that might be turned into new heads by the strip.
678 # that might be turned into new heads by the strip.
662 while heads:
679 while heads:
663 h = heads.pop()
680 h = heads.pop()
664 n = h
681 n = h
665 while True:
682 while True:
666 seen[n] = 1
683 seen[n] = 1
667 pp = chlog.parents(n)
684 pp = chlog.parents(n)
668 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
685 if pp[1] != revlog.nullid and chlog.rev(pp[1]) > revnum:
669 if pp[1] not in seen:
686 if pp[1] not in seen:
670 heads.append(pp[1])
687 heads.append(pp[1])
671 if pp[0] == revlog.nullid:
688 if pp[0] == revlog.nullid:
672 break
689 break
673 if chlog.rev(pp[0]) < revnum:
690 if chlog.rev(pp[0]) < revnum:
674 break
691 break
675 n = pp[0]
692 n = pp[0]
676 if n == rev:
693 if n == rev:
677 break
694 break
678 r = chlog.reachable(h, rev)
695 r = chlog.reachable(h, rev)
679 if rev not in r:
696 if rev not in r:
680 saveheads.append(h)
697 saveheads.append(h)
681 for x in r:
698 for x in r:
682 if chlog.rev(x) > revnum:
699 if chlog.rev(x) > revnum:
683 savebases[x] = 1
700 savebases[x] = 1
684
701
685 # create a changegroup for all the branches we need to keep
702 # create a changegroup for all the branches we need to keep
686 if backup == "all":
703 if backup == "all":
687 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
704 backupch = repo.changegroupsubset([rev], chlog.heads(), 'strip')
688 bundle(backupch)
705 bundle(backupch)
689 if saveheads:
706 if saveheads:
690 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
707 backupch = repo.changegroupsubset(savebases.keys(), saveheads, 'strip')
691 chgrpfile = bundle(backupch)
708 chgrpfile = bundle(backupch)
692
709
693 stripall(rev, revnum)
710 stripall(rev, revnum)
694
711
695 change = chlog.read(rev)
712 change = chlog.read(rev)
696 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
713 repo.manifest.strip(repo.manifest.rev(change[0]), revnum)
697 chlog.strip(revnum, revnum)
714 chlog.strip(revnum, revnum)
698 if saveheads:
715 if saveheads:
699 self.ui.status("adding branch\n")
716 self.ui.status("adding branch\n")
700 commands.unbundle(self.ui, repo, chgrpfile, update=False)
717 commands.unbundle(self.ui, repo, chgrpfile, update=False)
701 if backup != "strip":
718 if backup != "strip":
702 os.unlink(chgrpfile)
719 os.unlink(chgrpfile)
703
720
704 def isapplied(self, patch):
721 def isapplied(self, patch):
705 """returns (index, rev, patch)"""
722 """returns (index, rev, patch)"""
706 for i in xrange(len(self.applied)):
723 for i in xrange(len(self.applied)):
707 a = self.applied[i]
724 a = self.applied[i]
708 if a.name == patch:
725 if a.name == patch:
709 return (i, a.rev, a.name)
726 return (i, a.rev, a.name)
710 return None
727 return None
711
728
712 # if the exact patch name does not exist, we try a few
729 # if the exact patch name does not exist, we try a few
713 # variations. If strict is passed, we try only #1
730 # variations. If strict is passed, we try only #1
714 #
731 #
715 # 1) a number to indicate an offset in the series file
732 # 1) a number to indicate an offset in the series file
716 # 2) a unique substring of the patch name was given
733 # 2) a unique substring of the patch name was given
717 # 3) patchname[-+]num to indicate an offset in the series file
734 # 3) patchname[-+]num to indicate an offset in the series file
718 def lookup(self, patch, strict=False):
735 def lookup(self, patch, strict=False):
719 patch = patch and str(patch)
736 patch = patch and str(patch)
720
737
721 def partial_name(s):
738 def partial_name(s):
722 if s in self.series:
739 if s in self.series:
723 return s
740 return s
724 matches = [x for x in self.series if s in x]
741 matches = [x for x in self.series if s in x]
725 if len(matches) > 1:
742 if len(matches) > 1:
726 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
743 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
727 for m in matches:
744 for m in matches:
728 self.ui.warn(' %s\n' % m)
745 self.ui.warn(' %s\n' % m)
729 return None
746 return None
730 if matches:
747 if matches:
731 return matches[0]
748 return matches[0]
732 if len(self.series) > 0 and len(self.applied) > 0:
749 if len(self.series) > 0 and len(self.applied) > 0:
733 if s == 'qtip':
750 if s == 'qtip':
734 return self.series[self.series_end()-1]
751 return self.series[self.series_end()-1]
735 if s == 'qbase':
752 if s == 'qbase':
736 return self.series[0]
753 return self.series[0]
737 return None
754 return None
738 if patch == None:
755 if patch == None:
739 return None
756 return None
740
757
741 # we don't want to return a partial match until we make
758 # we don't want to return a partial match until we make
742 # sure the file name passed in does not exist (checked below)
759 # sure the file name passed in does not exist (checked below)
743 res = partial_name(patch)
760 res = partial_name(patch)
744 if res and res == patch:
761 if res and res == patch:
745 return res
762 return res
746
763
747 if not os.path.isfile(self.join(patch)):
764 if not os.path.isfile(self.join(patch)):
748 try:
765 try:
749 sno = int(patch)
766 sno = int(patch)
750 except(ValueError, OverflowError):
767 except(ValueError, OverflowError):
751 pass
768 pass
752 else:
769 else:
753 if sno < len(self.series):
770 if sno < len(self.series):
754 return self.series[sno]
771 return self.series[sno]
755 if not strict:
772 if not strict:
756 # return any partial match made above
773 # return any partial match made above
757 if res:
774 if res:
758 return res
775 return res
759 minus = patch.rsplit('-', 1)
776 minus = patch.rsplit('-', 1)
760 if len(minus) > 1:
777 if len(minus) > 1:
761 res = partial_name(minus[0])
778 res = partial_name(minus[0])
762 if res:
779 if res:
763 i = self.series.index(res)
780 i = self.series.index(res)
764 try:
781 try:
765 off = int(minus[1] or 1)
782 off = int(minus[1] or 1)
766 except(ValueError, OverflowError):
783 except(ValueError, OverflowError):
767 pass
784 pass
768 else:
785 else:
769 if i - off >= 0:
786 if i - off >= 0:
770 return self.series[i - off]
787 return self.series[i - off]
771 plus = patch.rsplit('+', 1)
788 plus = patch.rsplit('+', 1)
772 if len(plus) > 1:
789 if len(plus) > 1:
773 res = partial_name(plus[0])
790 res = partial_name(plus[0])
774 if res:
791 if res:
775 i = self.series.index(res)
792 i = self.series.index(res)
776 try:
793 try:
777 off = int(plus[1] or 1)
794 off = int(plus[1] or 1)
778 except(ValueError, OverflowError):
795 except(ValueError, OverflowError):
779 pass
796 pass
780 else:
797 else:
781 if i + off < len(self.series):
798 if i + off < len(self.series):
782 return self.series[i + off]
799 return self.series[i + off]
783 raise util.Abort(_("patch %s not in series") % patch)
800 raise util.Abort(_("patch %s not in series") % patch)
784
801
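
The comment block above lookup() lists the accepted name forms; as a concrete illustration (not part of the changeset), assume q is a queue instance whose series file contains foo.patch, bar.patch and baz.patch:

# illustration only -- q is a queue instance, series is foo/bar/baz.patch
q.lookup('1')            # offset into the series file      -> 'bar.patch'
q.lookup('foo')          # unique substring of a name       -> 'foo.patch'
q.lookup('bar.patch-1')  # the patch one before bar.patch   -> 'foo.patch'
q.lookup('bar.patch+1')  # the patch one after bar.patch    -> 'baz.patch'
q.lookup('qtip')         # topmost applied patch, when any patches are applied
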
785 def push(self, repo, patch=None, force=False, list=False,
802 def push(self, repo, patch=None, force=False, list=False,
786 mergeq=None, wlock=None):
803 mergeq=None, wlock=None):
787 if not wlock:
804 if not wlock:
788 wlock = repo.wlock()
805 wlock = repo.wlock()
789 patch = self.lookup(patch)
806 patch = self.lookup(patch)
790 if patch and self.isapplied(patch):
807 if patch and self.isapplied(patch):
791 self.ui.warn(_("patch %s is already applied\n") % patch)
808 self.ui.warn(_("patch %s is already applied\n") % patch)
792 sys.exit(1)
809 sys.exit(1)
793 if self.series_end() == len(self.series):
810 if self.series_end() == len(self.series):
794 self.ui.warn(_("patch series fully applied\n"))
811 self.ui.warn(_("patch series fully applied\n"))
795 sys.exit(1)
812 sys.exit(1)
796 if not force:
813 if not force:
797 self.check_localchanges(repo)
814 self.check_localchanges(repo)
798
815
799 self.applied_dirty = 1;
816 self.applied_dirty = 1;
800 start = self.series_end()
817 start = self.series_end()
801 if start > 0:
818 if start > 0:
802 self.check_toppatch(repo)
819 self.check_toppatch(repo)
803 if not patch:
820 if not patch:
804 patch = self.series[start]
821 patch = self.series[start]
805 end = start + 1
822 end = start + 1
806 else:
823 else:
807 end = self.series.index(patch, start) + 1
824 end = self.series.index(patch, start) + 1
808 s = self.series[start:end]
825 s = self.series[start:end]
809 if mergeq:
826 if mergeq:
810 ret = self.mergepatch(repo, mergeq, s, wlock)
827 ret = self.mergepatch(repo, mergeq, s, wlock)
811 else:
828 else:
812 ret = self.apply(repo, s, list, wlock=wlock)
829 ret = self.apply(repo, s, list, wlock=wlock)
813 top = self.applied[-1].name
830 top = self.applied[-1].name
814 if ret[0]:
831 if ret[0]:
815 self.ui.write("Errors during apply, please fix and refresh %s\n" %
832 self.ui.write("Errors during apply, please fix and refresh %s\n" %
816 top)
833 top)
817 else:
834 else:
818 self.ui.write("Now at: %s\n" % top)
835 self.ui.write("Now at: %s\n" % top)
819 return ret[0]
836 return ret[0]
820
837
821 def pop(self, repo, patch=None, force=False, update=True, all=False,
838 def pop(self, repo, patch=None, force=False, update=True, all=False,
822 wlock=None):
839 wlock=None):
823 def getfile(f, rev):
840 def getfile(f, rev):
824 t = repo.file(f).read(rev)
841 t = repo.file(f).read(rev)
825 try:
842 try:
826 repo.wfile(f, "w").write(t)
843 repo.wfile(f, "w").write(t)
827 except IOError:
844 except IOError:
828 try:
845 try:
829 os.makedirs(os.path.dirname(repo.wjoin(f)))
846 os.makedirs(os.path.dirname(repo.wjoin(f)))
830 except OSError, err:
847 except OSError, err:
831 if err.errno != errno.EEXIST: raise
848 if err.errno != errno.EEXIST: raise
832 repo.wfile(f, "w").write(t)
849 repo.wfile(f, "w").write(t)
833
850
834 if not wlock:
851 if not wlock:
835 wlock = repo.wlock()
852 wlock = repo.wlock()
836 if patch:
853 if patch:
837 # index, rev, patch
854 # index, rev, patch
838 info = self.isapplied(patch)
855 info = self.isapplied(patch)
839 if not info:
856 if not info:
840 patch = self.lookup(patch)
857 patch = self.lookup(patch)
841 info = self.isapplied(patch)
858 info = self.isapplied(patch)
842 if not info:
859 if not info:
843 raise util.Abort(_("patch %s is not applied") % patch)
860 raise util.Abort(_("patch %s is not applied") % patch)
844 if len(self.applied) == 0:
861 if len(self.applied) == 0:
845 self.ui.warn(_("no patches applied\n"))
862 self.ui.warn(_("no patches applied\n"))
846 sys.exit(1)
863 sys.exit(1)
847
864
848 if not update:
865 if not update:
849 parents = repo.dirstate.parents()
866 parents = repo.dirstate.parents()
850 rr = [ revlog.bin(x.rev) for x in self.applied ]
867 rr = [ revlog.bin(x.rev) for x in self.applied ]
851 for p in parents:
868 for p in parents:
852 if p in rr:
869 if p in rr:
853 self.ui.warn("qpop: forcing dirstate update\n")
870 self.ui.warn("qpop: forcing dirstate update\n")
854 update = True
871 update = True
855
872
856 if not force and update:
873 if not force and update:
857 self.check_localchanges(repo)
874 self.check_localchanges(repo)
858
875
859 self.applied_dirty = 1;
876 self.applied_dirty = 1;
860 end = len(self.applied)
877 end = len(self.applied)
861 if not patch:
878 if not patch:
862 if all:
879 if all:
863 popi = 0
880 popi = 0
864 else:
881 else:
865 popi = len(self.applied) - 1
882 popi = len(self.applied) - 1
866 else:
883 else:
867 popi = info[0] + 1
884 popi = info[0] + 1
868 if popi >= end:
885 if popi >= end:
869 self.ui.warn("qpop: %s is already at the top\n" % patch)
886 self.ui.warn("qpop: %s is already at the top\n" % patch)
870 return
887 return
871 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
888 info = [ popi ] + [self.applied[popi].rev, self.applied[popi].name]
872
889
873 start = info[0]
890 start = info[0]
874 rev = revlog.bin(info[1])
891 rev = revlog.bin(info[1])
875
892
876 # we know there are no local changes, so we can make a simplified
893 # we know there are no local changes, so we can make a simplified
877 # form of hg.update.
894 # form of hg.update.
878 if update:
895 if update:
879 top = self.check_toppatch(repo)
896 top = self.check_toppatch(repo)
880 qp = self.qparents(repo, rev)
897 qp = self.qparents(repo, rev)
881 changes = repo.changelog.read(qp)
898 changes = repo.changelog.read(qp)
882 mmap = repo.manifest.read(changes[0])
899 mmap = repo.manifest.read(changes[0])
883 m, a, r, d, u = repo.status(qp, top)[:5]
900 m, a, r, d, u = repo.status(qp, top)[:5]
884 if d:
901 if d:
885 raise util.Abort("deletions found between repo revs")
902 raise util.Abort("deletions found between repo revs")
886 for f in m:
903 for f in m:
887 getfile(f, mmap[f])
904 getfile(f, mmap[f])
888 for f in r:
905 for f in r:
889 getfile(f, mmap[f])
906 getfile(f, mmap[f])
890 util.set_exec(repo.wjoin(f), mmap.execf(f))
907 util.set_exec(repo.wjoin(f), mmap.execf(f))
891 repo.dirstate.update(m + r, 'n')
908 repo.dirstate.update(m + r, 'n')
892 for f in a:
909 for f in a:
893 try: os.unlink(repo.wjoin(f))
910 try: os.unlink(repo.wjoin(f))
894 except: raise
911 except: raise
895 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
912 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
896 except: pass
913 except: pass
897 if a:
914 if a:
898 repo.dirstate.forget(a)
915 repo.dirstate.forget(a)
899 repo.dirstate.setparents(qp, revlog.nullid)
916 repo.dirstate.setparents(qp, revlog.nullid)
900 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
917 self.strip(repo, rev, update=False, backup='strip', wlock=wlock)
901 del self.applied[start:end]
918 del self.applied[start:end]
902 if len(self.applied):
919 if len(self.applied):
903 self.ui.write("Now at: %s\n" % self.applied[-1].name)
920 self.ui.write("Now at: %s\n" % self.applied[-1].name)
904 else:
921 else:
905 self.ui.write("Patch queue now empty\n")
922 self.ui.write("Patch queue now empty\n")
906
923
907 def diff(self, repo, files):
924 def diff(self, repo, files):
908 top = self.check_toppatch(repo)
925 top = self.check_toppatch(repo)
909 if not top:
926 if not top:
910 self.ui.write("No patches applied\n")
927 self.ui.write("No patches applied\n")
911 return
928 return
912 qp = self.qparents(repo, top)
929 qp = self.qparents(repo, top)
913 self.printdiff(repo, qp, files=files)
930 self.printdiff(repo, qp, files=files)
914
931
915 def refresh(self, repo, msg='', short=False):
932 def refresh(self, repo, msg='', short=False):
916 if len(self.applied) == 0:
933 if len(self.applied) == 0:
917 self.ui.write("No patches applied\n")
934 self.ui.write("No patches applied\n")
918 return
935 return
919 wlock = repo.wlock()
936 wlock = repo.wlock()
920 self.check_toppatch(repo)
937 self.check_toppatch(repo)
921 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
938 (top, patch) = (self.applied[-1].rev, self.applied[-1].name)
922 top = revlog.bin(top)
939 top = revlog.bin(top)
923 cparents = repo.changelog.parents(top)
940 cparents = repo.changelog.parents(top)
924 patchparent = self.qparents(repo, top)
941 patchparent = self.qparents(repo, top)
925 message, comments, user, date, patchfound = self.readheaders(patch)
942 message, comments, user, date, patchfound = self.readheaders(patch)
926
943
927 patchf = self.opener(patch, "w")
944 patchf = self.opener(patch, "w")
928 msg = msg.rstrip()
945 msg = msg.rstrip()
929 if msg:
946 if msg:
930 if comments:
947 if comments:
931 # Remove existing message.
948 # Remove existing message.
932 ci = 0
949 ci = 0
933 for mi in range(len(message)):
950 for mi in range(len(message)):
934 while message[mi] != comments[ci]:
951 while message[mi] != comments[ci]:
935 ci += 1
952 ci += 1
936 del comments[ci]
953 del comments[ci]
937 comments.append(msg)
954 comments.append(msg)
938 if comments:
955 if comments:
939 comments = "\n".join(comments) + '\n\n'
956 comments = "\n".join(comments) + '\n\n'
940 patchf.write(comments)
957 patchf.write(comments)
941
958
942 tip = repo.changelog.tip()
959 tip = repo.changelog.tip()
943 if top == tip:
960 if top == tip:
944 # if the top of our patch queue is also the tip, there is an
961 # if the top of our patch queue is also the tip, there is an
945 # optimization here. We update the dirstate in place and strip
962 # optimization here. We update the dirstate in place and strip
946 # off the tip commit. Then just commit the current directory
963 # off the tip commit. Then just commit the current directory
947 # tree. We can also send repo.commit the list of files
964 # tree. We can also send repo.commit the list of files
948 # changed to speed up the diff
965 # changed to speed up the diff
949 #
966 #
950 # in short mode, we only diff the files included in the
967 # in short mode, we only diff the files included in the
951 # patch already
968 # patch already
952 #
969 #
953 # this should really read:
970 # this should really read:
954 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
971 # mm, dd, aa, aa2, uu = repo.status(tip, patchparent)[:5]
955 # but we do it backwards to take advantage of manifest/chlog
972 # but we do it backwards to take advantage of manifest/chlog
956 # caching against the next repo.status call
973 # caching against the next repo.status call
957 #
974 #
958 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
975 mm, aa, dd, aa2, uu = repo.status(patchparent, tip)[:5]
959 if short:
976 if short:
960 filelist = mm + aa + dd
977 filelist = mm + aa + dd
961 else:
978 else:
962 filelist = None
979 filelist = None
963 m, a, r, d, u = repo.status(files=filelist)[:5]
980 m, a, r, d, u = repo.status(files=filelist)[:5]
964
981
965 # we might end up with files that were added between tip and
982 # we might end up with files that were added between tip and
966 # the dirstate parent, but then changed in the local dirstate.
983 # the dirstate parent, but then changed in the local dirstate.
967 # in this case, we want them to only show up in the added section
984 # in this case, we want them to only show up in the added section
968 for x in m:
985 for x in m:
969 if x not in aa:
986 if x not in aa:
970 mm.append(x)
987 mm.append(x)
971 # we might end up with files added by the local dirstate that
988 # we might end up with files added by the local dirstate that
972 # were deleted by the patch. In this case, they should only
989 # were deleted by the patch. In this case, they should only
973 # show up in the changed section.
990 # show up in the changed section.
974 for x in a:
991 for x in a:
975 if x in dd:
992 if x in dd:
976 del dd[dd.index(x)]
993 del dd[dd.index(x)]
977 mm.append(x)
994 mm.append(x)
978 else:
995 else:
979 aa.append(x)
996 aa.append(x)
980 # make sure any files deleted in the local dirstate
997 # make sure any files deleted in the local dirstate
981 # are not in the add or change column of the patch
998 # are not in the add or change column of the patch
982 forget = []
999 forget = []
983 for x in d + r:
1000 for x in d + r:
984 if x in aa:
1001 if x in aa:
985 del aa[aa.index(x)]
1002 del aa[aa.index(x)]
986 forget.append(x)
1003 forget.append(x)
987 continue
1004 continue
988 elif x in mm:
1005 elif x in mm:
989 del mm[mm.index(x)]
1006 del mm[mm.index(x)]
990 dd.append(x)
1007 dd.append(x)
991
1008
992 m = list(util.unique(mm))
1009 m = list(util.unique(mm))
993 r = list(util.unique(dd))
1010 r = list(util.unique(dd))
994 a = list(util.unique(aa))
1011 a = list(util.unique(aa))
995 filelist = list(util.unique(m + r + a))
1012 filelist = list(util.unique(m + r + a))
996 self.printdiff(repo, patchparent, files=filelist,
1013 self.printdiff(repo, patchparent, files=filelist,
997 changes=(m, a, r, [], u), fp=patchf)
1014 changes=(m, a, r, [], u), fp=patchf)
998 patchf.close()
1015 patchf.close()
999
1016
1000 changes = repo.changelog.read(tip)
1017 changes = repo.changelog.read(tip)
1001 repo.dirstate.setparents(*cparents)
1018 repo.dirstate.setparents(*cparents)
1002 repo.dirstate.update(a, 'a')
1019 repo.dirstate.update(a, 'a')
1003 repo.dirstate.update(r, 'r')
1020 repo.dirstate.update(r, 'r')
1004 repo.dirstate.update(m, 'n')
1021 repo.dirstate.update(m, 'n')
1005 repo.dirstate.forget(forget)
1022 repo.dirstate.forget(forget)
1006
1023
1007 if not msg:
1024 if not msg:
1008 if not message:
1025 if not message:
1009 message = "patch queue: %s\n" % patch
1026 message = "patch queue: %s\n" % patch
1010 else:
1027 else:
1011 message = "\n".join(message)
1028 message = "\n".join(message)
1012 else:
1029 else:
1013 message = msg
1030 message = msg
1014
1031
1015 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1032 self.strip(repo, top, update=False, backup='strip', wlock=wlock)
1016 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1033 n = repo.commit(filelist, message, changes[1], force=1, wlock=wlock)
1017 self.applied[-1] = statusentry(revlog.hex(n), patch)
1034 self.applied[-1] = statusentry(revlog.hex(n), patch)
1018 self.applied_dirty = 1
1035 self.applied_dirty = 1
1019 else:
1036 else:
1020 self.printdiff(repo, patchparent, fp=patchf)
1037 self.printdiff(repo, patchparent, fp=patchf)
1021 patchf.close()
1038 patchf.close()
1022 self.pop(repo, force=True, wlock=wlock)
1039 self.pop(repo, force=True, wlock=wlock)
1023 self.push(repo, force=True, wlock=wlock)
1040 self.push(repo, force=True, wlock=wlock)
1024
1041
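# An illustrative aside on the "backwards" status call above (a sketch,
# assuming repo.status(a, b) reports the change from a to b as
# (modified, added, removed, deleted, unknown)): swapping the two revisions
# swaps the added and removed lists while leaving the modified list untouched.
# For a hypothetical patch that adds f_new, removes f_old and modifies f_mod
# on top of patchparent:
#
#   repo.status(tip, patchparent)[:3]  ->  (['f_mod'], ['f_old'], ['f_new'])
#   repo.status(patchparent, tip)[:3]  ->  (['f_mod'], ['f_new'], ['f_old'])
#
# which is why the code unpacks "mm, aa, dd, ..." rather than the
# "mm, dd, aa, ..." order of the spelled-out version in the comment.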
1025 def init(self, repo, create=False):
1042 def init(self, repo, create=False):
1026 if os.path.isdir(self.path):
1043 if os.path.isdir(self.path):
1027 raise util.Abort(_("patch queue directory already exists"))
1044 raise util.Abort(_("patch queue directory already exists"))
1028 os.mkdir(self.path)
1045 os.mkdir(self.path)
1029 if create:
1046 if create:
1030 return self.qrepo(create=True)
1047 return self.qrepo(create=True)
1031
1048
1032 def unapplied(self, repo, patch=None):
1049 def unapplied(self, repo, patch=None):
1033 if patch and patch not in self.series:
1050 if patch and patch not in self.series:
1034 raise util.Abort(_("patch %s is not in series file") % patch)
1051 raise util.Abort(_("patch %s is not in series file") % patch)
1035 if not patch:
1052 if not patch:
1036 start = self.series_end()
1053 start = self.series_end()
1037 else:
1054 else:
1038 start = self.series.index(patch) + 1
1055 start = self.series.index(patch) + 1
1039 unapplied = []
1056 unapplied = []
1040 for i in xrange(start, len(self.series)):
1057 for i in xrange(start, len(self.series)):
1041 pushable, reason = self.pushable(i)
1058 pushable, reason = self.pushable(i)
1042 if pushable:
1059 if pushable:
1043 unapplied.append((i, self.series[i]))
1060 unapplied.append((i, self.series[i]))
1044 self.explain_pushable(i)
1061 self.explain_pushable(i)
1045 return unapplied
1062 return unapplied
1046
1063
1047 def qseries(self, repo, missing=None, summary=False):
1064 def qseries(self, repo, missing=None, summary=False):
1048 start = self.series_end(all_patches=True)
1065 start = self.series_end(all_patches=True)
1049 if not missing:
1066 if not missing:
1050 for i in range(len(self.series)):
1067 for i in range(len(self.series)):
1051 patch = self.series[i]
1068 patch = self.series[i]
1052 if self.ui.verbose:
1069 if self.ui.verbose:
1053 if i < start:
1070 if i < start:
1054 status = 'A'
1071 status = 'A'
1055 elif self.pushable(i)[0]:
1072 elif self.pushable(i)[0]:
1056 status = 'U'
1073 status = 'U'
1057 else:
1074 else:
1058 status = 'G'
1075 status = 'G'
1059 self.ui.write('%d %s ' % (i, status))
1076 self.ui.write('%d %s ' % (i, status))
1060 if summary:
1077 if summary:
1061 msg = self.readheaders(patch)[0]
1078 msg = self.readheaders(patch)[0]
1062 msg = msg and ': ' + msg[0] or ': '
1079 msg = msg and ': ' + msg[0] or ': '
1063 else:
1080 else:
1064 msg = ''
1081 msg = ''
1065 self.ui.write('%s%s\n' % (patch, msg))
1082 self.ui.write('%s%s\n' % (patch, msg))
1066 else:
1083 else:
1067 msng_list = []
1084 msng_list = []
1068 for root, dirs, files in os.walk(self.path):
1085 for root, dirs, files in os.walk(self.path):
1069 d = root[len(self.path) + 1:]
1086 d = root[len(self.path) + 1:]
1070 for f in files:
1087 for f in files:
1071 fl = os.path.join(d, f)
1088 fl = os.path.join(d, f)
1072 if (fl not in self.series and
1089 if (fl not in self.series and
1073 fl not in (self.status_path, self.series_path)
1090 fl not in (self.status_path, self.series_path)
1074 and not fl.startswith('.')):
1091 and not fl.startswith('.')):
1075 msng_list.append(fl)
1092 msng_list.append(fl)
1076 msng_list.sort()
1093 msng_list.sort()
1077 for x in msng_list:
1094 for x in msng_list:
1078 if self.ui.verbose:
1095 if self.ui.verbose:
1079 self.ui.write("D ")
1096 self.ui.write("D ")
1080 self.ui.write("%s\n" % x)
1097 self.ui.write("%s\n" % x)
1081
1098
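# A note on the status letters printed above in verbose mode: 'A' marks an
# applied patch, 'U' an unapplied patch that is currently pushable, 'G' an
# unapplied patch blocked by a guard, and 'D' (in the missing-files branch)
# a file that lives in the patch directory but is not listed in the series
# file.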
1082 def issaveline(self, l):
1099 def issaveline(self, l):
1083 if l.name == '.hg.patches.save.line':
1100 if l.name == '.hg.patches.save.line':
1084 return True
1101 return True
1085
1102
1086 def qrepo(self, create=False):
1103 def qrepo(self, create=False):
1087 if create or os.path.isdir(self.join(".hg")):
1104 if create or os.path.isdir(self.join(".hg")):
1088 return hg.repository(self.ui, path=self.path, create=create)
1105 return hg.repository(self.ui, path=self.path, create=create)
1089
1106
1090 def restore(self, repo, rev, delete=None, qupdate=None):
1107 def restore(self, repo, rev, delete=None, qupdate=None):
1091 c = repo.changelog.read(rev)
1108 c = repo.changelog.read(rev)
1092 desc = c[4].strip()
1109 desc = c[4].strip()
1093 lines = desc.splitlines()
1110 lines = desc.splitlines()
1094 i = 0
1111 i = 0
1095 datastart = None
1112 datastart = None
1096 series = []
1113 series = []
1097 applied = []
1114 applied = []
1098 qpp = None
1115 qpp = None
1099 for i in xrange(0, len(lines)):
1116 for i in xrange(0, len(lines)):
1100 if lines[i] == 'Patch Data:':
1117 if lines[i] == 'Patch Data:':
1101 datastart = i + 1
1118 datastart = i + 1
1102 elif lines[i].startswith('Dirstate:'):
1119 elif lines[i].startswith('Dirstate:'):
1103 l = lines[i].rstrip()
1120 l = lines[i].rstrip()
1104 l = l[10:].split(' ')
1121 l = l[10:].split(' ')
1105 qpp = [ hg.bin(x) for x in l ]
1122 qpp = [ hg.bin(x) for x in l ]
1106 elif datastart != None:
1123 elif datastart != None:
1107 l = lines[i].rstrip()
1124 l = lines[i].rstrip()
1108 se = statusentry(l)
1125 se = statusentry(l)
1109 file_ = se.name
1126 file_ = se.name
1110 if se.rev:
1127 if se.rev:
1111 applied.append(se)
1128 applied.append(se)
1112 series.append(file_)
1129 series.append(file_)
1113 if datastart == None:
1130 if datastart == None:
1114 self.ui.warn("No saved patch data found\n")
1131 self.ui.warn("No saved patch data found\n")
1115 return 1
1132 return 1
1116 self.ui.warn("restoring status: %s\n" % lines[0])
1133 self.ui.warn("restoring status: %s\n" % lines[0])
1117 self.full_series = series
1134 self.full_series = series
1118 self.applied = applied
1135 self.applied = applied
1119 self.parse_series()
1136 self.parse_series()
1120 self.series_dirty = 1
1137 self.series_dirty = 1
1121 self.applied_dirty = 1
1138 self.applied_dirty = 1
1122 heads = repo.changelog.heads()
1139 heads = repo.changelog.heads()
1123 if delete:
1140 if delete:
1124 if rev not in heads:
1141 if rev not in heads:
1125 self.ui.warn("save entry has children, leaving it alone\n")
1142 self.ui.warn("save entry has children, leaving it alone\n")
1126 else:
1143 else:
1127 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1144 self.ui.warn("removing save entry %s\n" % hg.short(rev))
1128 pp = repo.dirstate.parents()
1145 pp = repo.dirstate.parents()
1129 if rev in pp:
1146 if rev in pp:
1130 update = True
1147 update = True
1131 else:
1148 else:
1132 update = False
1149 update = False
1133 self.strip(repo, rev, update=update, backup='strip')
1150 self.strip(repo, rev, update=update, backup='strip')
1134 if qpp:
1151 if qpp:
1135 self.ui.warn("saved queue repository parents: %s %s\n" %
1152 self.ui.warn("saved queue repository parents: %s %s\n" %
1136 (hg.short(qpp[0]), hg.short(qpp[1])))
1153 (hg.short(qpp[0]), hg.short(qpp[1])))
1137 if qupdate:
1154 if qupdate:
1138 self.ui.warn("queue directory updating\n")
1155 self.ui.warn("queue directory updating\n")
1139 r = self.qrepo()
1156 r = self.qrepo()
1140 if not r:
1157 if not r:
1141 self.ui.warn("Unable to load queue repository\n")
1158 self.ui.warn("Unable to load queue repository\n")
1142 return 1
1159 return 1
1143 hg.clean(r, qpp[0])
1160 hg.clean(r, qpp[0])
1144
1161
1145 def save(self, repo, msg=None):
1162 def save(self, repo, msg=None):
1146 if len(self.applied) == 0:
1163 if len(self.applied) == 0:
1147 self.ui.warn("save: no patches applied, exiting\n")
1164 self.ui.warn("save: no patches applied, exiting\n")
1148 return 1
1165 return 1
1149 if self.issaveline(self.applied[-1]):
1166 if self.issaveline(self.applied[-1]):
1150 self.ui.warn("status is already saved\n")
1167 self.ui.warn("status is already saved\n")
1151 return 1
1168 return 1
1152
1169
1153 ar = [ ':' + x for x in self.full_series ]
1170 ar = [ ':' + x for x in self.full_series ]
1154 if not msg:
1171 if not msg:
1155 msg = "hg patches saved state"
1172 msg = "hg patches saved state"
1156 else:
1173 else:
1157 msg = "hg patches: " + msg.rstrip('\r\n')
1174 msg = "hg patches: " + msg.rstrip('\r\n')
1158 r = self.qrepo()
1175 r = self.qrepo()
1159 if r:
1176 if r:
1160 pp = r.dirstate.parents()
1177 pp = r.dirstate.parents()
1161 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1178 msg += "\nDirstate: %s %s" % (hg.hex(pp[0]), hg.hex(pp[1]))
1162 msg += "\n\nPatch Data:\n"
1179 msg += "\n\nPatch Data:\n"
1163 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1180 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1164 "\n".join(ar) + '\n' or "")
1181 "\n".join(ar) + '\n' or "")
1165 n = repo.commit(None, text, user=None, force=1)
1182 n = repo.commit(None, text, user=None, force=1)
1166 if not n:
1183 if not n:
1167 self.ui.warn("repo commit failed\n")
1184 self.ui.warn("repo commit failed\n")
1168 return 1
1185 return 1
1169 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1186 self.applied.append(statusentry(revlog.hex(n),'.hg.patches.save.line'))
1170 self.applied_dirty = 1
1187 self.applied_dirty = 1
1171
1188
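# A sketch of the text save() builds (and restore() parses back), assuming
# statusentry renders as "rev:name" like the status file entries it mirrors;
# the hashes below are placeholders, not real revisions:
#
#   hg patches saved state
#   Dirstate: <p1-hex> <p2-hex>
#
#   Patch Data:
#   <rev-hex>:applied-one.patch
#   <rev-hex>:applied-two.patch
#   :applied-one.patch
#   :applied-two.patch
#   :still-unapplied.patch
#
# restore() reads every line after "Patch Data:" back into the series and
# treats the ones that carry a revision as applied patches.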
1172 def full_series_end(self):
1189 def full_series_end(self):
1173 if len(self.applied) > 0:
1190 if len(self.applied) > 0:
1174 p = self.applied[-1].name
1191 p = self.applied[-1].name
1175 end = self.find_series(p)
1192 end = self.find_series(p)
1176 if end == None:
1193 if end == None:
1177 return len(self.full_series)
1194 return len(self.full_series)
1178 return end + 1
1195 return end + 1
1179 return 0
1196 return 0
1180
1197
1181 def series_end(self, all_patches=False):
1198 def series_end(self, all_patches=False):
1182 end = 0
1199 end = 0
1183 def next(start):
1200 def next(start):
1184 if all_patches:
1201 if all_patches:
1185 return start
1202 return start
1186 i = start
1203 i = start
1187 while i < len(self.series):
1204 while i < len(self.series):
1188 p, reason = self.pushable(i)
1205 p, reason = self.pushable(i)
1189 if p:
1206 if p:
1190 break
1207 break
1191 self.explain_pushable(i)
1208 self.explain_pushable(i)
1192 i += 1
1209 i += 1
1193 return i
1210 return i
1194 if len(self.applied) > 0:
1211 if len(self.applied) > 0:
1195 p = self.applied[-1].name
1212 p = self.applied[-1].name
1196 try:
1213 try:
1197 end = self.series.index(p)
1214 end = self.series.index(p)
1198 except ValueError:
1215 except ValueError:
1199 return 0
1216 return 0
1200 return next(end + 1)
1217 return next(end + 1)
1201 return next(end)
1218 return next(end)
1202
1219
1203 def qapplied(self, repo, patch=None):
1220 def qapplied(self, repo, patch=None):
1204 if patch and patch not in self.series:
1221 if patch and patch not in self.series:
1205 raise util.Abort(_("patch %s is not in series file") % patch)
1222 raise util.Abort(_("patch %s is not in series file") % patch)
1206 if not patch:
1223 if not patch:
1207 end = len(self.applied)
1224 end = len(self.applied)
1208 else:
1225 else:
1209 end = self.series.index(patch) + 1
1226 end = self.series.index(patch) + 1
1210 for x in xrange(end):
1227 for x in xrange(end):
1211 p = self.appliedname(x)
1228 p = self.appliedname(x)
1212 self.ui.write("%s\n" % p)
1229 self.ui.write("%s\n" % p)
1213
1230
1214 def appliedname(self, index):
1231 def appliedname(self, index):
1215 pname = self.applied[index].name
1232 pname = self.applied[index].name
1216 if not self.ui.verbose:
1233 if not self.ui.verbose:
1217 p = pname
1234 p = pname
1218 else:
1235 else:
1219 p = str(self.series.index(pname)) + " " + pname
1236 p = str(self.series.index(pname)) + " " + pname
1220 return p
1237 return p
1221
1238
1222 def top(self, repo):
1239 def top(self, repo):
1223 if len(self.applied):
1240 if len(self.applied):
1224 p = self.appliedname(-1)
1241 p = self.appliedname(-1)
1225 self.ui.write(p + '\n')
1242 self.ui.write(p + '\n')
1226 else:
1243 else:
1227 self.ui.write("No patches applied\n")
1244 self.ui.write("No patches applied\n")
1228
1245
1229 def next(self, repo):
1246 def next(self, repo):
1230 end = self.series_end()
1247 end = self.series_end()
1231 if end == len(self.series):
1248 if end == len(self.series):
1232 self.ui.write("All patches applied\n")
1249 self.ui.write("All patches applied\n")
1233 else:
1250 else:
1234 p = self.series[end]
1251 p = self.series[end]
1235 if self.ui.verbose:
1252 if self.ui.verbose:
1236 self.ui.write("%d " % self.series.index(p))
1253 self.ui.write("%d " % self.series.index(p))
1237 self.ui.write(p + '\n')
1254 self.ui.write(p + '\n')
1238
1255
1239 def prev(self, repo):
1256 def prev(self, repo):
1240 if len(self.applied) > 1:
1257 if len(self.applied) > 1:
1241 p = self.appliedname(-2)
1258 p = self.appliedname(-2)
1242 self.ui.write(p + '\n')
1259 self.ui.write(p + '\n')
1243 elif len(self.applied) == 1:
1260 elif len(self.applied) == 1:
1244 self.ui.write("Only one patch applied\n")
1261 self.ui.write("Only one patch applied\n")
1245 else:
1262 else:
1246 self.ui.write("No patches applied\n")
1263 self.ui.write("No patches applied\n")
1247
1264
1248 def qimport(self, repo, files, patch=None, existing=None, force=None):
1265 def qimport(self, repo, files, patch=None, existing=None, force=None):
1249 if len(files) > 1 and patch:
1266 if len(files) > 1 and patch:
1250 raise util.Abort(_('option "-n" not valid when importing multiple '
1267 raise util.Abort(_('option "-n" not valid when importing multiple '
1251 'files'))
1268 'files'))
1252 i = 0
1269 i = 0
1253 added = []
1270 added = []
1254 for filename in files:
1271 for filename in files:
1255 if existing:
1272 if existing:
1256 if not patch:
1273 if not patch:
1257 patch = filename
1274 patch = filename
1258 if not os.path.isfile(self.join(patch)):
1275 if not os.path.isfile(self.join(patch)):
1259 raise util.Abort(_("patch %s does not exist") % patch)
1276 raise util.Abort(_("patch %s does not exist") % patch)
1260 else:
1277 else:
1261 try:
1278 try:
1262 text = file(filename).read()
1279 text = file(filename).read()
1263 except IOError:
1280 except IOError:
1264 raise util.Abort(_("unable to read %s") % filename)
1281 raise util.Abort(_("unable to read %s") % filename)
1265 if not patch:
1282 if not patch:
1266 patch = os.path.split(filename)[1]
1283 patch = os.path.split(filename)[1]
1267 if not force and os.path.exists(self.join(patch)):
1284 if not force and os.path.exists(self.join(patch)):
1268 raise util.Abort(_('patch "%s" already exists') % patch)
1285 raise util.Abort(_('patch "%s" already exists') % patch)
1269 patchf = self.opener(patch, "w")
1286 patchf = self.opener(patch, "w")
1270 patchf.write(text)
1287 patchf.write(text)
1271 if patch in self.series:
1288 if patch in self.series:
1272 raise util.Abort(_('patch %s is already in the series file')
1289 raise util.Abort(_('patch %s is already in the series file')
1273 % patch)
1290 % patch)
1274 index = self.full_series_end() + i
1291 index = self.full_series_end() + i
1275 self.full_series[index:index] = [patch]
1292 self.full_series[index:index] = [patch]
1276 self.parse_series()
1293 self.parse_series()
1277 self.ui.warn("adding %s to series file\n" % patch)
1294 self.ui.warn("adding %s to series file\n" % patch)
1278 i += 1
1295 i += 1
1279 added.append(patch)
1296 added.append(patch)
1280 patch = None
1297 patch = None
1281 self.series_dirty = 1
1298 self.series_dirty = 1
1282 qrepo = self.qrepo()
1299 qrepo = self.qrepo()
1283 if qrepo:
1300 if qrepo:
1284 qrepo.add(added)
1301 qrepo.add(added)
1285
1302
1286 def delete(ui, repo, patch, *patches, **opts):
1303 def delete(ui, repo, patch, **opts):
1287 """remove patches from queue
1304 """remove a patch from the series file
1288
1305
1289 The patches must not be applied.
1306 The patch must not be applied.
1290 With -k, the patch files are preserved in the patch directory."""
1307 With -f, deletes the patch file as well as the series entry."""
1291 q = repo.mq
1308 q = repo.mq
1292 q.delete(repo, (patch,) + patches, keep=opts.get('keep'))
1309 q.delete(repo, patch, force=opts.get('force'))
1293 q.save_dirty()
1310 q.save_dirty()
1294 return 0
1311 return 0
1295
1312
1296 def applied(ui, repo, patch=None, **opts):
1313 def applied(ui, repo, patch=None, **opts):
1297 """print the patches already applied"""
1314 """print the patches already applied"""
1298 repo.mq.qapplied(repo, patch)
1315 repo.mq.qapplied(repo, patch)
1299 return 0
1316 return 0
1300
1317
1301 def unapplied(ui, repo, patch=None, **opts):
1318 def unapplied(ui, repo, patch=None, **opts):
1302 """print the patches not yet applied"""
1319 """print the patches not yet applied"""
1303 for i, p in repo.mq.unapplied(repo, patch):
1320 for i, p in repo.mq.unapplied(repo, patch):
1304 if ui.verbose:
1321 if ui.verbose:
1305 ui.write("%d " % i)
1322 ui.write("%d " % i)
1306 ui.write("%s\n" % p)
1323 ui.write("%s\n" % p)
1307
1324
1308 def qimport(ui, repo, *filename, **opts):
1325 def qimport(ui, repo, *filename, **opts):
1309 """import a patch"""
1326 """import a patch"""
1310 q = repo.mq
1327 q = repo.mq
1311 q.qimport(repo, filename, patch=opts['name'],
1328 q.qimport(repo, filename, patch=opts['name'],
1312 existing=opts['existing'], force=opts['force'])
1329 existing=opts['existing'], force=opts['force'])
1313 q.save_dirty()
1330 q.save_dirty()
1314 return 0
1331 return 0
1315
1332
1316 def init(ui, repo, **opts):
1333 def init(ui, repo, **opts):
1317 """init a new queue repository
1334 """init a new queue repository
1318
1335
1319 The queue repository is unversioned by default. If -c is
1336 The queue repository is unversioned by default. If -c is
1320 specified, qinit will create a separate nested repository
1337 specified, qinit will create a separate nested repository
1321 for patches. Use qcommit to commit changes to this queue
1338 for patches. Use qcommit to commit changes to this queue
1322 repository."""
1339 repository."""
1323 q = repo.mq
1340 q = repo.mq
1324 r = q.init(repo, create=opts['create_repo'])
1341 r = q.init(repo, create=opts['create_repo'])
1325 q.save_dirty()
1342 q.save_dirty()
1326 if r:
1343 if r:
1327 fp = r.wopener('.hgignore', 'w')
1344 fp = r.wopener('.hgignore', 'w')
1328 print >> fp, 'syntax: glob'
1345 print >> fp, 'syntax: glob'
1329 print >> fp, 'status'
1346 print >> fp, 'status'
1330 fp.close()
1347 fp.close()
1331 r.wopener('series', 'w').close()
1348 r.wopener('series', 'w').close()
1332 r.add(['.hgignore', 'series'])
1349 r.add(['.hgignore', 'series'])
1333 return 0
1350 return 0
1334
1351
1335 def clone(ui, source, dest=None, **opts):
1352 def clone(ui, source, dest=None, **opts):
1336 '''clone main and patch repository at same time
1353 '''clone main and patch repository at same time
1337
1354
1338 If source is local, destination will have no patches applied. If
1355 If source is local, destination will have no patches applied. If
1339 source is remote, this command cannot check whether patches are
1356 source is remote, this command cannot check whether patches are
1340 applied in the source, so it cannot guarantee that no patches are
1357 applied in the source, so it cannot guarantee that no patches are
1341 applied in the destination. If you clone a remote repository, make
1358 applied in the destination. If you clone a remote repository, make
1342 sure it has no patches applied before doing so.
1359 sure it has no patches applied before doing so.
1343
1360
1344 Source patch repository is looked for in <src>/.hg/patches by
1361 Source patch repository is looked for in <src>/.hg/patches by
1345 default. Use -p <url> to change.
1362 default. Use -p <url> to change.
1346 '''
1363 '''
1347 commands.setremoteconfig(ui, opts)
1364 commands.setremoteconfig(ui, opts)
1348 if dest is None:
1365 if dest is None:
1349 dest = hg.defaultdest(source)
1366 dest = hg.defaultdest(source)
1350 sr = hg.repository(ui, ui.expandpath(source))
1367 sr = hg.repository(ui, ui.expandpath(source))
1351 qbase, destrev = None, None
1368 qbase, destrev = None, None
1352 if sr.local():
1369 if sr.local():
1353 reposetup(ui, sr)
1370 reposetup(ui, sr)
1354 if sr.mq.applied:
1371 if sr.mq.applied:
1355 qbase = revlog.bin(sr.mq.applied[0].rev)
1372 qbase = revlog.bin(sr.mq.applied[0].rev)
1356 if not hg.islocal(dest):
1373 if not hg.islocal(dest):
1357 destrev = sr.parents(qbase)[0]
1374 destrev = sr.parents(qbase)[0]
1358 ui.note(_('cloning main repo\n'))
1375 ui.note(_('cloning main repo\n'))
1359 sr, dr = hg.clone(ui, sr, dest,
1376 sr, dr = hg.clone(ui, sr, dest,
1360 pull=opts['pull'],
1377 pull=opts['pull'],
1361 rev=destrev,
1378 rev=destrev,
1362 update=False,
1379 update=False,
1363 stream=opts['uncompressed'])
1380 stream=opts['uncompressed'])
1364 ui.note(_('cloning patch repo\n'))
1381 ui.note(_('cloning patch repo\n'))
1365 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1382 spr, dpr = hg.clone(ui, opts['patches'] or (sr.url() + '/.hg/patches'),
1366 dr.url() + '/.hg/patches',
1383 dr.url() + '/.hg/patches',
1367 pull=opts['pull'],
1384 pull=opts['pull'],
1368 update=not opts['noupdate'],
1385 update=not opts['noupdate'],
1369 stream=opts['uncompressed'])
1386 stream=opts['uncompressed'])
1370 if dr.local():
1387 if dr.local():
1371 if qbase:
1388 if qbase:
1372 ui.note(_('stripping applied patches from destination repo\n'))
1389 ui.note(_('stripping applied patches from destination repo\n'))
1373 reposetup(ui, dr)
1390 reposetup(ui, dr)
1374 dr.mq.strip(dr, qbase, update=False, backup=None)
1391 dr.mq.strip(dr, qbase, update=False, backup=None)
1375 if not opts['noupdate']:
1392 if not opts['noupdate']:
1376 ui.note(_('updating destination repo\n'))
1393 ui.note(_('updating destination repo\n'))
1377 hg.update(dr, dr.changelog.tip())
1394 hg.update(dr, dr.changelog.tip())
1378
1395
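# A usage illustration for qclone (example.com is a placeholder):
#   hg qclone http://example.com/hg/project project
# clones the main repository into ./project and the patch repository from
# http://example.com/hg/project/.hg/patches into ./project/.hg/patches;
# pass -p <url> to take the patch repository from somewhere else.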
1379 def commit(ui, repo, *pats, **opts):
1396 def commit(ui, repo, *pats, **opts):
1380 """commit changes in the queue repository"""
1397 """commit changes in the queue repository"""
1381 q = repo.mq
1398 q = repo.mq
1382 r = q.qrepo()
1399 r = q.qrepo()
1383 if not r: raise util.Abort('no queue repository')
1400 if not r: raise util.Abort('no queue repository')
1384 commands.commit(r.ui, r, *pats, **opts)
1401 commands.commit(r.ui, r, *pats, **opts)
1385
1402
1386 def series(ui, repo, **opts):
1403 def series(ui, repo, **opts):
1387 """print the entire series file"""
1404 """print the entire series file"""
1388 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1405 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1389 return 0
1406 return 0
1390
1407
1391 def top(ui, repo, **opts):
1408 def top(ui, repo, **opts):
1392 """print the name of the current patch"""
1409 """print the name of the current patch"""
1393 repo.mq.top(repo)
1410 repo.mq.top(repo)
1394 return 0
1411 return 0
1395
1412
1396 def next(ui, repo, **opts):
1413 def next(ui, repo, **opts):
1397 """print the name of the next patch"""
1414 """print the name of the next patch"""
1398 repo.mq.next(repo)
1415 repo.mq.next(repo)
1399 return 0
1416 return 0
1400
1417
1401 def prev(ui, repo, **opts):
1418 def prev(ui, repo, **opts):
1402 """print the name of the previous patch"""
1419 """print the name of the previous patch"""
1403 repo.mq.prev(repo)
1420 repo.mq.prev(repo)
1404 return 0
1421 return 0
1405
1422
1406 def new(ui, repo, patch, **opts):
1423 def new(ui, repo, patch, **opts):
1407 """create a new patch
1424 """create a new patch
1408
1425
1409 qnew creates a new patch on top of the currently-applied patch
1426 qnew creates a new patch on top of the currently-applied patch
1410 (if any). It will refuse to run if there are any outstanding
1427 (if any). It will refuse to run if there are any outstanding
1411 changes unless -f is specified, in which case the patch will
1428 changes unless -f is specified, in which case the patch will
1412 be initialised with them.
1429 be initialised with them.
1413
1430
1414 -m or -l set the patch header as well as the commit message.
1431 -m or -l set the patch header as well as the commit message.
1415 If neither is specified, the patch header is empty and the
1432 If neither is specified, the patch header is empty and the
1416 commit message is 'New patch: PATCH'"""
1433 commit message is 'New patch: PATCH'"""
1417 q = repo.mq
1434 q = repo.mq
1418 message = commands.logmessage(opts)
1435 message = commands.logmessage(opts)
1419 q.new(repo, patch, msg=message, force=opts['force'])
1436 q.new(repo, patch, msg=message, force=opts['force'])
1420 q.save_dirty()
1437 q.save_dirty()
1421 return 0
1438 return 0
1422
1439
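# A usage illustration for qnew (the patch name and message are made up):
#   hg qnew -m 'teach frobnicate about widgets' frobnicate-widgets.patch
# creates .hg/patches/frobnicate-widgets.patch on top of the current top
# patch; without -m or -l the commit message defaults to
# 'New patch: frobnicate-widgets.patch'.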
1423 def refresh(ui, repo, **opts):
1440 def refresh(ui, repo, **opts):
1424 """update the current patch"""
1441 """update the current patch"""
1425 q = repo.mq
1442 q = repo.mq
1426 message = commands.logmessage(opts)
1443 message = commands.logmessage(opts)
1427 if opts['edit']:
1444 if opts['edit']:
1428 if message:
1445 if message:
1429 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1446 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1430 patch = q.applied[-1].name
1447 patch = q.applied[-1].name
1431 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1448 (message, comment, user, date, hasdiff) = q.readheaders(patch)
1432 message = ui.edit('\n'.join(message), user or ui.username())
1449 message = ui.edit('\n'.join(message), user or ui.username())
1433 q.refresh(repo, msg=message, short=opts['short'])
1450 q.refresh(repo, msg=message, short=opts['short'])
1434 q.save_dirty()
1451 q.save_dirty()
1435 return 0
1452 return 0
1436
1453
1437 def diff(ui, repo, *files, **opts):
1454 def diff(ui, repo, *files, **opts):
1438 """diff of the current patch"""
1455 """diff of the current patch"""
1439 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1456 # deep in the dirstate code, the walkhelper method wants a list, not a tuple
1440 repo.mq.diff(repo, list(files))
1457 repo.mq.diff(repo, list(files))
1441 return 0
1458 return 0
1442
1459
1443 def fold(ui, repo, *files, **opts):
1460 def fold(ui, repo, *files, **opts):
1444 """fold the named patches into the current patch
1461 """fold the named patches into the current patch
1445
1462
1446 Patches must not yet be applied. Each patch will be successively
1463 Patches must not yet be applied. Each patch will be successively
1447 applied to the current patch in the order given. If all the
1464 applied to the current patch in the order given. If all the
1448 patches apply successfully, the current patch will be refreshed
1465 patches apply successfully, the current patch will be refreshed
1449 with the new cumulative patch, and the folded patches will
1466 with the new cumulative patch, and the folded patches will
1450 be deleted. With -k/--keep, the folded patch files will not
1467 be deleted. With -f/--force, the folded patch files will
1451 be removed afterwards.
1468 be removed afterwards.
1452
1469
1453 The header for each folded patch will be concatenated with
1470 The header for each folded patch will be concatenated with
1454 the current patch header, separated by a line of '* * *'."""
1471 the current patch header, separated by a line of '* * *'."""
1455
1472
1456 q = repo.mq
1473 q = repo.mq
1457
1474
1458 if not files:
1475 if not files:
1459 raise util.Abort(_('qfold requires at least one patch name'))
1476 raise util.Abort(_('qfold requires at least one patch name'))
1460 if not q.check_toppatch(repo):
1477 if not q.check_toppatch(repo):
1461 raise util.Abort(_('No patches applied\n'))
1478 raise util.Abort(_('No patches applied\n'))
1462
1479
1463 message = commands.logmessage(opts)
1480 message = commands.logmessage(opts)
1464 if opts['edit']:
1481 if opts['edit']:
1465 if message:
1482 if message:
1466 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1483 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1467
1484
1468 parent = q.lookup('qtip')
1485 parent = q.lookup('qtip')
1469 patches = []
1486 patches = []
1470 messages = []
1487 messages = []
1471 for f in files:
1488 for f in files:
1472 patch = q.lookup(f)
1489 patch = q.lookup(f)
1473 if patch in patches or patch == parent:
1490 if patch in patches or patch == parent:
1474 ui.warn(_('Skipping already folded patch %s\n') % patch)
1491 ui.warn(_('Skipping already folded patch %s\n') % patch)
1475 if q.isapplied(patch):
1492 if q.isapplied(patch):
1476 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1493 raise util.Abort(_('qfold cannot fold already applied patch %s') % patch)
1477 patches.append(patch)
1494 patches.append(patch)
1478
1495
1479 for patch in patches:
1496 for patch in patches:
1480 if not message:
1497 if not message:
1481 messages.append(q.readheaders(patch)[0])
1498 messages.append(q.readheaders(patch)[0])
1482 pf = q.join(patch)
1499 pf = q.join(patch)
1483 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1500 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1484 if not patchsuccess:
1501 if not patchsuccess:
1485 raise util.Abort(_('Error folding patch %s') % patch)
1502 raise util.Abort(_('Error folding patch %s') % patch)
1486
1503
1487 if not message:
1504 if not message:
1488 message, comments, user = q.readheaders(parent)[0:3]
1505 message, comments, user = q.readheaders(parent)[0:3]
1489 for msg in messages:
1506 for msg in messages:
1490 message.append('* * *')
1507 message.append('* * *')
1491 message.extend(msg)
1508 message.extend(msg)
1492 message = '\n'.join(message)
1509 message = '\n'.join(message)
1493
1510
1494 if opts['edit']:
1511 if opts['edit']:
1495 message = ui.edit(message, user or ui.username())
1512 message = ui.edit(message, user or ui.username())
1496
1513
1497 q.refresh(repo, msg=message)
1514 q.refresh(repo, msg=message)
1498
1515
1499 for patch in patches:
1516 for patch in patches:
1500 q.delete(repo, patch, keep=opts['keep'])
1517 q.delete(repo, patch, force=opts['force'])
1501
1518
1502 q.save_dirty()
1519 q.save_dirty()
1503
1520
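# A sketch of the header concatenation qfold performs (patch names and
# headers are made up): folding b.patch into the applied a.patch refreshes
# a.patch with a header of the form
#
#   header lines of a.patch
#   * * *
#   header lines of b.patch
#
# after which b.patch is deleted from the series.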
1504 def guard(ui, repo, *args, **opts):
1521 def guard(ui, repo, *args, **opts):
1505 '''set or print guards for a patch
1522 '''set or print guards for a patch
1506
1523
1507 guards control whether a patch can be pushed. a patch with no
1524 guards control whether a patch can be pushed. a patch with no
1508 guards is always pushed. a patch with a positive guard ("+foo") is
1525 guards is always pushed. a patch with a positive guard ("+foo") is
1509 pushed only if the qselect command enables guard "foo". a patch with
1526 pushed only if the qselect command enables guard "foo". a patch with
1510 a negative guard ("-foo") is never pushed if the qselect command enables
1527 a negative guard ("-foo") is never pushed if the qselect command enables
1511 guard "foo".
1528 guard "foo".
1512
1529
1513 with no arguments, print the guards of the topmost applied patch.
1530 with no arguments, print the guards of the topmost applied patch.
1514 with arguments, set the guards for the named (or topmost) patch.
1531 with arguments, set the guards for the named (or topmost) patch.
1515
1532
1516 to set a negative guard "-foo" on the topmost patch ("--" is needed so
1533 to set a negative guard "-foo" on the topmost patch ("--" is needed so
1517 hg will not interpret "-foo" as argument):
1534 hg will not interpret "-foo" as argument):
1518 hg qguard -- -foo
1535 hg qguard -- -foo
1519
1536
1520 to set guards on other patch:
1537 to set guards on other patch:
1521 hg qguard other.patch +2.6.17 -stable
1538 hg qguard other.patch +2.6.17 -stable
1522 '''
1539 '''
1523 def status(idx):
1540 def status(idx):
1524 guards = q.series_guards[idx] or ['unguarded']
1541 guards = q.series_guards[idx] or ['unguarded']
1525 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1542 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
1526 q = repo.mq
1543 q = repo.mq
1527 patch = None
1544 patch = None
1528 args = list(args)
1545 args = list(args)
1529 if opts['list']:
1546 if opts['list']:
1530 if args or opts['none']:
1547 if args or opts['none']:
1531 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1548 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
1532 for i in xrange(len(q.series)):
1549 for i in xrange(len(q.series)):
1533 status(i)
1550 status(i)
1534 return
1551 return
1535 if not args or args[0][0:1] in '-+':
1552 if not args or args[0][0:1] in '-+':
1536 if not q.applied:
1553 if not q.applied:
1537 raise util.Abort(_('no patches applied'))
1554 raise util.Abort(_('no patches applied'))
1538 patch = q.applied[-1].name
1555 patch = q.applied[-1].name
1539 if patch is None and args[0][0:1] not in '-+':
1556 if patch is None and args[0][0:1] not in '-+':
1540 patch = args.pop(0)
1557 patch = args.pop(0)
1541 if patch is None:
1558 if patch is None:
1542 raise util.Abort(_('no patch to work with'))
1559 raise util.Abort(_('no patch to work with'))
1543 if args or opts['none']:
1560 if args or opts['none']:
1544 q.set_guards(q.find_series(patch), args)
1561 q.set_guards(q.find_series(patch), args)
1545 q.save_dirty()
1562 q.save_dirty()
1546 else:
1563 else:
1547 status(q.series.index(q.lookup(patch)))
1564 status(q.series.index(q.lookup(patch)))
1548
1565
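# A minimal sketch of the guard syntax described in the qguard help above;
# split_guards is a hypothetical helper, not part of mq itself.
def split_guards(args):
    '''split qguard-style arguments into positive and negative guard names.'''
    positive, negative = [], []
    for g in args:
        if g.startswith('+'):
            positive.append(g[1:])
        elif g.startswith('-'):
            negative.append(g[1:])
        else:
            raise ValueError('guard %r must start with "+" or "-"' % g)
    return positive, negative

# e.g. split_guards(['+2.6.17', '-stable']) returns (['2.6.17'], ['stable']),
# mirroring "hg qguard other.patch +2.6.17 -stable" above.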
1549 def header(ui, repo, patch=None):
1566 def header(ui, repo, patch=None):
1550 """Print the header of the topmost or specified patch"""
1567 """Print the header of the topmost or specified patch"""
1551 q = repo.mq
1568 q = repo.mq
1552
1569
1553 if patch:
1570 if patch:
1554 patch = q.lookup(patch)
1571 patch = q.lookup(patch)
1555 else:
1572 else:
1556 if not q.applied:
1573 if not q.applied:
1557 ui.write('No patches applied\n')
1574 ui.write('No patches applied\n')
1558 return
1575 return
1559 patch = q.lookup('qtip')
1576 patch = q.lookup('qtip')
1560 message = repo.mq.readheaders(patch)[0]
1577 message = repo.mq.readheaders(patch)[0]
1561
1578
1562 ui.write('\n'.join(message) + '\n')
1579 ui.write('\n'.join(message) + '\n')
1563
1580
1564 def lastsavename(path):
1581 def lastsavename(path):
1565 (directory, base) = os.path.split(path)
1582 (directory, base) = os.path.split(path)
1566 names = os.listdir(directory)
1583 names = os.listdir(directory)
1567 namere = re.compile(r"%s\.([0-9]+)" % base)
1584 namere = re.compile(r"%s\.([0-9]+)" % base)
1568 maxindex = None
1585 maxindex = None
1569 maxname = None
1586 maxname = None
1570 for f in names:
1587 for f in names:
1571 m = namere.match(f)
1588 m = namere.match(f)
1572 if m:
1589 if m:
1573 index = int(m.group(1))
1590 index = int(m.group(1))
1574 if maxindex == None or index > maxindex:
1591 if maxindex == None or index > maxindex:
1575 maxindex = index
1592 maxindex = index
1576 maxname = f
1593 maxname = f
1577 if maxname:
1594 if maxname:
1578 return (os.path.join(directory, maxname), maxindex)
1595 return (os.path.join(directory, maxname), maxindex)
1579 return (None, None)
1596 return (None, None)
1580
1597
1581 def savename(path):
1598 def savename(path):
1582 (last, index) = lastsavename(path)
1599 (last, index) = lastsavename(path)
1583 if last is None:
1600 if last is None:
1584 index = 0
1601 index = 0
1585 newpath = path + ".%d" % (index + 1)
1602 newpath = path + ".%d" % (index + 1)
1586 return newpath
1603 return newpath
1587
1604
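# A worked example for the two helpers above (directory contents are made
# up): for path '.hg/patches' with existing backups 'patches.1' and
# 'patches.3' next to it, lastsavename() matches the "<base>.<number>"
# pattern and returns ('.hg/patches.3', 3), so savename() yields
# '.hg/patches.4'; with no numbered backups at all it yields '.hg/patches.1'.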
1588 def push(ui, repo, patch=None, **opts):
1605 def push(ui, repo, patch=None, **opts):
1589 """push the next patch onto the stack"""
1606 """push the next patch onto the stack"""
1590 q = repo.mq
1607 q = repo.mq
1591 mergeq = None
1608 mergeq = None
1592
1609
1593 if opts['all']:
1610 if opts['all']:
1594 patch = q.series[-1]
1611 patch = q.series[-1]
1595 if opts['merge']:
1612 if opts['merge']:
1596 if opts['name']:
1613 if opts['name']:
1597 newpath = opts['name']
1614 newpath = opts['name']
1598 else:
1615 else:
1599 newpath, i = lastsavename(q.path)
1616 newpath, i = lastsavename(q.path)
1600 if not newpath:
1617 if not newpath:
1601 ui.warn("no saved queues found, please use -n\n")
1618 ui.warn("no saved queues found, please use -n\n")
1602 return 1
1619 return 1
1603 mergeq = queue(ui, repo.join(""), newpath)
1620 mergeq = queue(ui, repo.join(""), newpath)
1604 ui.warn("merging with queue at: %s\n" % mergeq.path)
1621 ui.warn("merging with queue at: %s\n" % mergeq.path)
1605 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1622 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
1606 mergeq=mergeq)
1623 mergeq=mergeq)
1607 q.save_dirty()
1624 q.save_dirty()
1608 return ret
1625 return ret
1609
1626
1610 def pop(ui, repo, patch=None, **opts):
1627 def pop(ui, repo, patch=None, **opts):
1611 """pop the current patch off the stack"""
1628 """pop the current patch off the stack"""
1612 localupdate = True
1629 localupdate = True
1613 if opts['name']:
1630 if opts['name']:
1614 q = queue(ui, repo.join(""), repo.join(opts['name']))
1631 q = queue(ui, repo.join(""), repo.join(opts['name']))
1615 ui.warn('using patch queue: %s\n' % q.path)
1632 ui.warn('using patch queue: %s\n' % q.path)
1616 localupdate = False
1633 localupdate = False
1617 else:
1634 else:
1618 q = repo.mq
1635 q = repo.mq
1619 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1636 q.pop(repo, patch, force=opts['force'], update=localupdate, all=opts['all'])
1620 q.save_dirty()
1637 q.save_dirty()
1621 return 0
1638 return 0
1622
1639
1623 def rename(ui, repo, patch, name=None, **opts):
1640 def rename(ui, repo, patch, name=None, **opts):
1624 """rename a patch
1641 """rename a patch
1625
1642
1626 With one argument, renames the current patch to PATCH1.
1643 With one argument, renames the current patch to PATCH1.
1627 With two arguments, renames PATCH1 to PATCH2."""
1644 With two arguments, renames PATCH1 to PATCH2."""
1628
1645
1629 q = repo.mq
1646 q = repo.mq
1630
1647
1631 if not name:
1648 if not name:
1632 name = patch
1649 name = patch
1633 patch = None
1650 patch = None
1634
1651
1635 if name in q.series:
1652 if name in q.series:
1636 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1653 raise util.Abort(_('A patch named %s already exists in the series file') % name)
1637
1654
1638 absdest = q.join(name)
1655 absdest = q.join(name)
1639 if os.path.exists(absdest):
1656 if os.path.exists(absdest):
1640 raise util.Abort(_('%s already exists') % absdest)
1657 raise util.Abort(_('%s already exists') % absdest)
1641
1658
1642 if patch:
1659 if patch:
1643 patch = q.lookup(patch)
1660 patch = q.lookup(patch)
1644 else:
1661 else:
1645 if not q.applied:
1662 if not q.applied:
1646 ui.write(_('No patches applied\n'))
1663 ui.write(_('No patches applied\n'))
1647 return
1664 return
1648 patch = q.lookup('qtip')
1665 patch = q.lookup('qtip')
1649
1666
1650 if ui.verbose:
1667 if ui.verbose:
1651 ui.write('Renaming %s to %s\n' % (patch, name))
1668 ui.write('Renaming %s to %s\n' % (patch, name))
1652 i = q.find_series(patch)
1669 i = q.find_series(patch)
1653 q.full_series[i] = name
1670 q.full_series[i] = name
1654 q.parse_series()
1671 q.parse_series()
1655 q.series_dirty = 1
1672 q.series_dirty = 1
1656
1673
1657 info = q.isapplied(patch)
1674 info = q.isapplied(patch)
1658 if info:
1675 if info:
1659 q.applied[info[0]] = statusentry(info[1], name)
1676 q.applied[info[0]] = statusentry(info[1], name)
1660 q.applied_dirty = 1
1677 q.applied_dirty = 1
1661
1678
1662 util.rename(q.join(patch), absdest)
1679 util.rename(q.join(patch), absdest)
1663 r = q.qrepo()
1680 r = q.qrepo()
1664 if r:
1681 if r:
1665 wlock = r.wlock()
1682 wlock = r.wlock()
1666 if r.dirstate.state(name) == 'r':
1683 if r.dirstate.state(name) == 'r':
1667 r.undelete([name], wlock)
1684 r.undelete([name], wlock)
1668 r.copy(patch, name, wlock)
1685 r.copy(patch, name, wlock)
1669 r.remove([patch], False, wlock)
1686 r.remove([patch], False, wlock)
1670
1687
1671 q.save_dirty()
1688 q.save_dirty()
1672
1689
1673 def restore(ui, repo, rev, **opts):
1690 def restore(ui, repo, rev, **opts):
1674 """restore the queue state saved by a rev"""
1691 """restore the queue state saved by a rev"""
1675 rev = repo.lookup(rev)
1692 rev = repo.lookup(rev)
1676 q = repo.mq
1693 q = repo.mq
1677 q.restore(repo, rev, delete=opts['delete'],
1694 q.restore(repo, rev, delete=opts['delete'],
1678 qupdate=opts['update'])
1695 qupdate=opts['update'])
1679 q.save_dirty()
1696 q.save_dirty()
1680 return 0
1697 return 0
1681
1698
1682 def save(ui, repo, **opts):
1699 def save(ui, repo, **opts):
1683 """save current queue state"""
1700 """save current queue state"""
1684 q = repo.mq
1701 q = repo.mq
1685 message = commands.logmessage(opts)
1702 message = commands.logmessage(opts)
1686 ret = q.save(repo, msg=message)
1703 ret = q.save(repo, msg=message)
1687 if ret:
1704 if ret:
1688 return ret
1705 return ret
1689 q.save_dirty()
1706 q.save_dirty()
1690 if opts['copy']:
1707 if opts['copy']:
1691 path = q.path
1708 path = q.path
1692 if opts['name']:
1709 if opts['name']:
1693 newpath = os.path.join(q.basepath, opts['name'])
1710 newpath = os.path.join(q.basepath, opts['name'])
1694 if os.path.exists(newpath):
1711 if os.path.exists(newpath):
1695 if not os.path.isdir(newpath):
1712 if not os.path.isdir(newpath):
1696 raise util.Abort(_('destination %s exists and is not '
1713 raise util.Abort(_('destination %s exists and is not '
1697 'a directory') % newpath)
1714 'a directory') % newpath)
1698 if not opts['force']:
1715 if not opts['force']:
1699 raise util.Abort(_('destination %s exists, '
1716 raise util.Abort(_('destination %s exists, '
1700 'use -f to force') % newpath)
1717 'use -f to force') % newpath)
1701 else:
1718 else:
1702 newpath = savename(path)
1719 newpath = savename(path)
1703 ui.warn("copy %s to %s\n" % (path, newpath))
1720 ui.warn("copy %s to %s\n" % (path, newpath))
1704 util.copyfiles(path, newpath)
1721 util.copyfiles(path, newpath)
1705 if opts['empty']:
1722 if opts['empty']:
1706 try:
1723 try:
1707 os.unlink(q.join(q.status_path))
1724 os.unlink(q.join(q.status_path))
1708 except:
1725 except:
1709 pass
1726 pass
1710 return 0
1727 return 0
1711
1728
1712 def strip(ui, repo, rev, **opts):
1729 def strip(ui, repo, rev, **opts):
1713 """strip a revision and all later revs on the same branch"""
1730 """strip a revision and all later revs on the same branch"""
1714 rev = repo.lookup(rev)
1731 rev = repo.lookup(rev)
1715 backup = 'all'
1732 backup = 'all'
1716 if opts['backup']:
1733 if opts['backup']:
1717 backup = 'strip'
1734 backup = 'strip'
1718 elif opts['nobackup']:
1735 elif opts['nobackup']:
1719 backup = 'none'
1736 backup = 'none'
1720 repo.mq.strip(repo, rev, backup=backup)
1737 repo.mq.strip(repo, rev, backup=backup)
1721 return 0
1738 return 0
1722
1739
1723 def select(ui, repo, *args, **opts):
1740 def select(ui, repo, *args, **opts):
1724 '''set or print guarded patches to push
1741 '''set or print guarded patches to push
1725
1742
1726 use qguard command to set or print guards on patch. then use
1743 use qguard command to set or print guards on patch. then use
1727 qselect to tell mq which guards to use. example:
1744 qselect to tell mq which guards to use. example:
1728
1745
1729 qguard foo.patch -stable (negative guard)
1746 qguard foo.patch -stable (negative guard)
1730 qguard bar.patch +stable (positive guard)
1747 qguard bar.patch +stable (positive guard)
1731 qselect stable
1748 qselect stable
1732
1749
1733 this sets "stable" guard. mq will skip foo.patch (because it has
1750 this sets "stable" guard. mq will skip foo.patch (because it has
1734 negative match) but push bar.patch (because it has positive
1751 negative match) but push bar.patch (because it has positive
1735 match). a patch is pushed if any positive guards match and no
1752 match). a patch is pushed if any positive guards match and no
1736 negative guards match.
1753 negative guards match.
1737
1754
1738 with no arguments, default is to print current active guards.
1755 with no arguments, default is to print current active guards.
1739 with arguments, set active guards as given.
1756 with arguments, set active guards as given.
1740
1757
1741 use -n/--none to deactivate guards (no other arguments needed).
1758 use -n/--none to deactivate guards (no other arguments needed).
1742 when no guards are active, patches with positive guards are skipped,
1759 when no guards are active, patches with positive guards are skipped,
1743 patches with negative guards are pushed.
1760 patches with negative guards are pushed.
1744
1761
1745 qselect can change guards of applied patches. it does not pop
1762 qselect can change guards of applied patches. it does not pop
1746 guarded patches by default. use --pop to pop back to last applied
1763 guarded patches by default. use --pop to pop back to last applied
1747 patch that is not guarded. use --reapply (implies --pop) to push
1764 patch that is not guarded. use --reapply (implies --pop) to push
1748 back to current patch afterwards, but skip guarded patches.
1765 back to current patch afterwards, but skip guarded patches.
1749
1766
1750 use -s/--series to print list of all guards in series file (no
1767 use -s/--series to print list of all guards in series file (no
1751 other arguments needed). use -v for more information.'''
1768 other arguments needed). use -v for more information.'''
1752
1769
1753 q = repo.mq
1770 q = repo.mq
1754 guards = q.active()
1771 guards = q.active()
1755 if args or opts['none']:
1772 if args or opts['none']:
1756 old_unapplied = q.unapplied(repo)
1773 old_unapplied = q.unapplied(repo)
1757 old_guarded = [i for i in xrange(len(q.applied)) if
1774 old_guarded = [i for i in xrange(len(q.applied)) if
1758 not q.pushable(i)[0]]
1775 not q.pushable(i)[0]]
1759 q.set_active(args)
1776 q.set_active(args)
1760 q.save_dirty()
1777 q.save_dirty()
1761 if not args:
1778 if not args:
1762 ui.status(_('guards deactivated\n'))
1779 ui.status(_('guards deactivated\n'))
1763 if not opts['pop'] and not opts['reapply']:
1780 if not opts['pop'] and not opts['reapply']:
1764 unapplied = q.unapplied(repo)
1781 unapplied = q.unapplied(repo)
1765 guarded = [i for i in xrange(len(q.applied))
1782 guarded = [i for i in xrange(len(q.applied))
1766 if not q.pushable(i)[0]]
1783 if not q.pushable(i)[0]]
1767 if len(unapplied) != len(old_unapplied):
1784 if len(unapplied) != len(old_unapplied):
1768 ui.status(_('number of unguarded, unapplied patches has '
1785 ui.status(_('number of unguarded, unapplied patches has '
1769 'changed from %d to %d\n') %
1786 'changed from %d to %d\n') %
1770 (len(old_unapplied), len(unapplied)))
1787 (len(old_unapplied), len(unapplied)))
1771 if len(guarded) != len(old_guarded):
1788 if len(guarded) != len(old_guarded):
1772 ui.status(_('number of guarded, applied patches has changed '
1789 ui.status(_('number of guarded, applied patches has changed '
1773 'from %d to %d\n') %
1790 'from %d to %d\n') %
1774 (len(old_guarded), len(guarded)))
1791 (len(old_guarded), len(guarded)))
1775 elif opts['series']:
1792 elif opts['series']:
1776 guards = {}
1793 guards = {}
1777 noguards = 0
1794 noguards = 0
1778 for gs in q.series_guards:
1795 for gs in q.series_guards:
1779 if not gs:
1796 if not gs:
1780 noguards += 1
1797 noguards += 1
1781 for g in gs:
1798 for g in gs:
1782 guards.setdefault(g, 0)
1799 guards.setdefault(g, 0)
1783 guards[g] += 1
1800 guards[g] += 1
1784 if ui.verbose:
1801 if ui.verbose:
1785 guards['NONE'] = noguards
1802 guards['NONE'] = noguards
1786 guards = guards.items()
1803 guards = guards.items()
1787 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1804 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
1788 if guards:
1805 if guards:
1789 ui.note(_('guards in series file:\n'))
1806 ui.note(_('guards in series file:\n'))
1790 for guard, count in guards:
1807 for guard, count in guards:
1791 ui.note('%2d ' % count)
1808 ui.note('%2d ' % count)
1792 ui.write(guard, '\n')
1809 ui.write(guard, '\n')
1793 else:
1810 else:
1794 ui.note(_('no guards in series file\n'))
1811 ui.note(_('no guards in series file\n'))
1795 else:
1812 else:
1796 if guards:
1813 if guards:
1797 ui.note(_('active guards:\n'))
1814 ui.note(_('active guards:\n'))
1798 for g in guards:
1815 for g in guards:
1799 ui.write(g, '\n')
1816 ui.write(g, '\n')
1800 else:
1817 else:
1801 ui.write(_('no active guards\n'))
1818 ui.write(_('no active guards\n'))
1802 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1819 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
1803 popped = False
1820 popped = False
1804 if opts['pop'] or opts['reapply']:
1821 if opts['pop'] or opts['reapply']:
1805 for i in xrange(len(q.applied)):
1822 for i in xrange(len(q.applied)):
1806 pushable, reason = q.pushable(i)
1823 pushable, reason = q.pushable(i)
1807 if not pushable:
1824 if not pushable:
1808 ui.status(_('popping guarded patches\n'))
1825 ui.status(_('popping guarded patches\n'))
1809 popped = True
1826 popped = True
1810 if i == 0:
1827 if i == 0:
1811 q.pop(repo, all=True)
1828 q.pop(repo, all=True)
1812 else:
1829 else:
1813 q.pop(repo, i-1)
1830 q.pop(repo, i-1)
1814 break
1831 break
1815 if popped:
1832 if popped:
1816 try:
1833 try:
1817 if reapply:
1834 if reapply:
1818 ui.status(_('reapplying unguarded patches\n'))
1835 ui.status(_('reapplying unguarded patches\n'))
1819 q.push(repo, reapply)
1836 q.push(repo, reapply)
1820 finally:
1837 finally:
1821 q.save_dirty()
1838 q.save_dirty()
1822
1839
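# A standalone restatement of the guard rules from the qguard/qselect help,
# as a sketch with hypothetical names rather than mq's own implementation.
def pushable(guards, active):
    '''return True if a patch carrying these guards may be pushed while the
    guard names in `active` are selected.'''
    if not guards:
        return True                                # unguarded patches always push
    negative = [g[1:] for g in guards if g.startswith('-')]
    positive = [g[1:] for g in guards if g.startswith('+')]
    if [g for g in negative if g in active]:
        return False                               # a matching negative guard blocks
    if positive:
        return bool([g for g in positive if g in active])  # need one positive match
    return True

# e.g. with no active guards, pushable(['+stable'], []) is False and
# pushable(['-stable'], []) is True, matching "when no guards are active,
# patches with positive guards are skipped, patches with negative guards
# are pushed" above.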
1823 def reposetup(ui, repo):
1840 def reposetup(ui, repo):
1824 class mqrepo(repo.__class__):
1841 class mqrepo(repo.__class__):
1825 def abort_if_wdir_patched(self, errmsg, force=False):
1842 def abort_if_wdir_patched(self, errmsg, force=False):
1826 if self.mq.applied and not force:
1843 if self.mq.applied and not force:
1827 parent = revlog.hex(self.dirstate.parents()[0])
1844 parent = revlog.hex(self.dirstate.parents()[0])
1828 if parent in [s.rev for s in self.mq.applied]:
1845 if parent in [s.rev for s in self.mq.applied]:
1829 raise util.Abort(errmsg)
1846 raise util.Abort(errmsg)
1830
1847
1831 def commit(self, *args, **opts):
1848 def commit(self, *args, **opts):
1832 if len(args) >= 6:
1849 if len(args) >= 6:
1833 force = args[5]
1850 force = args[5]
1834 else:
1851 else:
1835 force = opts.get('force')
1852 force = opts.get('force')
1836 self.abort_if_wdir_patched(
1853 self.abort_if_wdir_patched(
1837 _('cannot commit over an applied mq patch'),
1854 _('cannot commit over an applied mq patch'),
1838 force)
1855 force)
1839
1856
1840 return super(mqrepo, self).commit(*args, **opts)
1857 return super(mqrepo, self).commit(*args, **opts)
1841
1858
1842 def push(self, remote, force=False, revs=None):
1859 def push(self, remote, force=False, revs=None):
1843 if self.mq.applied and not force:
1860 if self.mq.applied and not force:
1844 raise util.Abort(_('source has mq patches applied'))
1861 raise util.Abort(_('source has mq patches applied'))
1845 return super(mqrepo, self).push(remote, force, revs)
1862 return super(mqrepo, self).push(remote, force, revs)
1846
1863
1847 def tags(self):
1864 def tags(self):
1848 if self.tagscache:
1865 if self.tagscache:
1849 return self.tagscache
1866 return self.tagscache
1850
1867
1851 tagscache = super(mqrepo, self).tags()
1868 tagscache = super(mqrepo, self).tags()
1852
1869
1853 q = self.mq
1870 q = self.mq
1854 if not q.applied:
1871 if not q.applied:
1855 return tagscache
1872 return tagscache
1856
1873
1857 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1874 mqtags = [(patch.rev, patch.name) for patch in q.applied]
1858 mqtags.append((mqtags[-1][0], 'qtip'))
1875 mqtags.append((mqtags[-1][0], 'qtip'))
1859 mqtags.append((mqtags[0][0], 'qbase'))
1876 mqtags.append((mqtags[0][0], 'qbase'))
1860 for patch in mqtags:
1877 for patch in mqtags:
1861 if patch[1] in tagscache:
1878 if patch[1] in tagscache:
1862 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1879 self.ui.warn('Tag %s overrides mq patch of the same name\n' % patch[1])
1863 else:
1880 else:
1864 tagscache[patch[1]] = revlog.bin(patch[0])
1881 tagscache[patch[1]] = revlog.bin(patch[0])
1865
1882
1866 return tagscache
1883 return tagscache
1867
1884
1868 if repo.local():
1885 if repo.local():
1869 repo.__class__ = mqrepo
1886 repo.__class__ = mqrepo
1870 repo.mq = queue(ui, repo.join(""))
1887 repo.mq = queue(ui, repo.join(""))
1871
1888
1872 cmdtable = {
1889 cmdtable = {
1873 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1890 "qapplied": (applied, [], 'hg qapplied [PATCH]'),
1874 "qclone": (clone,
1891 "qclone": (clone,
1875 [('', 'pull', None, _('use pull protocol to copy metadata')),
1892 [('', 'pull', None, _('use pull protocol to copy metadata')),
1876 ('U', 'noupdate', None, _('do not update the new working directories')),
1893 ('U', 'noupdate', None, _('do not update the new working directories')),
1877 ('', 'uncompressed', None,
1894 ('', 'uncompressed', None,
1878 _('use uncompressed transfer (fast over LAN)')),
1895 _('use uncompressed transfer (fast over LAN)')),
1879 ('e', 'ssh', '', _('specify ssh command to use')),
1896 ('e', 'ssh', '', _('specify ssh command to use')),
1880 ('p', 'patches', '', _('location of source patch repo')),
1897 ('p', 'patches', '', _('location of source patch repo')),
1881 ('', 'remotecmd', '',
1898 ('', 'remotecmd', '',
1882 _('specify hg command to run on the remote side'))],
1899 _('specify hg command to run on the remote side'))],
1883 'hg qclone [OPTION]... SOURCE [DEST]'),
1900 'hg qclone [OPTION]... SOURCE [DEST]'),
1884 "qcommit|qci":
1901 "qcommit|qci":
1885 (commit,
1902 (commit,
1886 commands.table["^commit|ci"][1],
1903 commands.table["^commit|ci"][1],
1887 'hg qcommit [OPTION]... [FILE]...'),
1904 'hg qcommit [OPTION]... [FILE]...'),
1888 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1905 "^qdiff": (diff, [], 'hg qdiff [FILE]...'),
1889 "qdelete|qremove|qrm":
1906 "qdelete":
1890 (delete,
1907 (delete,
1891 [('k', 'keep', None, _('keep patch file'))],
1908 [('f', 'force', None, _('delete patch file'))],
1892 'hg qdelete [-k] PATCH'),
1909 'hg qdelete [-f] PATCH'),
1893 'qfold':
1910 'qfold':
1894 (fold,
1911 (fold,
1895 [('e', 'edit', None, _('edit patch header')),
1912 [('e', 'edit', None, _('edit patch header')),
1896 ('k', 'keep', None, _('keep folded patch files')),
1913 ('f', 'force', None, _('delete folded patch files')),
1897 ('m', 'message', '', _('set patch header to <text>')),
1914 ('m', 'message', '', _('set patch header to <text>')),
1898 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1915 ('l', 'logfile', '', _('set patch header to contents of <file>'))],
1899 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1916 'hg qfold [-e] [-m <text>] [-l <file>] PATCH...'),
1900 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1917 'qguard': (guard, [('l', 'list', None, _('list all patches and guards')),
1901 ('n', 'none', None, _('drop all guards'))],
1918 ('n', 'none', None, _('drop all guards'))],
1902 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1919 'hg qguard [PATCH] [+GUARD...] [-GUARD...]'),
1903 'qheader': (header, [],
1920 'qheader': (header, [],
1904 _('hg qheader [PATCH]')),
1921 _('hg qheader [PATCH]')),
1905 "^qimport":
1922 "^qimport":
1906 (qimport,
1923 (qimport,
1907 [('e', 'existing', None, 'import file in patch dir'),
1924 [('e', 'existing', None, 'import file in patch dir'),
1908 ('n', 'name', '', 'patch file name'),
1925 ('n', 'name', '', 'patch file name'),
1909 ('f', 'force', None, 'overwrite existing files')],
1926 ('f', 'force', None, 'overwrite existing files')],
1910 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1927 'hg qimport [-e] [-n NAME] [-f] FILE...'),
1911 "^qinit":
1928 "^qinit":
1912 (init,
1929 (init,
1913 [('c', 'create-repo', None, 'create queue repository')],
1930 [('c', 'create-repo', None, 'create queue repository')],
1914 'hg qinit [-c]'),
1931 'hg qinit [-c]'),
1915 "qnew":
1932 "qnew":
1916 (new,
1933 (new,
1917 [('m', 'message', '', _('use <text> as commit message')),
1934 [('m', 'message', '', _('use <text> as commit message')),
1918 ('l', 'logfile', '', _('read the commit message from <file>')),
1935 ('l', 'logfile', '', _('read the commit message from <file>')),
1919 ('f', 'force', None, _('import uncommitted changes into patch'))],
1936 ('f', 'force', None, _('import uncommitted changes into patch'))],
1920 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1937 'hg qnew [-m TEXT] [-l FILE] [-f] PATCH'),
1921 "qnext": (next, [], 'hg qnext'),
1938 "qnext": (next, [], 'hg qnext'),
1922 "qprev": (prev, [], 'hg qprev'),
1939 "qprev": (prev, [], 'hg qprev'),
1923 "^qpop":
1940 "^qpop":
1924 (pop,
1941 (pop,
1925 [('a', 'all', None, 'pop all patches'),
1942 [('a', 'all', None, 'pop all patches'),
1926 ('n', 'name', '', 'queue name to pop'),
1943 ('n', 'name', '', 'queue name to pop'),
1927 ('f', 'force', None, 'forget any local changes')],
1944 ('f', 'force', None, 'forget any local changes')],
1928 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1945 'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
1929 "^qpush":
1946 "^qpush":
1930 (push,
1947 (push,
1931 [('f', 'force', None, 'apply if the patch has rejects'),
1948 [('f', 'force', None, 'apply if the patch has rejects'),
1932 ('l', 'list', None, 'list patch name in commit text'),
1949 ('l', 'list', None, 'list patch name in commit text'),
1933 ('a', 'all', None, 'apply all patches'),
1950 ('a', 'all', None, 'apply all patches'),
1934 ('m', 'merge', None, 'merge from another queue'),
1951 ('m', 'merge', None, 'merge from another queue'),
1935 ('n', 'name', '', 'merge queue name')],
1952 ('n', 'name', '', 'merge queue name')],
1936 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1953 'hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]'),
1937 "^qrefresh":
1954 "^qrefresh":
1938 (refresh,
1955 (refresh,
1939 [('e', 'edit', None, _('edit commit message')),
1956 [('e', 'edit', None, _('edit commit message')),
1940 ('m', 'message', '', _('change commit message with <text>')),
1957 ('m', 'message', '', _('change commit message with <text>')),
1941 ('l', 'logfile', '', _('change commit message with <file> content')),
1958 ('l', 'logfile', '', _('change commit message with <file> content')),
1942 ('s', 'short', None, 'short refresh')],
1959 ('s', 'short', None, 'short refresh')],
1943 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1960 'hg qrefresh [-e] [-m TEXT] [-l FILE] [-s]'),
1944 'qrename|qmv':
1961 'qrename|qmv':
1945 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1962 (rename, [], 'hg qrename PATCH1 [PATCH2]'),
1946 "qrestore":
1963 "qrestore":
1947 (restore,
1964 (restore,
1948 [('d', 'delete', None, 'delete save entry'),
1965 [('d', 'delete', None, 'delete save entry'),
1949 ('u', 'update', None, 'update queue working dir')],
1966 ('u', 'update', None, 'update queue working dir')],
1950 'hg qrestore [-d] [-u] REV'),
1967 'hg qrestore [-d] [-u] REV'),
1951 "qsave":
1968 "qsave":
1952 (save,
1969 (save,
1953 [('m', 'message', '', _('use <text> as commit message')),
1970 [('m', 'message', '', _('use <text> as commit message')),
1954 ('l', 'logfile', '', _('read the commit message from <file>')),
1971 ('l', 'logfile', '', _('read the commit message from <file>')),
1955 ('c', 'copy', None, 'copy patch directory'),
1972 ('c', 'copy', None, 'copy patch directory'),
1956 ('n', 'name', '', 'copy directory name'),
1973 ('n', 'name', '', 'copy directory name'),
1957 ('e', 'empty', None, 'clear queue status file'),
1974 ('e', 'empty', None, 'clear queue status file'),
1958 ('f', 'force', None, 'force copy')],
1975 ('f', 'force', None, 'force copy')],
1959 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1976 'hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]'),
1960 "qselect": (select,
1977 "qselect": (select,
1961 [('n', 'none', None, _('disable all guards')),
1978 [('n', 'none', None, _('disable all guards')),
1962 ('s', 'series', None, _('list all guards in series file')),
1979 ('s', 'series', None, _('list all guards in series file')),
1963 ('', 'pop', None,
1980 ('', 'pop', None,
1964 _('pop to before first guarded applied patch')),
1981 _('pop to before first guarded applied patch')),
1965 ('', 'reapply', None, _('pop, then reapply patches'))],
1982 ('', 'reapply', None, _('pop, then reapply patches'))],
1966 'hg qselect [OPTION...] [GUARD...]'),
1983 'hg qselect [OPTION...] [GUARD...]'),
1967 "qseries":
1984 "qseries":
1968 (series,
1985 (series,
1969 [('m', 'missing', None, 'print patches not in series'),
1986 [('m', 'missing', None, 'print patches not in series'),
1970 ('s', 'summary', None, _('print first line of patch header'))],
1987 ('s', 'summary', None, _('print first line of patch header'))],
1971 'hg qseries [-m]'),
1988 'hg qseries [-m]'),
1972 "^strip":
1989 "^strip":
1973 (strip,
1990 (strip,
1974 [('f', 'force', None, 'force multi-head removal'),
1991 [('f', 'force', None, 'force multi-head removal'),
1975 ('b', 'backup', None, 'bundle unrelated changesets'),
1992 ('b', 'backup', None, 'bundle unrelated changesets'),
1976 ('n', 'nobackup', None, 'no backups')],
1993 ('n', 'nobackup', None, 'no backups')],
1977 'hg strip [-f] [-b] [-n] REV'),
1994 'hg strip [-f] [-b] [-n] REV'),
1978 "qtop": (top, [], 'hg qtop'),
1995 "qtop": (top, [], 'hg qtop'),
1979 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1996 "qunapplied": (unapplied, [], 'hg qunapplied [PATCH]'),
1980 }
1997 }
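For orientation, each key in the command table above may carry '|'-separated aliases and a leading '^' marking the command for the short help listing, while the value is a (function, options, synopsis) tuple. A rough standalone sketch of resolving a name against such keys (findcmd and demo_table are illustrative, not the real dispatcher):

def findcmd(name, table):
    # resolve a command name against keys like '^qpop' or 'qrename|qmv'
    for key, entry in table.items():
        if name in key.lstrip('^').split('|'):
            return entry
    raise KeyError(name)

demo_table = {
    '^qpop': (lambda ui, repo, **opts: None,
              [('a', 'all', None, 'pop all patches')],
              'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
}
func, options, synopsis = findcmd('qpop', demo_table)
print(synopsis)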
@@ -1,276 +1,276 b''
1 # notify.py - email notifications for mercurial
1 # notify.py - email notifications for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7 #
7 #
8 # hook extension to email notifications to people when changesets are
8 # hook extension to email notifications to people when changesets are
9 # committed to a repo they subscribe to.
9 # committed to a repo they subscribe to.
10 #
10 #
11 # default mode is to print messages to stdout, for testing and
11 # default mode is to print messages to stdout, for testing and
12 # configuring.
12 # configuring.
13 #
13 #
14 # to use, configure notify extension and enable in hgrc like this:
14 # to use, configure notify extension and enable in hgrc like this:
15 #
15 #
16 # [extensions]
16 # [extensions]
17 # hgext.notify =
17 # hgext.notify =
18 #
18 #
19 # [hooks]
19 # [hooks]
20 # # one email for each incoming changeset
20 # # one email for each incoming changeset
21 # incoming.notify = python:hgext.notify.hook
21 # incoming.notify = python:hgext.notify.hook
22 # # batch emails when many changesets incoming at one time
22 # # batch emails when many changesets incoming at one time
23 # changegroup.notify = python:hgext.notify.hook
23 # changegroup.notify = python:hgext.notify.hook
24 #
24 #
25 # [notify]
25 # [notify]
26 # # config items go in here
26 # # config items go in here
27 #
27 #
28 # config items:
28 # config items:
29 #
29 #
30 # REQUIRED:
30 # REQUIRED:
31 # config = /path/to/file # file containing subscriptions
31 # config = /path/to/file # file containing subscriptions
32 #
32 #
33 # OPTIONAL:
33 # OPTIONAL:
34 # test = True # print messages to stdout for testing
34 # test = True # print messages to stdout for testing
35 # strip = 3 # number of slashes to strip for url paths
35 # strip = 3 # number of slashes to strip for url paths
36 # domain = example.com # domain to use if committer missing domain
36 # domain = example.com # domain to use if committer missing domain
37 # style = ... # style file to use when formatting email
37 # style = ... # style file to use when formatting email
38 # template = ... # template to use when formatting email
38 # template = ... # template to use when formatting email
39 # incoming = ... # template to use when run as incoming hook
39 # incoming = ... # template to use when run as incoming hook
40 # changegroup = ... # template when run as changegroup hook
40 # changegroup = ... # template when run as changegroup hook
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
41 # maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
42 # maxsubject = 67 # truncate subject line longer than this
42 # maxsubject = 67 # truncate subject line longer than this
43 # sources = serve # notify if source of incoming changes in this list
43 # sources = serve # notify if source of incoming changes in this list
44 # # (serve == ssh or http, push, pull, bundle)
44 # # (serve == ssh or http, push, pull, bundle)
45 # [email]
45 # [email]
46 # from = user@host.com # email address to send as if none given
46 # from = user@host.com # email address to send as if none given
47 # [web]
47 # [web]
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
48 # baseurl = http://hgserver/... # root of hg web site for browsing commits
49 #
49 #
50 # notify config file has same format as regular hgrc. it has two
50 # notify config file has same format as regular hgrc. it has two
51 # sections so you can express subscriptions in whatever way is handier
51 # sections so you can express subscriptions in whatever way is handier
52 # for you.
52 # for you.
53 #
53 #
54 # [usersubs]
54 # [usersubs]
55 # # key is subscriber email, value is ","-separated list of glob patterns
55 # # key is subscriber email, value is ","-separated list of glob patterns
56 # user@host = pattern
56 # user@host = pattern
57 #
57 #
58 # [reposubs]
58 # [reposubs]
59 # # key is glob pattern, value is ","-separated list of subscriber emails
59 # # key is glob pattern, value is ","-separated list of subscriber emails
60 # pattern = user@host
60 # pattern = user@host
61 #
61 #
62 # glob patterns are matched against path to repo root.
62 # glob patterns are matched against path to repo root.
63 #
63 #
64 # if you like, you can put notify config file in repo that users can
64 # if you like, you can put notify config file in repo that users can
65 # push changes to, so they can manage their own subscriptions.
65 # push changes to, so they can manage their own subscriptions.
66
66
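The [usersubs]/[reposubs] matching described above reduces to an fnmatch of each glob pattern against the repository root path; a tiny illustration with made-up values:

import fnmatch

repo_root = '/srv/hg/projects/widget'       # hypothetical repository root
pattern = '/srv/hg/projects/*'              # hypothetical [reposubs] key
print(fnmatch.fnmatch(repo_root, pattern))  # True -> these subscribers match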
67 from mercurial.demandload import *
67 from mercurial.demandload import *
68 from mercurial.i18n import gettext as _
68 from mercurial.i18n import gettext as _
69 from mercurial.node import *
69 from mercurial.node import *
70 demandload(globals(), 'email.Parser mercurial:commands,patch,templater,util')
70 demandload(globals(), 'mercurial:commands,patch,templater,util,mail')
71 demandload(globals(), 'fnmatch socket time')
71 demandload(globals(), 'email.Parser fnmatch socket time')
72
72
73 # template for single changeset can include email headers.
73 # template for single changeset can include email headers.
74 single_template = '''
74 single_template = '''
75 Subject: changeset in {webroot}: {desc|firstline|strip}
75 Subject: changeset in {webroot}: {desc|firstline|strip}
76 From: {author}
76 From: {author}
77
77
78 changeset {node|short} in {root}
78 changeset {node|short} in {root}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
79 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
80 description:
80 description:
81 \t{desc|tabindent|strip}
81 \t{desc|tabindent|strip}
82 '''.lstrip()
82 '''.lstrip()
83
83
84 # template for multiple changesets should not contain email headers,
84 # template for multiple changesets should not contain email headers,
85 # because only first set of headers will be used and result will look
85 # because only first set of headers will be used and result will look
86 # strange.
86 # strange.
87 multiple_template = '''
87 multiple_template = '''
88 changeset {node|short} in {root}
88 changeset {node|short} in {root}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
89 details: {baseurl}{webroot}?cmd=changeset;node={node|short}
90 summary: {desc|firstline}
90 summary: {desc|firstline}
91 '''
91 '''
92
92
93 deftemplates = {
93 deftemplates = {
94 'changegroup': multiple_template,
94 'changegroup': multiple_template,
95 }
95 }
96
96
97 class notifier(object):
97 class notifier(object):
98 '''email notification class.'''
98 '''email notification class.'''
99
99
100 def __init__(self, ui, repo, hooktype):
100 def __init__(self, ui, repo, hooktype):
101 self.ui = ui
101 self.ui = ui
102 cfg = self.ui.config('notify', 'config')
102 cfg = self.ui.config('notify', 'config')
103 if cfg:
103 if cfg:
104 self.ui.readconfig(cfg)
104 self.ui.readconfig(cfg)
105 self.repo = repo
105 self.repo = repo
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
106 self.stripcount = int(self.ui.config('notify', 'strip', 0))
107 self.root = self.strip(self.repo.root)
107 self.root = self.strip(self.repo.root)
108 self.domain = self.ui.config('notify', 'domain')
108 self.domain = self.ui.config('notify', 'domain')
109 self.sio = templater.stringio()
109 self.sio = templater.stringio()
110 self.subs = self.subscribers()
110 self.subs = self.subscribers()
111
111
112 mapfile = self.ui.config('notify', 'style')
112 mapfile = self.ui.config('notify', 'style')
113 template = (self.ui.config('notify', hooktype) or
113 template = (self.ui.config('notify', hooktype) or
114 self.ui.config('notify', 'template'))
114 self.ui.config('notify', 'template'))
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
115 self.t = templater.changeset_templater(self.ui, self.repo, mapfile,
116 self.sio)
116 self.sio)
117 if not mapfile and not template:
117 if not mapfile and not template:
118 template = deftemplates.get(hooktype) or single_template
118 template = deftemplates.get(hooktype) or single_template
119 if template:
119 if template:
120 template = templater.parsestring(template, quoted=False)
120 template = templater.parsestring(template, quoted=False)
121 self.t.use_template(template)
121 self.t.use_template(template)
122
122
123 def strip(self, path):
123 def strip(self, path):
124 '''strip leading slashes from local path, turn into web-safe path.'''
124 '''strip leading slashes from local path, turn into web-safe path.'''
125
125
126 path = util.pconvert(path)
126 path = util.pconvert(path)
127 count = self.stripcount
127 count = self.stripcount
128 while count > 0:
128 while count > 0:
129 c = path.find('/')
129 c = path.find('/')
130 if c == -1:
130 if c == -1:
131 break
131 break
132 path = path[c+1:]
132 path = path[c+1:]
133 count -= 1
133 count -= 1
134 return path
134 return path
135
135
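A standalone rendering of the strip logic above, showing what a hypothetical notify.strip = 3 does to a made-up repository path (strip_slashes is an illustrative name only):

def strip_slashes(path, count):
    # drop everything up to and including the first `count` slashes
    while count > 0:
        c = path.find('/')
        if c == -1:
            break
        path = path[c + 1:]
        count -= 1
    return path

print(strip_slashes('/srv/hg/projects/widget', 3))  # -> 'projects/widget'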
136 def fixmail(self, addr):
136 def fixmail(self, addr):
137 '''try to clean up email addresses.'''
137 '''try to clean up email addresses.'''
138
138
139 addr = templater.email(addr.strip())
139 addr = templater.email(addr.strip())
140 a = addr.find('@localhost')
140 a = addr.find('@localhost')
141 if a != -1:
141 if a != -1:
142 addr = addr[:a]
142 addr = addr[:a]
143 if '@' not in addr:
143 if '@' not in addr:
144 return addr + '@' + self.domain
144 return addr + '@' + self.domain
145 return addr
145 return addr
146
146
147 def subscribers(self):
147 def subscribers(self):
148 '''return list of email addresses of subscribers to this repo.'''
148 '''return list of email addresses of subscribers to this repo.'''
149
149
150 subs = {}
150 subs = {}
151 for user, pats in self.ui.configitems('usersubs'):
151 for user, pats in self.ui.configitems('usersubs'):
152 for pat in pats.split(','):
152 for pat in pats.split(','):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
153 if fnmatch.fnmatch(self.repo.root, pat.strip()):
154 subs[self.fixmail(user)] = 1
154 subs[self.fixmail(user)] = 1
155 for pat, users in self.ui.configitems('reposubs'):
155 for pat, users in self.ui.configitems('reposubs'):
156 if fnmatch.fnmatch(self.repo.root, pat):
156 if fnmatch.fnmatch(self.repo.root, pat):
157 for user in users.split(','):
157 for user in users.split(','):
158 subs[self.fixmail(user)] = 1
158 subs[self.fixmail(user)] = 1
159 subs = subs.keys()
159 subs = subs.keys()
160 subs.sort()
160 subs.sort()
161 return subs
161 return subs
162
162
163 def url(self, path=None):
163 def url(self, path=None):
164 return self.ui.config('web', 'baseurl') + (path or self.root)
164 return self.ui.config('web', 'baseurl') + (path or self.root)
165
165
166 def node(self, node):
166 def node(self, node):
167 '''format one changeset.'''
167 '''format one changeset.'''
168
168
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
169 self.t.show(changenode=node, changes=self.repo.changelog.read(node),
170 baseurl=self.ui.config('web', 'baseurl'),
170 baseurl=self.ui.config('web', 'baseurl'),
171 root=self.repo.root,
171 root=self.repo.root,
172 webroot=self.root)
172 webroot=self.root)
173
173
174 def skipsource(self, source):
174 def skipsource(self, source):
175 '''true if incoming changes from this source should be skipped.'''
175 '''true if incoming changes from this source should be skipped.'''
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
176 ok_sources = self.ui.config('notify', 'sources', 'serve').split()
177 return source not in ok_sources
177 return source not in ok_sources
178
178
179 def send(self, node, count):
179 def send(self, node, count):
180 '''send message.'''
180 '''send message.'''
181
181
182 p = email.Parser.Parser()
182 p = email.Parser.Parser()
183 self.sio.seek(0)
183 self.sio.seek(0)
184 msg = p.parse(self.sio)
184 msg = p.parse(self.sio)
185
185
186 def fix_subject():
186 def fix_subject():
187 '''try to make subject line exist and be useful.'''
187 '''try to make subject line exist and be useful.'''
188
188
189 subject = msg['Subject']
189 subject = msg['Subject']
190 if not subject:
190 if not subject:
191 if count > 1:
191 if count > 1:
192 subject = _('%s: %d new changesets') % (self.root, count)
192 subject = _('%s: %d new changesets') % (self.root, count)
193 else:
193 else:
194 changes = self.repo.changelog.read(node)
194 changes = self.repo.changelog.read(node)
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
195 s = changes[4].lstrip().split('\n', 1)[0].rstrip()
196 subject = '%s: %s' % (self.root, s)
196 subject = '%s: %s' % (self.root, s)
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
197 maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
198 if maxsubject and len(subject) > maxsubject:
198 if maxsubject and len(subject) > maxsubject:
199 subject = subject[:maxsubject-3] + '...'
199 subject = subject[:maxsubject-3] + '...'
200 del msg['Subject']
200 del msg['Subject']
201 msg['Subject'] = subject
201 msg['Subject'] = subject
202
202
203 def fix_sender():
203 def fix_sender():
204 '''try to make message have proper sender.'''
204 '''try to make message have proper sender.'''
205
205
206 sender = msg['From']
206 sender = msg['From']
207 if not sender:
207 if not sender:
208 sender = self.ui.config('email', 'from') or self.ui.username()
208 sender = self.ui.config('email', 'from') or self.ui.username()
209 if '@' not in sender or '@localhost' in sender:
209 if '@' not in sender or '@localhost' in sender:
210 sender = self.fixmail(sender)
210 sender = self.fixmail(sender)
211 del msg['From']
211 del msg['From']
212 msg['From'] = sender
212 msg['From'] = sender
213
213
214 fix_subject()
214 fix_subject()
215 fix_sender()
215 fix_sender()
216
216
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
217 msg['X-Hg-Notification'] = 'changeset ' + short(node)
218 if not msg['Message-Id']:
218 if not msg['Message-Id']:
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
219 msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
220 (short(node), int(time.time()),
220 (short(node), int(time.time()),
221 hash(self.repo.root), socket.getfqdn()))
221 hash(self.repo.root), socket.getfqdn()))
222 msg['To'] = ', '.join(self.subs)
222 msg['To'] = ', '.join(self.subs)
223
223
224 msgtext = msg.as_string(0)
224 msgtext = msg.as_string(0)
225 if self.ui.configbool('notify', 'test', True):
225 if self.ui.configbool('notify', 'test', True):
226 self.ui.write(msgtext)
226 self.ui.write(msgtext)
227 if not msgtext.endswith('\n'):
227 if not msgtext.endswith('\n'):
228 self.ui.write('\n')
228 self.ui.write('\n')
229 else:
229 else:
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
230 self.ui.status(_('notify: sending %d subscribers %d changes\n') %
231 (len(self.subs), count))
231 (len(self.subs), count))
232 mail = self.ui.sendmail()
232 mail.sendmail(self.ui, templater.email(msg['From']),
233 mail.sendmail(templater.email(msg['From']), self.subs, msgtext)
233 self.subs, msgtext)
234
234
235 def diff(self, node, ref):
235 def diff(self, node, ref):
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
236 maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
237 if maxdiff == 0:
237 if maxdiff == 0:
238 return
238 return
239 fp = templater.stringio()
239 fp = templater.stringio()
240 prev = self.repo.changelog.parents(node)[0]
240 prev = self.repo.changelog.parents(node)[0]
241 patch.diff(self.repo, fp, prev, ref)
241 patch.diff(self.repo, fp, prev, ref)
242 difflines = fp.getvalue().splitlines(1)
242 difflines = fp.getvalue().splitlines(1)
243 if maxdiff > 0 and len(difflines) > maxdiff:
243 if maxdiff > 0 and len(difflines) > maxdiff:
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
244 self.sio.write(_('\ndiffs (truncated from %d to %d lines):\n\n') %
245 (len(difflines), maxdiff))
245 (len(difflines), maxdiff))
246 difflines = difflines[:maxdiff]
246 difflines = difflines[:maxdiff]
247 elif difflines:
247 elif difflines:
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
248 self.sio.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
249 self.sio.write(*difflines)
249 self.sio.write(*difflines)
250
250
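The maxdiff handling above amounts to a simple three-way policy; a minimal sketch with hypothetical values (clip_diff is illustrative, not the method itself):

def clip_diff(difflines, maxdiff):
    if maxdiff == 0:                            # 0 disables diffs entirely
        return []
    if maxdiff > 0 and len(difflines) > maxdiff:
        return difflines[:maxdiff]              # positive limit truncates
    return difflines                            # negative (-1) means include all

lines = ['line %d\n' % i for i in range(500)]
print(len(clip_diff(lines, 300)), len(clip_diff(lines, -1)), len(clip_diff(lines, 0)))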
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
251 def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
252 '''send email notifications to interested subscribers.
252 '''send email notifications to interested subscribers.
253
253
254 if used as changegroup hook, send one email for all changesets in
254 if used as changegroup hook, send one email for all changesets in
255 changegroup. else send one email per changeset.'''
255 changegroup. else send one email per changeset.'''
256 n = notifier(ui, repo, hooktype)
256 n = notifier(ui, repo, hooktype)
257 if not n.subs:
257 if not n.subs:
258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
258 ui.debug(_('notify: no subscribers to repo %s\n') % n.root)
259 return
259 return
260 if n.skipsource(source):
260 if n.skipsource(source):
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
261 ui.debug(_('notify: changes have source "%s" - skipping\n') %
262 source)
262 source)
263 return
263 return
264 node = bin(node)
264 node = bin(node)
265 if hooktype == 'changegroup':
265 if hooktype == 'changegroup':
266 start = repo.changelog.rev(node)
266 start = repo.changelog.rev(node)
267 end = repo.changelog.count()
267 end = repo.changelog.count()
268 count = end - start
268 count = end - start
269 for rev in xrange(start, end):
269 for rev in xrange(start, end):
270 n.node(repo.changelog.node(rev))
270 n.node(repo.changelog.node(rev))
271 n.diff(node, repo.changelog.tip())
271 n.diff(node, repo.changelog.tip())
272 else:
272 else:
273 count = 1
273 count = 1
274 n.node(node)
274 n.node(node)
275 n.diff(node, node)
275 n.diff(node, node)
276 n.send(node, count)
276 n.send(node, count)
@@ -1,309 +1,309 b''
1 # Command for sending a collection of Mercurial changesets as a series
1 # Command for sending a collection of Mercurial changesets as a series
2 # of patch emails.
2 # of patch emails.
3 #
3 #
4 # The series is started off with a "[PATCH 0 of N]" introduction,
4 # The series is started off with a "[PATCH 0 of N]" introduction,
5 # which describes the series as a whole.
5 # which describes the series as a whole.
6 #
6 #
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
7 # Each patch email has a Subject line of "[PATCH M of N] ...", using
8 # the first line of the changeset description as the subject text.
8 # the first line of the changeset description as the subject text.
9 # The message contains two or three body parts:
9 # The message contains two or three body parts:
10 #
10 #
11 # The remainder of the changeset description.
11 # The remainder of the changeset description.
12 #
12 #
13 # [Optional] If the diffstat program is installed, the result of
13 # [Optional] If the diffstat program is installed, the result of
14 # running diffstat on the patch.
14 # running diffstat on the patch.
15 #
15 #
16 # The patch itself, as generated by "hg export".
16 # The patch itself, as generated by "hg export".
17 #
17 #
18 # Each message refers to all of its predecessors using the In-Reply-To
18 # Each message refers to all of its predecessors using the In-Reply-To
19 # and References headers, so they will show up as a sequence in
19 # and References headers, so they will show up as a sequence in
20 # threaded mail and news readers, and in mail archives.
20 # threaded mail and news readers, and in mail archives.
21 #
21 #
22 # For each changeset, you will be prompted with a diffstat summary and
22 # For each changeset, you will be prompted with a diffstat summary and
23 # the changeset summary, so you can be sure you are sending the right
23 # the changeset summary, so you can be sure you are sending the right
24 # changes.
24 # changes.
25 #
25 #
26 # It is best to run this script with the "-n" (test only) flag before
26 # It is best to run this script with the "-n" (test only) flag before
27 # firing it up "for real", in which case it will use your pager to
27 # firing it up "for real", in which case it will use your pager to
28 # display each of the messages that it would send.
28 # display each of the messages that it would send.
29 #
29 #
30 # The "-m" (mbox) option will create an mbox file instead of sending
30 # The "-m" (mbox) option will create an mbox file instead of sending
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
31 # the messages directly. This can be reviewed e.g. with "mutt -R -f mbox",
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
32 # and finally sent with "formail -s sendmail -bm -t < mbox".
33 #
33 #
34 # To configure other defaults, add a section like this to your hgrc
34 # To configure other defaults, add a section like this to your hgrc
35 # file:
35 # file:
36 #
36 #
37 # [email]
37 # [email]
38 # from = My Name <my@email>
38 # from = My Name <my@email>
39 # to = recipient1, recipient2, ...
39 # to = recipient1, recipient2, ...
40 # cc = cc1, cc2, ...
40 # cc = cc1, cc2, ...
41 # bcc = bcc1, bcc2, ...
41 # bcc = bcc1, bcc2, ...
42
42
43 from mercurial.demandload import *
43 from mercurial.demandload import *
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
44 demandload(globals(), '''email.MIMEMultipart email.MIMEText email.Utils
45 mercurial:commands,hg,ui
45 mercurial:commands,hg,ui
46 os errno popen2 socket sys tempfile time''')
46 os errno popen2 socket sys tempfile time''')
47 from mercurial.i18n import gettext as _
47 from mercurial.i18n import gettext as _
48 from mercurial.node import *
48 from mercurial.node import *
49
49
50 try:
50 try:
51 # readline gives raw_input editing capabilities, but is not
51 # readline gives raw_input editing capabilities, but is not
52 # present on windows
52 # present on windows
53 import readline
53 import readline
54 except ImportError: pass
54 except ImportError: pass
55
55
56 def diffstat(patch):
56 def diffstat(patch):
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
57 fd, name = tempfile.mkstemp(prefix="hg-patchbomb-", suffix=".txt")
58 try:
58 try:
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
59 p = popen2.Popen3('diffstat -p1 -w79 2>/dev/null > ' + name)
60 try:
60 try:
61 for line in patch: print >> p.tochild, line
61 for line in patch: print >> p.tochild, line
62 p.tochild.close()
62 p.tochild.close()
63 if p.wait(): return
63 if p.wait(): return
64 fp = os.fdopen(fd, 'r')
64 fp = os.fdopen(fd, 'r')
65 stat = []
65 stat = []
66 for line in fp: stat.append(line.lstrip())
66 for line in fp: stat.append(line.lstrip())
67 last = stat.pop()
67 last = stat.pop()
68 stat.insert(0, last)
68 stat.insert(0, last)
69 stat = ''.join(stat)
69 stat = ''.join(stat)
70 if stat.startswith('0 files'): raise ValueError
70 if stat.startswith('0 files'): raise ValueError
71 return stat
71 return stat
72 except: raise
72 except: raise
73 finally:
73 finally:
74 try: os.unlink(name)
74 try: os.unlink(name)
75 except: pass
75 except: pass
76
76
77 def patchbomb(ui, repo, *revs, **opts):
77 def patchbomb(ui, repo, *revs, **opts):
78 '''send changesets as a series of patch emails
78 '''send changesets as a series of patch emails
79
79
80 The series starts with a "[PATCH 0 of N]" introduction, which
80 The series starts with a "[PATCH 0 of N]" introduction, which
81 describes the series as a whole.
81 describes the series as a whole.
82
82
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
83 Each patch email has a Subject line of "[PATCH M of N] ...", using
84 the first line of the changeset description as the subject text.
84 the first line of the changeset description as the subject text.
85 The message contains two or three body parts. First, the rest of
85 The message contains two or three body parts. First, the rest of
86 the changeset description. Next, (optionally) if the diffstat
86 the changeset description. Next, (optionally) if the diffstat
87 program is installed, the result of running diffstat on the patch.
87 program is installed, the result of running diffstat on the patch.
88 Finally, the patch itself, as generated by "hg export".'''
88 Finally, the patch itself, as generated by "hg export".'''
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
89 def prompt(prompt, default = None, rest = ': ', empty_ok = False):
90 if default: prompt += ' [%s]' % default
90 if default: prompt += ' [%s]' % default
91 prompt += rest
91 prompt += rest
92 while True:
92 while True:
93 r = raw_input(prompt)
93 r = raw_input(prompt)
94 if r: return r
94 if r: return r
95 if default is not None: return default
95 if default is not None: return default
96 if empty_ok: return r
96 if empty_ok: return r
97 ui.warn(_('Please enter a valid value.\n'))
97 ui.warn(_('Please enter a valid value.\n'))
98
98
99 def confirm(s):
99 def confirm(s):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
100 if not prompt(s, default = 'y', rest = '? ').lower().startswith('y'):
101 raise ValueError
101 raise ValueError
102
102
103 def cdiffstat(summary, patch):
103 def cdiffstat(summary, patch):
104 s = diffstat(patch)
104 s = diffstat(patch)
105 if s:
105 if s:
106 if summary:
106 if summary:
107 ui.write(summary, '\n')
107 ui.write(summary, '\n')
108 ui.write(s, '\n')
108 ui.write(s, '\n')
109 confirm(_('Does the diffstat above look okay'))
109 confirm(_('Does the diffstat above look okay'))
110 return s
110 return s
111
111
112 def makepatch(patch, idx, total):
112 def makepatch(patch, idx, total):
113 desc = []
113 desc = []
114 node = None
114 node = None
115 body = ''
115 body = ''
116 for line in patch:
116 for line in patch:
117 if line.startswith('#'):
117 if line.startswith('#'):
118 if line.startswith('# Node ID'): node = line.split()[-1]
118 if line.startswith('# Node ID'): node = line.split()[-1]
119 continue
119 continue
120 if line.startswith('diff -r'): break
120 if line.startswith('diff -r'): break
121 desc.append(line)
121 desc.append(line)
122 if not node: raise ValueError
122 if not node: raise ValueError
123
123
124 #body = ('\n'.join(desc[1:]).strip() or
124 #body = ('\n'.join(desc[1:]).strip() or
125 # 'Patch subject is complete summary.')
125 # 'Patch subject is complete summary.')
126 #body += '\n\n\n'
126 #body += '\n\n\n'
127
127
128 if opts['plain']:
128 if opts['plain']:
129 while patch and patch[0].startswith('# '): patch.pop(0)
129 while patch and patch[0].startswith('# '): patch.pop(0)
130 if patch: patch.pop(0)
130 if patch: patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
131 while patch and not patch[0].strip(): patch.pop(0)
132 if opts['diffstat']:
132 if opts['diffstat']:
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
133 body += cdiffstat('\n'.join(desc), patch) + '\n\n'
134 if opts['attach']:
134 if opts['attach']:
135 msg = email.MIMEMultipart.MIMEMultipart()
135 msg = email.MIMEMultipart.MIMEMultipart()
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
136 if body: msg.attach(email.MIMEText.MIMEText(body, 'plain'))
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
137 p = email.MIMEText.MIMEText('\n'.join(patch), 'x-patch')
138 binnode = bin(node)
138 binnode = bin(node)
139 # if node is mq patch, it will have patch file name as tag
139 # if node is mq patch, it will have patch file name as tag
140 patchname = [t for t in repo.nodetags(binnode)
140 patchname = [t for t in repo.nodetags(binnode)
141 if t.endswith('.patch') or t.endswith('.diff')]
141 if t.endswith('.patch') or t.endswith('.diff')]
142 if patchname:
142 if patchname:
143 patchname = patchname[0]
143 patchname = patchname[0]
144 elif total > 1:
144 elif total > 1:
145 patchname = commands.make_filename(repo, '%b-%n.patch',
145 patchname = commands.make_filename(repo, '%b-%n.patch',
146 binnode, idx, total)
146 binnode, idx, total)
147 else:
147 else:
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
148 patchname = commands.make_filename(repo, '%b.patch', binnode)
149 p['Content-Disposition'] = 'inline; filename=' + patchname
149 p['Content-Disposition'] = 'inline; filename=' + patchname
150 msg.attach(p)
150 msg.attach(p)
151 else:
151 else:
152 body += '\n'.join(patch)
152 body += '\n'.join(patch)
153 msg = email.MIMEText.MIMEText(body)
153 msg = email.MIMEText.MIMEText(body)
154 if total == 1:
154 if total == 1:
155 subj = '[PATCH] ' + desc[0].strip()
155 subj = '[PATCH] ' + desc[0].strip()
156 else:
156 else:
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
157 subj = '[PATCH %d of %d] %s' % (idx, total, desc[0].strip())
158 if subj.endswith('.'): subj = subj[:-1]
158 if subj.endswith('.'): subj = subj[:-1]
159 msg['Subject'] = subj
159 msg['Subject'] = subj
160 msg['X-Mercurial-Node'] = node
160 msg['X-Mercurial-Node'] = node
161 return msg
161 return msg
162
162
163 start_time = int(time.time())
163 start_time = int(time.time())
164
164
165 def genmsgid(id):
165 def genmsgid(id):
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
166 return '<%s.%s@%s>' % (id[:20], start_time, socket.getfqdn())
167
167
168 patches = []
168 patches = []
169
169
170 class exportee:
170 class exportee:
171 def __init__(self, container):
171 def __init__(self, container):
172 self.lines = []
172 self.lines = []
173 self.container = container
173 self.container = container
174 self.name = 'email'
174 self.name = 'email'
175
175
176 def write(self, data):
176 def write(self, data):
177 self.lines.append(data)
177 self.lines.append(data)
178
178
179 def close(self):
179 def close(self):
180 self.container.append(''.join(self.lines).split('\n'))
180 self.container.append(''.join(self.lines).split('\n'))
181 self.lines = []
181 self.lines = []
182
182
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
183 commands.export(ui, repo, *revs, **{'output': exportee(patches),
184 'switch_parent': False,
184 'switch_parent': False,
185 'text': None})
185 'text': None})
186
186
187 jumbo = []
187 jumbo = []
188 msgs = []
188 msgs = []
189
189
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
190 ui.write(_('This patch series consists of %d patches.\n\n') % len(patches))
191
191
192 for p, i in zip(patches, range(len(patches))):
192 for p, i in zip(patches, range(len(patches))):
193 jumbo.extend(p)
193 jumbo.extend(p)
194 msgs.append(makepatch(p, i + 1, len(patches)))
194 msgs.append(makepatch(p, i + 1, len(patches)))
195
195
196 sender = (opts['from'] or ui.config('email', 'from') or
196 sender = (opts['from'] or ui.config('email', 'from') or
197 ui.config('patchbomb', 'from') or
197 ui.config('patchbomb', 'from') or
198 prompt('From', ui.username()))
198 prompt('From', ui.username()))
199
199
200 def getaddrs(opt, prpt, default = None):
200 def getaddrs(opt, prpt, default = None):
201 addrs = opts[opt] or (ui.config('email', opt) or
201 addrs = opts[opt] or (ui.config('email', opt) or
202 ui.config('patchbomb', opt) or
202 ui.config('patchbomb', opt) or
203 prompt(prpt, default = default)).split(',')
203 prompt(prpt, default = default)).split(',')
204 return [a.strip() for a in addrs if a.strip()]
204 return [a.strip() for a in addrs if a.strip()]
205 to = getaddrs('to', 'To')
205 to = getaddrs('to', 'To')
206 cc = getaddrs('cc', 'Cc', '')
206 cc = getaddrs('cc', 'Cc', '')
207
207
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
208 bcc = opts['bcc'] or (ui.config('email', 'bcc') or
209 ui.config('patchbomb', 'bcc') or '').split(',')
209 ui.config('patchbomb', 'bcc') or '').split(',')
210 bcc = [a.strip() for a in bcc if a.strip()]
210 bcc = [a.strip() for a in bcc if a.strip()]
211
211
212 if len(patches) > 1:
212 if len(patches) > 1:
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
213 ui.write(_('\nWrite the introductory message for the patch series.\n\n'))
214
214
215 subj = '[PATCH 0 of %d] %s' % (
215 subj = '[PATCH 0 of %d] %s' % (
216 len(patches),
216 len(patches),
217 opts['subject'] or
217 opts['subject'] or
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
218 prompt('Subject:', rest = ' [PATCH 0 of %d] ' % len(patches)))
219
219
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
220 ui.write(_('Finish with ^D or a dot on a line by itself.\n\n'))
221
221
222 body = []
222 body = []
223
223
224 while True:
224 while True:
225 try: l = raw_input()
225 try: l = raw_input()
226 except EOFError: break
226 except EOFError: break
227 if l == '.': break
227 if l == '.': break
228 body.append(l)
228 body.append(l)
229
229
230 if opts['diffstat']:
230 if opts['diffstat']:
231 d = cdiffstat(_('Final summary:\n'), jumbo)
231 d = cdiffstat(_('Final summary:\n'), jumbo)
232 if d: body.append('\n' + d)
232 if d: body.append('\n' + d)
233
233
234 body = '\n'.join(body) + '\n'
234 body = '\n'.join(body) + '\n'
235
235
236 msg = email.MIMEText.MIMEText(body)
236 msg = email.MIMEText.MIMEText(body)
237 msg['Subject'] = subj
237 msg['Subject'] = subj
238
238
239 msgs.insert(0, msg)
239 msgs.insert(0, msg)
240
240
241 ui.write('\n')
241 ui.write('\n')
242
242
243 if not opts['test'] and not opts['mbox']:
243 if not opts['test'] and not opts['mbox']:
244 mail = ui.sendmail()
244 mailer = mail.connect(ui)
245 parent = None
245 parent = None
246
246
247 # Calculate UTC offset
247 # Calculate UTC offset
248 if time.daylight: offset = time.altzone
248 if time.daylight: offset = time.altzone
249 else: offset = time.timezone
249 else: offset = time.timezone
250 if offset <= 0: sign, offset = '+', -offset
250 if offset <= 0: sign, offset = '+', -offset
251 else: sign = '-'
251 else: sign = '-'
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
252 offset = '%s%02d%02d' % (sign, offset / 3600, (offset % 3600) / 60)
253
253
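The block just above converts time.timezone/time.altzone (seconds west of UTC) into the '+HHMM'/'-HHMM' suffix used in the Date header; a standalone restatement for clarity:

import time

offset = time.altzone if time.daylight else time.timezone
if offset <= 0:
    sign, offset = '+', -offset                 # at or east of UTC
else:
    sign = '-'                                  # west of UTC
print('%s%02d%02d' % (sign, offset // 3600, (offset % 3600) // 60))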
254 sender_addr = email.Utils.parseaddr(sender)[1]
254 sender_addr = email.Utils.parseaddr(sender)[1]
255 for m in msgs:
255 for m in msgs:
256 try:
256 try:
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
257 m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
258 except TypeError:
258 except TypeError:
259 m['Message-Id'] = genmsgid('patchbomb')
259 m['Message-Id'] = genmsgid('patchbomb')
260 if parent:
260 if parent:
261 m['In-Reply-To'] = parent
261 m['In-Reply-To'] = parent
262 else:
262 else:
263 parent = m['Message-Id']
263 parent = m['Message-Id']
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
264 m['Date'] = time.strftime("%a, %d %b %Y %H:%M:%S", time.localtime(start_time)) + ' ' + offset
265
265
266 start_time += 1
266 start_time += 1
267 m['From'] = sender
267 m['From'] = sender
268 m['To'] = ', '.join(to)
268 m['To'] = ', '.join(to)
269 if cc: m['Cc'] = ', '.join(cc)
269 if cc: m['Cc'] = ', '.join(cc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
270 if bcc: m['Bcc'] = ', '.join(bcc)
271 if opts['test']:
271 if opts['test']:
272 ui.status('Displaying ', m['Subject'], ' ...\n')
272 ui.status('Displaying ', m['Subject'], ' ...\n')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
273 fp = os.popen(os.getenv('PAGER', 'more'), 'w')
274 try:
274 try:
275 fp.write(m.as_string(0))
275 fp.write(m.as_string(0))
276 fp.write('\n')
276 fp.write('\n')
277 except IOError, inst:
277 except IOError, inst:
278 if inst.errno != errno.EPIPE:
278 if inst.errno != errno.EPIPE:
279 raise
279 raise
280 fp.close()
280 fp.close()
281 elif opts['mbox']:
281 elif opts['mbox']:
282 ui.status('Writing ', m['Subject'], ' ...\n')
282 ui.status('Writing ', m['Subject'], ' ...\n')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
283 fp = open(opts['mbox'], m.has_key('In-Reply-To') and 'ab+' or 'wb+')
284 date = time.asctime(time.localtime(start_time))
284 date = time.asctime(time.localtime(start_time))
285 fp.write('From %s %s\n' % (sender_addr, date))
285 fp.write('From %s %s\n' % (sender_addr, date))
286 fp.write(m.as_string(0))
286 fp.write(m.as_string(0))
287 fp.write('\n\n')
287 fp.write('\n\n')
288 fp.close()
288 fp.close()
289 else:
289 else:
290 ui.status('Sending ', m['Subject'], ' ...\n')
290 ui.status('Sending ', m['Subject'], ' ...\n')
291 # Exim does not remove the Bcc field
291 # Exim does not remove the Bcc field
292 del m['Bcc']
292 del m['Bcc']
293 mail.sendmail(sender, to + bcc + cc, m.as_string(0))
293 mailer.sendmail(sender, to + bcc + cc, m.as_string(0))
294
294
295 cmdtable = {
295 cmdtable = {
296 'email':
296 'email':
297 (patchbomb,
297 (patchbomb,
298 [('a', 'attach', None, 'send patches as inline attachments'),
298 [('a', 'attach', None, 'send patches as inline attachments'),
299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
299 ('', 'bcc', [], 'email addresses of blind copy recipients'),
300 ('c', 'cc', [], 'email addresses of copy recipients'),
300 ('c', 'cc', [], 'email addresses of copy recipients'),
301 ('d', 'diffstat', None, 'add diffstat output to messages'),
301 ('d', 'diffstat', None, 'add diffstat output to messages'),
302 ('f', 'from', '', 'email address of sender'),
302 ('f', 'from', '', 'email address of sender'),
303 ('', 'plain', None, 'omit hg patch header'),
303 ('', 'plain', None, 'omit hg patch header'),
304 ('n', 'test', None, 'print messages that would be sent'),
304 ('n', 'test', None, 'print messages that would be sent'),
305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
305 ('m', 'mbox', '', 'write messages to mbox file instead of sending them'),
306 ('s', 'subject', '', 'subject of introductory message'),
306 ('s', 'subject', '', 'subject of introductory message'),
307 ('t', 'to', [], 'email addresses of recipients')],
307 ('t', 'to', [], 'email addresses of recipients')],
308 "hg email [OPTION]... [REV]...")
308 "hg email [OPTION]... [REV]...")
309 }
309 }
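Both extensions above now hand delivery to the shared mail module: connect(ui) returns an object with a sendmail(sender, recipients, msg) method, and mail.sendmail(ui, sender, recipients, msg) is the one-shot form used by notify. A hedged usage sketch with made-up addresses (the bare ui() construction is an assumption about setup, not shown in this changeset):

from mercurial import ui as uimod, mail

u = uimod.ui()                              # assumed default ui construction
sender = 'me@example.com'                   # hypothetical sender
recipients = ['devlist@example.com']        # hypothetical recipients
msgtext = 'Subject: test\n\nhello\n'

mail.connect(u).sendmail(sender, recipients, msgtext)
# or equivalently the module-level helper:
# mail.sendmail(u, sender, recipients, msgtext)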
@@ -1,3521 +1,3490 b''
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from node import *
9 from node import *
10 from i18n import gettext as _
10 from i18n import gettext as _
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
11 demandload(globals(), "os re sys signal shutil imp urllib pdb")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
12 demandload(globals(), "fancyopts ui hg util lock revlog templater bundlerepo")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
13 demandload(globals(), "fnmatch difflib patch random signal tempfile time")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
14 demandload(globals(), "traceback errno socket version struct atexit sets bz2")
15 demandload(globals(), "archival cStringIO changegroup")
15 demandload(globals(), "archival cStringIO changegroup")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
16 demandload(globals(), "cmdutil hgweb.server sshserver")
17
17
18 class UnknownCommand(Exception):
18 class UnknownCommand(Exception):
19 """Exception raised if command is not in the command table."""
19 """Exception raised if command is not in the command table."""
20 class AmbiguousCommand(Exception):
20 class AmbiguousCommand(Exception):
21 """Exception raised if command shortcut matches more than one command."""
21 """Exception raised if command shortcut matches more than one command."""
22
22
23 def bail_if_changed(repo):
23 def bail_if_changed(repo):
24 modified, added, removed, deleted = repo.status()[:4]
24 modified, added, removed, deleted = repo.status()[:4]
25 if modified or added or removed or deleted:
25 if modified or added or removed or deleted:
26 raise util.Abort(_("outstanding uncommitted changes"))
26 raise util.Abort(_("outstanding uncommitted changes"))
27
27
28 def relpath(repo, args):
28 def relpath(repo, args):
29 cwd = repo.getcwd()
29 cwd = repo.getcwd()
30 if cwd:
30 if cwd:
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
31 return [util.normpath(os.path.join(cwd, x)) for x in args]
32 return args
32 return args
33
33
34 def logmessage(opts):
34 def logmessage(opts):
35 """ get the log message according to -m and -l option """
35 """ get the log message according to -m and -l option """
36 message = opts['message']
36 message = opts['message']
37 logfile = opts['logfile']
37 logfile = opts['logfile']
38
38
39 if message and logfile:
39 if message and logfile:
40 raise util.Abort(_('options --message and --logfile are mutually '
40 raise util.Abort(_('options --message and --logfile are mutually '
41 'exclusive'))
41 'exclusive'))
42 if not message and logfile:
42 if not message and logfile:
43 try:
43 try:
44 if logfile == '-':
44 if logfile == '-':
45 message = sys.stdin.read()
45 message = sys.stdin.read()
46 else:
46 else:
47 message = open(logfile).read()
47 message = open(logfile).read()
48 except IOError, inst:
48 except IOError, inst:
49 raise util.Abort(_("can't read commit message '%s': %s") %
49 raise util.Abort(_("can't read commit message '%s': %s") %
50 (logfile, inst.strerror))
50 (logfile, inst.strerror))
51 return message
51 return message
52
52
53 def walkchangerevs(ui, repo, pats, opts):
53 def walkchangerevs(ui, repo, pats, opts):
54 '''Iterate over files and the revs they changed in.
54 '''Iterate over files and the revs they changed in.
55
55
56 Callers most commonly need to iterate backwards over the history
56 Callers most commonly need to iterate backwards over the history
57 it is interested in. Doing so has awful (quadratic-looking)
57 it is interested in. Doing so has awful (quadratic-looking)
58 performance, so we use iterators in a "windowed" way.
58 performance, so we use iterators in a "windowed" way.
59
59
60 We walk a window of revisions in the desired order. Within the
60 We walk a window of revisions in the desired order. Within the
61 window, we first walk forwards to gather data, then in the desired
61 window, we first walk forwards to gather data, then in the desired
62 order (usually backwards) to display it.
62 order (usually backwards) to display it.
63
63
64 This function returns an (iterator, getchange, matchfn) tuple. The
64 This function returns an (iterator, getchange, matchfn) tuple. The
65 getchange function returns the changelog entry for a numeric
65 getchange function returns the changelog entry for a numeric
66 revision. The iterator yields 3-tuples. They will be of one of
66 revision. The iterator yields 3-tuples. They will be of one of
67 the following forms:
67 the following forms:
68
68
69 "window", incrementing, lastrev: stepping through a window,
69 "window", incrementing, lastrev: stepping through a window,
70 positive if walking forwards through revs, last rev in the
70 positive if walking forwards through revs, last rev in the
71 sequence iterated over - use to reset state for the current window
71 sequence iterated over - use to reset state for the current window
72
72
73 "add", rev, fns: out-of-order traversal of the given file names
73 "add", rev, fns: out-of-order traversal of the given file names
74 fns, which changed during revision rev - use to gather data for
74 fns, which changed during revision rev - use to gather data for
75 possible display
75 possible display
76
76
77 "iter", rev, None: in-order traversal of the revs earlier iterated
77 "iter", rev, None: in-order traversal of the revs earlier iterated
78 over with "add" - use to display data'''
78 over with "add" - use to display data'''
79
79
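A self-contained illustration of the 3-tuple protocol described in the docstring above: callers collect file names on "add", reset per-window state on "window", and render on the in-order "iter" entries (the consume helper and the demo stream are fabricated for illustration):

import sys

def consume(changeiter, write):
    gathered = {}
    for st, rev, data in changeiter:
        if st == 'window':
            gathered.clear()                # new window: drop gathered state
        elif st == 'add':
            gathered[rev] = data            # out-of-order: collect file names
        elif st == 'iter':
            write('%d: %s\n' % (rev, ' '.join(gathered.get(rev, []))))

# simulated stream: one backwards window ending at rev 4, covering revs 5 and 4
demo = [('window', 0, 4),
        ('add', 4, ['a.txt']), ('add', 5, ['a.txt', 'b.txt']),
        ('iter', 5, None), ('iter', 4, None)]
consume(iter(demo), sys.stdout.write)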
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
80 def increasing_windows(start, end, windowsize=8, sizelimit=512):
81 if start < end:
81 if start < end:
82 while start < end:
82 while start < end:
83 yield start, min(windowsize, end-start)
83 yield start, min(windowsize, end-start)
84 start += windowsize
84 start += windowsize
85 if windowsize < sizelimit:
85 if windowsize < sizelimit:
86 windowsize *= 2
86 windowsize *= 2
87 else:
87 else:
88 while start > end:
88 while start > end:
89 yield start, min(windowsize, start-end-1)
89 yield start, min(windowsize, start-end-1)
90 start -= windowsize
90 start -= windowsize
91 if windowsize < sizelimit:
91 if windowsize < sizelimit:
92 windowsize *= 2
92 windowsize *= 2
93
93
94
94
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
95 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
96 follow = opts.get('follow') or opts.get('follow_first')
96 follow = opts.get('follow') or opts.get('follow_first')
97
97
98 if repo.changelog.count() == 0:
98 if repo.changelog.count() == 0:
99 return [], False, matchfn
99 return [], False, matchfn
100
100
101 if follow:
101 if follow:
102 p = repo.dirstate.parents()[0]
102 p = repo.dirstate.parents()[0]
103 if p == nullid:
103 if p == nullid:
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
104 ui.warn(_('No working directory revision; defaulting to tip\n'))
105 start = 'tip'
105 start = 'tip'
106 else:
106 else:
107 start = repo.changelog.rev(p)
107 start = repo.changelog.rev(p)
108 defrange = '%s:0' % start
108 defrange = '%s:0' % start
109 else:
109 else:
110 defrange = 'tip:0'
110 defrange = 'tip:0'
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
111 revs = map(int, revrange(ui, repo, opts['rev'] or [defrange]))
112 wanted = {}
112 wanted = {}
113 slowpath = anypats
113 slowpath = anypats
114 fncache = {}
114 fncache = {}
115
115
116 chcache = {}
116 chcache = {}
117 def getchange(rev):
117 def getchange(rev):
118 ch = chcache.get(rev)
118 ch = chcache.get(rev)
119 if ch is None:
119 if ch is None:
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
120 chcache[rev] = ch = repo.changelog.read(repo.lookup(str(rev)))
121 return ch
121 return ch
122
122
123 if not slowpath and not files:
123 if not slowpath and not files:
124 # No files, no patterns. Display all revs.
124 # No files, no patterns. Display all revs.
125 wanted = dict(zip(revs, revs))
125 wanted = dict(zip(revs, revs))
126 copies = []
126 copies = []
127 if not slowpath:
127 if not slowpath:
128 # Only files, no patterns. Check the history of each file.
128 # Only files, no patterns. Check the history of each file.
129 def filerevgen(filelog, node):
129 def filerevgen(filelog, node):
130 cl_count = repo.changelog.count()
130 cl_count = repo.changelog.count()
131 if node is None:
131 if node is None:
132 last = filelog.count() - 1
132 last = filelog.count() - 1
133 else:
133 else:
134 last = filelog.rev(node)
134 last = filelog.rev(node)
135 for i, window in increasing_windows(last, -1):
135 for i, window in increasing_windows(last, -1):
136 revs = []
136 revs = []
137 for j in xrange(i - window, i + 1):
137 for j in xrange(i - window, i + 1):
138 n = filelog.node(j)
138 n = filelog.node(j)
139 revs.append((filelog.linkrev(n),
139 revs.append((filelog.linkrev(n),
140 follow and filelog.renamed(n)))
140 follow and filelog.renamed(n)))
141 revs.reverse()
141 revs.reverse()
142 for rev in revs:
142 for rev in revs:
143 # only yield rev for which we have the changelog, it can
143 # only yield rev for which we have the changelog, it can
144 # happen while doing "hg log" during a pull or commit
144 # happen while doing "hg log" during a pull or commit
145 if rev[0] < cl_count:
145 if rev[0] < cl_count:
146 yield rev
146 yield rev
147 def iterfiles():
147 def iterfiles():
148 for filename in files:
148 for filename in files:
149 yield filename, None
149 yield filename, None
150 for filename_node in copies:
150 for filename_node in copies:
151 yield filename_node
151 yield filename_node
152 minrev, maxrev = min(revs), max(revs)
152 minrev, maxrev = min(revs), max(revs)
153 for file_, node in iterfiles():
153 for file_, node in iterfiles():
154 filelog = repo.file(file_)
154 filelog = repo.file(file_)
155 # A zero count may be a directory or deleted file, so
155 # A zero count may be a directory or deleted file, so
156 # try to find matching entries on the slow path.
156 # try to find matching entries on the slow path.
157 if filelog.count() == 0:
157 if filelog.count() == 0:
158 slowpath = True
158 slowpath = True
159 break
159 break
160 for rev, copied in filerevgen(filelog, node):
160 for rev, copied in filerevgen(filelog, node):
161 if rev <= maxrev:
161 if rev <= maxrev:
162 if rev < minrev:
162 if rev < minrev:
163 break
163 break
164 fncache.setdefault(rev, [])
164 fncache.setdefault(rev, [])
165 fncache[rev].append(file_)
165 fncache[rev].append(file_)
166 wanted[rev] = 1
166 wanted[rev] = 1
167 if follow and copied:
167 if follow and copied:
168 copies.append(copied)
168 copies.append(copied)
169 if slowpath:
169 if slowpath:
170 if follow:
170 if follow:
171 raise util.Abort(_('can only follow copies/renames for explicit '
171 raise util.Abort(_('can only follow copies/renames for explicit '
172 'file names'))
172 'file names'))
173
173
174 # The slow path checks files modified in every changeset.
174 # The slow path checks files modified in every changeset.
175 def changerevgen():
175 def changerevgen():
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
176 for i, window in increasing_windows(repo.changelog.count()-1, -1):
177 for j in xrange(i - window, i + 1):
177 for j in xrange(i - window, i + 1):
178 yield j, getchange(j)[3]
178 yield j, getchange(j)[3]
179
179
180 for rev, changefiles in changerevgen():
180 for rev, changefiles in changerevgen():
181 matches = filter(matchfn, changefiles)
181 matches = filter(matchfn, changefiles)
182 if matches:
182 if matches:
183 fncache[rev] = matches
183 fncache[rev] = matches
184 wanted[rev] = 1
184 wanted[rev] = 1
185
185
186 class followfilter:
186 def iterate():
187 def __init__(self, onlyfirst=False):
187 class followfilter:
188 self.startrev = -1
188 def __init__(self, onlyfirst=False):
189 self.roots = []
189 self.startrev = -1
190 self.onlyfirst = onlyfirst
190 self.roots = []
191
191 self.onlyfirst = onlyfirst
192 def match(self, rev):
192
193 def realparents(rev):
193 def match(self, rev):
194 if self.onlyfirst:
194 def realparents(rev):
195 return repo.changelog.parentrevs(rev)[0:1]
195 if self.onlyfirst:
196 return repo.changelog.parentrevs(rev)[0:1]
197 else:
198 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
199
200 if self.startrev == -1:
201 self.startrev = rev
202 return True
203
204 if rev > self.startrev:
205 # forward: all descendants
206 if not self.roots:
207 self.roots.append(self.startrev)
208 for parent in realparents(rev):
209 if parent in self.roots:
210 self.roots.append(rev)
211 return True
196 else:
212 else:
197 return filter(lambda x: x != -1, repo.changelog.parentrevs(rev))
213 # backwards: all parents
198
214 if not self.roots:
199 if self.startrev == -1:
215 self.roots.extend(realparents(self.startrev))
200 self.startrev = rev
216 if rev in self.roots:
201 return True
217 self.roots.remove(rev)
202
218 self.roots.extend(realparents(rev))
203 if rev > self.startrev:
204 # forward: all descendants
205 if not self.roots:
206 self.roots.append(self.startrev)
207 for parent in realparents(rev):
208 if parent in self.roots:
209 self.roots.append(rev)
210 return True
219 return True
211 else:
220
212 # backwards: all parents
221 return False
213 if not self.roots:
222
214 self.roots.extend(realparents(self.startrev))
215 if rev in self.roots:
216 self.roots.remove(rev)
217 self.roots.extend(realparents(rev))
218 return True
219
220 return False
221
222 # it might be worthwhile to do this in the iterator if the rev range
223 # is descending and the prune args are all within that range
224 for rev in opts.get('prune', ()):
225 rev = repo.changelog.rev(repo.lookup(rev))
226 ff = followfilter()
227 stop = min(revs[0], revs[-1])
228 for x in range(rev, stop-1, -1):
229 if ff.match(x) and wanted.has_key(x):
230 del wanted[x]
231
232 def iterate():
233 if follow and not files:
223 if follow and not files:
234 ff = followfilter(onlyfirst=opts.get('follow_first'))
224 ff = followfilter(onlyfirst=opts.get('follow_first'))
235 def want(rev):
225 def want(rev):
236 if ff.match(rev) and rev in wanted:
226 if rev not in wanted:
237 return True
227 return False
238 return False
228 return ff.match(rev)
239 else:
229 else:
240 def want(rev):
230 def want(rev):
241 return rev in wanted
231 return rev in wanted
242
232
243 for i, window in increasing_windows(0, len(revs)):
233 for i, window in increasing_windows(0, len(revs)):
244 yield 'window', revs[0] < revs[-1], revs[-1]
234 yield 'window', revs[0] < revs[-1], revs[-1]
245 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
235 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
246 srevs = list(nrevs)
236 srevs = list(nrevs)
247 srevs.sort()
237 srevs.sort()
248 for rev in srevs:
238 for rev in srevs:
249 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
239 fns = fncache.get(rev) or filter(matchfn, getchange(rev)[3])
250 yield 'add', rev, fns
240 yield 'add', rev, fns
251 for rev in nrevs:
241 for rev in nrevs:
252 yield 'iter', rev, None
242 yield 'iter', rev, None
253 return iterate(), getchange, matchfn
243 return iterate(), getchange, matchfn
254
244
255 revrangesep = ':'
245 revrangesep = ':'
256
246
257 def revfix(repo, val, defval):
247 def revfix(repo, val, defval):
258 '''turn user-level id of changeset into rev number.
248 '''turn user-level id of changeset into rev number.
259 user-level id can be tag, changeset, rev number, or negative rev
249 user-level id can be tag, changeset, rev number, or negative rev
260 number relative to number of revs (-1 is tip, etc).'''
250 number relative to number of revs (-1 is tip, etc).'''
261 if not val:
251 if not val:
262 return defval
252 return defval
263 try:
253 try:
264 num = int(val)
254 num = int(val)
265 if str(num) != val:
255 if str(num) != val:
266 raise ValueError
256 raise ValueError
267 if num < 0:
257 if num < 0:
268 num += repo.changelog.count()
258 num += repo.changelog.count()
269 if num < 0:
259 if num < 0:
270 num = 0
260 num = 0
271 elif num >= repo.changelog.count():
261 elif num >= repo.changelog.count():
272 raise ValueError
262 raise ValueError
273 except ValueError:
263 except ValueError:
274 try:
264 try:
275 num = repo.changelog.rev(repo.lookup(val))
265 num = repo.changelog.rev(repo.lookup(val))
276 except KeyError:
266 except KeyError:
277 raise util.Abort(_('invalid revision identifier %s'), val)
267 raise util.Abort(_('invalid revision identifier %s'), val)
278 return num
268 return num
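A few hand-worked cases, assuming a hypothetical repository with 100 changesets so that repo.changelog.count() is 100 (illustrative only):

#   revfix(repo, '', 7)       ->  7    (empty value falls back to defval)
#   revfix(repo, '42', None)  ->  42   (plain revision number)
#   revfix(repo, '-1', None)  ->  99   (negative numbers count back from tip)
#   revfix(repo, 'tip', None) ->  99   (anything else goes through repo.lookup)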
279
269
280 def revpair(ui, repo, revs):
270 def revpair(ui, repo, revs):
281 '''return pair of nodes, given list of revisions. second item can
271 '''return pair of nodes, given list of revisions. second item can
282 be None, meaning use working dir.'''
272 be None, meaning use working dir.'''
283 if not revs:
273 if not revs:
284 return repo.dirstate.parents()[0], None
274 return repo.dirstate.parents()[0], None
285 end = None
275 end = None
286 if len(revs) == 1:
276 if len(revs) == 1:
287 start = revs[0]
277 start = revs[0]
288 if revrangesep in start:
278 if revrangesep in start:
289 start, end = start.split(revrangesep, 1)
279 start, end = start.split(revrangesep, 1)
290 start = revfix(repo, start, 0)
280 start = revfix(repo, start, 0)
291 end = revfix(repo, end, repo.changelog.count() - 1)
281 end = revfix(repo, end, repo.changelog.count() - 1)
292 else:
282 else:
293 start = revfix(repo, start, None)
283 start = revfix(repo, start, None)
294 elif len(revs) == 2:
284 elif len(revs) == 2:
295 if revrangesep in revs[0] or revrangesep in revs[1]:
285 if revrangesep in revs[0] or revrangesep in revs[1]:
296 raise util.Abort(_('too many revisions specified'))
286 raise util.Abort(_('too many revisions specified'))
297 start = revfix(repo, revs[0], None)
287 start = revfix(repo, revs[0], None)
298 end = revfix(repo, revs[1], None)
288 end = revfix(repo, revs[1], None)
299 else:
289 else:
300 raise util.Abort(_('too many revisions specified'))
290 raise util.Abort(_('too many revisions specified'))
301 if end is not None: end = repo.lookup(str(end))
291 if end is not None: end = repo.lookup(str(end))
302 return repo.lookup(str(start)), end
292 return repo.lookup(str(start)), end
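Sketch of the resulting pairs for a few inputs, where node(N) stands for the binary node id of revision N (illustrative, not part of this changeset):

#   revpair(ui, repo, [])           -> (first parent of the working dir, None)
#   revpair(ui, repo, ['4'])        -> (node(4), None)
#   revpair(ui, repo, ['2:5'])      -> (node(2), node(5))
#   revpair(ui, repo, ['2', '5'])   -> (node(2), node(5))
#   revpair(ui, repo, ['2', '5:7']) -> raises util.Abort (too many revisions)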
303
293
304 def revrange(ui, repo, revs):
294 def revrange(ui, repo, revs):
305 """Yield revision as strings from a list of revision specifications."""
295 """Yield revision as strings from a list of revision specifications."""
306 seen = {}
296 seen = {}
307 for spec in revs:
297 for spec in revs:
308 if revrangesep in spec:
298 if revrangesep in spec:
309 start, end = spec.split(revrangesep, 1)
299 start, end = spec.split(revrangesep, 1)
310 start = revfix(repo, start, 0)
300 start = revfix(repo, start, 0)
311 end = revfix(repo, end, repo.changelog.count() - 1)
301 end = revfix(repo, end, repo.changelog.count() - 1)
312 step = start > end and -1 or 1
302 step = start > end and -1 or 1
313 for rev in xrange(start, end+step, step):
303 for rev in xrange(start, end+step, step):
314 if rev in seen:
304 if rev in seen:
315 continue
305 continue
316 seen[rev] = 1
306 seen[rev] = 1
317 yield str(rev)
307 yield str(rev)
318 else:
308 else:
319 rev = revfix(repo, spec, None)
309 rev = revfix(repo, spec, None)
320 if rev in seen:
310 if rev in seen:
321 continue
311 continue
322 seen[rev] = 1
312 seen[rev] = 1
323 yield str(rev)
313 yield str(rev)
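Worked example (illustrative): ranges expand inclusively in either direction, and the seen dict suppresses duplicates across specifications.

#   list(revrange(ui, repo, ['3:5', '4', '6']))  ->  ['3', '4', '5', '6']
#   list(revrange(ui, repo, ['5:3']))            ->  ['5', '4', '3']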
324
314
325 def write_bundle(cg, filename=None, compress=True):
315 def write_bundle(cg, filename=None, compress=True):
326 """Write a bundle file and return its filename.
316 """Write a bundle file and return its filename.
327
317
328 Existing files will not be overwritten.
318 Existing files will not be overwritten.
329 If no filename is specified, a temporary file is created.
319 If no filename is specified, a temporary file is created.
330 bz2 compression can be turned off.
320 bz2 compression can be turned off.
331 The bundle file will be deleted in case of errors.
321 The bundle file will be deleted in case of errors.
332 """
322 """
333 class nocompress(object):
323 class nocompress(object):
334 def compress(self, x):
324 def compress(self, x):
335 return x
325 return x
336 def flush(self):
326 def flush(self):
337 return ""
327 return ""
338
328
339 fh = None
329 fh = None
340 cleanup = None
330 cleanup = None
341 try:
331 try:
342 if filename:
332 if filename:
343 if os.path.exists(filename):
333 if os.path.exists(filename):
344 raise util.Abort(_("file '%s' already exists"), filename)
334 raise util.Abort(_("file '%s' already exists"), filename)
345 fh = open(filename, "wb")
335 fh = open(filename, "wb")
346 else:
336 else:
347 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
337 fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg")
348 fh = os.fdopen(fd, "wb")
338 fh = os.fdopen(fd, "wb")
349 cleanup = filename
339 cleanup = filename
350
340
351 if compress:
341 if compress:
352 fh.write("HG10")
342 fh.write("HG10")
353 z = bz2.BZ2Compressor(9)
343 z = bz2.BZ2Compressor(9)
354 else:
344 else:
355 fh.write("HG10UN")
345 fh.write("HG10UN")
356 z = nocompress()
346 z = nocompress()
357 # parse the changegroup data, otherwise we will block
347 # parse the changegroup data, otherwise we will block
358 # in case of sshrepo because we don't know the end of the stream
348 # in case of sshrepo because we don't know the end of the stream
359
349
360 # an empty chunkiter is the end of the changegroup
350 # an empty chunkiter is the end of the changegroup
361 empty = False
351 empty = False
362 while not empty:
352 while not empty:
363 empty = True
353 empty = True
364 for chunk in changegroup.chunkiter(cg):
354 for chunk in changegroup.chunkiter(cg):
365 empty = False
355 empty = False
366 fh.write(z.compress(changegroup.genchunk(chunk)))
356 fh.write(z.compress(changegroup.genchunk(chunk)))
367 fh.write(z.compress(changegroup.closechunk()))
357 fh.write(z.compress(changegroup.closechunk()))
368 fh.write(z.flush())
358 fh.write(z.flush())
369 cleanup = None
359 cleanup = None
370 return filename
360 return filename
371 finally:
361 finally:
372 if fh is not None:
362 if fh is not None:
373 fh.close()
363 fh.close()
374 if cleanup is not None:
364 if cleanup is not None:
375 os.unlink(cleanup)
365 os.unlink(cleanup)
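The file written above starts with 'HG10UN' when uncompressed, or with 'HG10' followed by a bz2 stream when compressed, so a reader can tell the two apart from the first bytes. Minimal sketch, assuming a bundle produced by this function (not part of the changeset):

#   fh = open(bundlename, 'rb')
#   magic = fh.read(4)            # always 'HG10'
#   if fh.read(2) == 'UN':        # 'HG10UN': raw changegroup chunks follow
#       data = fh.read()
#   else:                         # 'HG10': the rest of the file is a bz2 stream
#       fh.seek(4)
#       data = bz2.decompress(fh.read())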
376
366
377 def trimuser(ui, name, rev, revcache):
367 def trimuser(ui, name, rev, revcache):
378 """trim the name of the user who committed a change"""
368 """trim the name of the user who committed a change"""
379 user = revcache.get(rev)
369 user = revcache.get(rev)
380 if user is None:
370 if user is None:
381 user = revcache[rev] = ui.shortuser(name)
371 user = revcache[rev] = ui.shortuser(name)
382 return user
372 return user
383
373
384 class changeset_printer(object):
374 class changeset_printer(object):
385 '''show changeset information when templating not requested.'''
375 '''show changeset information when templating not requested.'''
386
376
387 def __init__(self, ui, repo):
377 def __init__(self, ui, repo):
388 self.ui = ui
378 self.ui = ui
389 self.repo = repo
379 self.repo = repo
390
380
391 def show(self, rev=0, changenode=None, brinfo=None):
381 def show(self, rev=0, changenode=None, brinfo=None):
392 '''show a single changeset or file revision'''
382 '''show a single changeset or file revision'''
393 log = self.repo.changelog
383 log = self.repo.changelog
394 if changenode is None:
384 if changenode is None:
395 changenode = log.node(rev)
385 changenode = log.node(rev)
396 elif not rev:
386 elif not rev:
397 rev = log.rev(changenode)
387 rev = log.rev(changenode)
398
388
399 if self.ui.quiet:
389 if self.ui.quiet:
400 self.ui.write("%d:%s\n" % (rev, short(changenode)))
390 self.ui.write("%d:%s\n" % (rev, short(changenode)))
401 return
391 return
402
392
403 changes = log.read(changenode)
393 changes = log.read(changenode)
404 date = util.datestr(changes[2])
394 date = util.datestr(changes[2])
405
395
406 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
396 parents = [(log.rev(p), self.ui.verbose and hex(p) or short(p))
407 for p in log.parents(changenode)
397 for p in log.parents(changenode)
408 if self.ui.debugflag or p != nullid]
398 if self.ui.debugflag or p != nullid]
409 if (not self.ui.debugflag and len(parents) == 1 and
399 if (not self.ui.debugflag and len(parents) == 1 and
410 parents[0][0] == rev-1):
400 parents[0][0] == rev-1):
411 parents = []
401 parents = []
412
402
413 if self.ui.verbose:
403 if self.ui.verbose:
414 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
404 self.ui.write(_("changeset: %d:%s\n") % (rev, hex(changenode)))
415 else:
405 else:
416 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
406 self.ui.write(_("changeset: %d:%s\n") % (rev, short(changenode)))
417
407
418 for tag in self.repo.nodetags(changenode):
408 for tag in self.repo.nodetags(changenode):
419 self.ui.status(_("tag: %s\n") % tag)
409 self.ui.status(_("tag: %s\n") % tag)
420 for parent in parents:
410 for parent in parents:
421 self.ui.write(_("parent: %d:%s\n") % parent)
411 self.ui.write(_("parent: %d:%s\n") % parent)
422
412
423 if brinfo and changenode in brinfo:
413 if brinfo and changenode in brinfo:
424 br = brinfo[changenode]
414 br = brinfo[changenode]
425 self.ui.write(_("branch: %s\n") % " ".join(br))
415 self.ui.write(_("branch: %s\n") % " ".join(br))
426
416
427 self.ui.debug(_("manifest: %d:%s\n") %
417 self.ui.debug(_("manifest: %d:%s\n") %
428 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
418 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
429 self.ui.status(_("user: %s\n") % changes[1])
419 self.ui.status(_("user: %s\n") % changes[1])
430 self.ui.status(_("date: %s\n") % date)
420 self.ui.status(_("date: %s\n") % date)
431
421
432 if self.ui.debugflag:
422 if self.ui.debugflag:
433 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
423 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
434 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
424 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
435 files):
425 files):
436 if value:
426 if value:
437 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
427 self.ui.note("%-12s %s\n" % (key, " ".join(value)))
438 else:
428 else:
439 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
429 self.ui.note(_("files: %s\n") % " ".join(changes[3]))
440
430
441 description = changes[4].strip()
431 description = changes[4].strip()
442 if description:
432 if description:
443 if self.ui.verbose:
433 if self.ui.verbose:
444 self.ui.status(_("description:\n"))
434 self.ui.status(_("description:\n"))
445 self.ui.status(description)
435 self.ui.status(description)
446 self.ui.status("\n\n")
436 self.ui.status("\n\n")
447 else:
437 else:
448 self.ui.status(_("summary: %s\n") %
438 self.ui.status(_("summary: %s\n") %
449 description.splitlines()[0])
439 description.splitlines()[0])
450 self.ui.status("\n")
440 self.ui.status("\n")
451
441
452 def show_changeset(ui, repo, opts):
442 def show_changeset(ui, repo, opts):
453 '''show one changeset. uses template or regular display. caller
443 '''show one changeset. uses template or regular display. caller
454 can pass in 'style' and 'template' options in opts.'''
444 can pass in 'style' and 'template' options in opts.'''
455
445
456 tmpl = opts.get('template')
446 tmpl = opts.get('template')
457 if tmpl:
447 if tmpl:
458 tmpl = templater.parsestring(tmpl, quoted=False)
448 tmpl = templater.parsestring(tmpl, quoted=False)
459 else:
449 else:
460 tmpl = ui.config('ui', 'logtemplate')
450 tmpl = ui.config('ui', 'logtemplate')
461 if tmpl: tmpl = templater.parsestring(tmpl)
451 if tmpl: tmpl = templater.parsestring(tmpl)
462 mapfile = opts.get('style') or ui.config('ui', 'style')
452 mapfile = opts.get('style') or ui.config('ui', 'style')
463 if tmpl or mapfile:
453 if tmpl or mapfile:
464 if mapfile:
454 if mapfile:
465 if not os.path.isfile(mapfile):
455 if not os.path.isfile(mapfile):
466 mapname = templater.templatepath('map-cmdline.' + mapfile)
456 mapname = templater.templatepath('map-cmdline.' + mapfile)
467 if not mapname: mapname = templater.templatepath(mapfile)
457 if not mapname: mapname = templater.templatepath(mapfile)
468 if mapname: mapfile = mapname
458 if mapname: mapfile = mapname
469 try:
459 try:
470 t = templater.changeset_templater(ui, repo, mapfile)
460 t = templater.changeset_templater(ui, repo, mapfile)
471 except SyntaxError, inst:
461 except SyntaxError, inst:
472 raise util.Abort(inst.args[0])
462 raise util.Abort(inst.args[0])
473 if tmpl: t.use_template(tmpl)
463 if tmpl: t.use_template(tmpl)
474 return t
464 return t
475 return changeset_printer(ui, repo)
465 return changeset_printer(ui, repo)
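In short: a template given via --template or ui.logtemplate, or a style map via --style or ui.style, selects the templater; otherwise the plain changeset_printer is used. A hypothetical hgrc entry giving one-line log output (illustrative, not from this changeset):

#   [ui]
#   logtemplate = {rev}:{node|short} {author|user}: {desc|firstline}\n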
476
466
477 def setremoteconfig(ui, opts):
467 def setremoteconfig(ui, opts):
478 "copy remote options to ui tree"
468 "copy remote options to ui tree"
479 if opts.get('ssh'):
469 if opts.get('ssh'):
480 ui.setconfig("ui", "ssh", opts['ssh'])
470 ui.setconfig("ui", "ssh", opts['ssh'])
481 if opts.get('remotecmd'):
471 if opts.get('remotecmd'):
482 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
472 ui.setconfig("ui", "remotecmd", opts['remotecmd'])
483
473
484 def show_version(ui):
474 def show_version(ui):
485 """output version and copyright information"""
475 """output version and copyright information"""
486 ui.write(_("Mercurial Distributed SCM (version %s)\n")
476 ui.write(_("Mercurial Distributed SCM (version %s)\n")
487 % version.get_version())
477 % version.get_version())
488 ui.status(_(
478 ui.status(_(
489 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
479 "\nCopyright (C) 2005, 2006 Matt Mackall <mpm@selenic.com>\n"
490 "This is free software; see the source for copying conditions. "
480 "This is free software; see the source for copying conditions. "
491 "There is NO\nwarranty; "
481 "There is NO\nwarranty; "
492 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
482 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
493 ))
483 ))
494
484
495 def help_(ui, name=None, with_version=False):
485 def help_(ui, name=None, with_version=False):
496 """show help for a command, extension, or list of commands
486 """show help for a command, extension, or list of commands
497
487
498 With no arguments, print a list of commands and short help.
488 With no arguments, print a list of commands and short help.
499
489
500 Given a command name, print help for that command.
490 Given a command name, print help for that command.
501
491
502 Given an extension name, print help for that extension, and the
492 Given an extension name, print help for that extension, and the
503 commands it provides."""
493 commands it provides."""
504 option_lists = []
494 option_lists = []
505
495
506 def helpcmd(name):
496 def helpcmd(name):
507 if with_version:
497 if with_version:
508 show_version(ui)
498 show_version(ui)
509 ui.write('\n')
499 ui.write('\n')
510 aliases, i = findcmd(name)
500 aliases, i = findcmd(name)
511 # synopsis
501 # synopsis
512 ui.write("%s\n\n" % i[2])
502 ui.write("%s\n\n" % i[2])
513
503
514 # description
504 # description
515 doc = i[0].__doc__
505 doc = i[0].__doc__
516 if not doc:
506 if not doc:
517 doc = _("(No help text available)")
507 doc = _("(No help text available)")
518 if ui.quiet:
508 if ui.quiet:
519 doc = doc.splitlines(0)[0]
509 doc = doc.splitlines(0)[0]
520 ui.write("%s\n" % doc.rstrip())
510 ui.write("%s\n" % doc.rstrip())
521
511
522 if not ui.quiet:
512 if not ui.quiet:
523 # aliases
513 # aliases
524 if len(aliases) > 1:
514 if len(aliases) > 1:
525 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
515 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
526
516
527 # options
517 # options
528 if i[1]:
518 if i[1]:
529 option_lists.append(("options", i[1]))
519 option_lists.append(("options", i[1]))
530
520
531 def helplist(select=None):
521 def helplist(select=None):
532 h = {}
522 h = {}
533 cmds = {}
523 cmds = {}
534 for c, e in table.items():
524 for c, e in table.items():
535 f = c.split("|", 1)[0]
525 f = c.split("|", 1)[0]
536 if select and not select(f):
526 if select and not select(f):
537 continue
527 continue
538 if name == "shortlist" and not f.startswith("^"):
528 if name == "shortlist" and not f.startswith("^"):
539 continue
529 continue
540 f = f.lstrip("^")
530 f = f.lstrip("^")
541 if not ui.debugflag and f.startswith("debug"):
531 if not ui.debugflag and f.startswith("debug"):
542 continue
532 continue
543 doc = e[0].__doc__
533 doc = e[0].__doc__
544 if not doc:
534 if not doc:
545 doc = _("(No help text available)")
535 doc = _("(No help text available)")
546 h[f] = doc.splitlines(0)[0].rstrip()
536 h[f] = doc.splitlines(0)[0].rstrip()
547 cmds[f] = c.lstrip("^")
537 cmds[f] = c.lstrip("^")
548
538
549 fns = h.keys()
539 fns = h.keys()
550 fns.sort()
540 fns.sort()
551 m = max(map(len, fns))
541 m = max(map(len, fns))
552 for f in fns:
542 for f in fns:
553 if ui.verbose:
543 if ui.verbose:
554 commands = cmds[f].replace("|",", ")
544 commands = cmds[f].replace("|",", ")
555 ui.write(" %s:\n %s\n"%(commands, h[f]))
545 ui.write(" %s:\n %s\n"%(commands, h[f]))
556 else:
546 else:
557 ui.write(' %-*s %s\n' % (m, f, h[f]))
547 ui.write(' %-*s %s\n' % (m, f, h[f]))
558
548
559 def helpext(name):
549 def helpext(name):
560 try:
550 try:
561 mod = findext(name)
551 mod = findext(name)
562 except KeyError:
552 except KeyError:
563 raise UnknownCommand(name)
553 raise UnknownCommand(name)
564
554
565 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
555 doc = (mod.__doc__ or _('No help text available')).splitlines(0)
566 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
556 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
567 for d in doc[1:]:
557 for d in doc[1:]:
568 ui.write(d, '\n')
558 ui.write(d, '\n')
569
559
570 ui.status('\n')
560 ui.status('\n')
571 if ui.verbose:
561 if ui.verbose:
572 ui.status(_('list of commands:\n\n'))
562 ui.status(_('list of commands:\n\n'))
573 else:
563 else:
574 ui.status(_('list of commands (use "hg help -v %s" '
564 ui.status(_('list of commands (use "hg help -v %s" '
575 'to show aliases and global options):\n\n') % name)
565 'to show aliases and global options):\n\n') % name)
576
566
577 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
567 modcmds = dict.fromkeys([c.split('|', 1)[0] for c in mod.cmdtable])
578 helplist(modcmds.has_key)
568 helplist(modcmds.has_key)
579
569
580 if name and name != 'shortlist':
570 if name and name != 'shortlist':
581 try:
571 try:
582 helpcmd(name)
572 helpcmd(name)
583 except UnknownCommand:
573 except UnknownCommand:
584 helpext(name)
574 helpext(name)
585
575
586 else:
576 else:
587 # program name
577 # program name
588 if ui.verbose or with_version:
578 if ui.verbose or with_version:
589 show_version(ui)
579 show_version(ui)
590 else:
580 else:
591 ui.status(_("Mercurial Distributed SCM\n"))
581 ui.status(_("Mercurial Distributed SCM\n"))
592 ui.status('\n')
582 ui.status('\n')
593
583
594 # list of commands
584 # list of commands
595 if name == "shortlist":
585 if name == "shortlist":
596 ui.status(_('basic commands (use "hg help" '
586 ui.status(_('basic commands (use "hg help" '
597 'for the full list or option "-v" for details):\n\n'))
587 'for the full list or option "-v" for details):\n\n'))
598 elif ui.verbose:
588 elif ui.verbose:
599 ui.status(_('list of commands:\n\n'))
589 ui.status(_('list of commands:\n\n'))
600 else:
590 else:
601 ui.status(_('list of commands (use "hg help -v" '
591 ui.status(_('list of commands (use "hg help -v" '
602 'to show aliases and global options):\n\n'))
592 'to show aliases and global options):\n\n'))
603
593
604 helplist()
594 helplist()
605
595
606 # global options
596 # global options
607 if ui.verbose:
597 if ui.verbose:
608 option_lists.append(("global options", globalopts))
598 option_lists.append(("global options", globalopts))
609
599
610 # list all option lists
600 # list all option lists
611 opt_output = []
601 opt_output = []
612 for title, options in option_lists:
602 for title, options in option_lists:
613 opt_output.append(("\n%s:\n" % title, None))
603 opt_output.append(("\n%s:\n" % title, None))
614 for shortopt, longopt, default, desc in options:
604 for shortopt, longopt, default, desc in options:
615 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
605 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
616 longopt and " --%s" % longopt),
606 longopt and " --%s" % longopt),
617 "%s%s" % (desc,
607 "%s%s" % (desc,
618 default
608 default
619 and _(" (default: %s)") % default
609 and _(" (default: %s)") % default
620 or "")))
610 or "")))
621
611
622 if opt_output:
612 if opt_output:
623 opts_len = max([len(line[0]) for line in opt_output if line[1]])
613 opts_len = max([len(line[0]) for line in opt_output if line[1]])
624 for first, second in opt_output:
614 for first, second in opt_output:
625 if second:
615 if second:
626 ui.write(" %-*s %s\n" % (opts_len, first, second))
616 ui.write(" %-*s %s\n" % (opts_len, first, second))
627 else:
617 else:
628 ui.write("%s\n" % first)
618 ui.write("%s\n" % first)
629
619
630 # Commands start here, listed alphabetically
620 # Commands start here, listed alphabetically
631
621
632 def add(ui, repo, *pats, **opts):
622 def add(ui, repo, *pats, **opts):
633 """add the specified files on the next commit
623 """add the specified files on the next commit
634
624
635 Schedule files to be version controlled and added to the repository.
625 Schedule files to be version controlled and added to the repository.
636
626
637 The files will be added to the repository at the next commit.
627 The files will be added to the repository at the next commit.
638
628
639 If no names are given, add all files in the repository.
629 If no names are given, add all files in the repository.
640 """
630 """
641
631
642 names = []
632 names = []
643 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
633 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
644 if exact:
634 if exact:
645 if ui.verbose:
635 if ui.verbose:
646 ui.status(_('adding %s\n') % rel)
636 ui.status(_('adding %s\n') % rel)
647 names.append(abs)
637 names.append(abs)
648 elif repo.dirstate.state(abs) == '?':
638 elif repo.dirstate.state(abs) == '?':
649 ui.status(_('adding %s\n') % rel)
639 ui.status(_('adding %s\n') % rel)
650 names.append(abs)
640 names.append(abs)
651 if not opts.get('dry_run'):
641 if not opts.get('dry_run'):
652 repo.add(names)
642 repo.add(names)
653
643
654 def addremove(ui, repo, *pats, **opts):
644 def addremove(ui, repo, *pats, **opts):
655 """add all new files, delete all missing files (DEPRECATED)
645 """add all new files, delete all missing files (DEPRECATED)
656
646
657 (DEPRECATED)
647 (DEPRECATED)
658 Add all new files and remove all missing files from the repository.
648 Add all new files and remove all missing files from the repository.
659
649
660 New files are ignored if they match any of the patterns in .hgignore. As
650 New files are ignored if they match any of the patterns in .hgignore. As
661 with add, these changes take effect at the next commit.
651 with add, these changes take effect at the next commit.
662
652
663 This command is now deprecated and will be removed in a future
653 This command is now deprecated and will be removed in a future
664 release. Please use add and remove --after instead.
654 release. Please use add and remove --after instead.
665 """
655 """
666 ui.warn(_('(the addremove command is deprecated; use add and remove '
656 ui.warn(_('(the addremove command is deprecated; use add and remove '
667 '--after instead)\n'))
657 '--after instead)\n'))
668 return cmdutil.addremove(repo, pats, opts)
658 return cmdutil.addremove(repo, pats, opts)
669
659
670 def annotate(ui, repo, *pats, **opts):
660 def annotate(ui, repo, *pats, **opts):
671 """show changeset information per file line
661 """show changeset information per file line
672
662
673 List changes in files, showing the revision id responsible for each line
663 List changes in files, showing the revision id responsible for each line
674
664
675 This command is useful to discover who did a change or when a change took
665 This command is useful to discover who did a change or when a change took
676 place.
666 place.
677
667
678 Without the -a option, annotate will avoid processing files it
668 Without the -a option, annotate will avoid processing files it
679 detects as binary. With -a, annotate will generate an annotation
669 detects as binary. With -a, annotate will generate an annotation
680 anyway, probably with undesirable results.
670 anyway, probably with undesirable results.
681 """
671 """
682 def getnode(rev):
672 def getnode(rev):
683 return short(repo.changelog.node(rev))
673 return short(repo.changelog.node(rev))
684
674
685 ucache = {}
675 ucache = {}
686 def getname(rev):
676 def getname(rev):
687 try:
677 try:
688 return ucache[rev]
678 return ucache[rev]
689 except KeyError:
679 except KeyError:
690 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
680 u = trimuser(ui, repo.changectx(rev).user(), rev, ucache)
691 ucache[rev] = u
681 ucache[rev] = u
692 return u
682 return u
693
683
694 dcache = {}
684 dcache = {}
695 def getdate(rev):
685 def getdate(rev):
696 datestr = dcache.get(rev)
686 datestr = dcache.get(rev)
697 if datestr is None:
687 if datestr is None:
698 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
688 datestr = dcache[rev] = util.datestr(repo.changectx(rev).date())
699 return datestr
689 return datestr
700
690
701 if not pats:
691 if not pats:
702 raise util.Abort(_('at least one file name or pattern required'))
692 raise util.Abort(_('at least one file name or pattern required'))
703
693
704 opmap = [['user', getname], ['number', str], ['changeset', getnode],
694 opmap = [['user', getname], ['number', str], ['changeset', getnode],
705 ['date', getdate]]
695 ['date', getdate]]
706 if not opts['user'] and not opts['changeset'] and not opts['date']:
696 if not opts['user'] and not opts['changeset'] and not opts['date']:
707 opts['number'] = 1
697 opts['number'] = 1
708
698
709 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
699 ctx = repo.changectx(opts['rev'] or repo.dirstate.parents()[0])
710
700
711 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
701 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
712 node=ctx.node()):
702 node=ctx.node()):
713 fctx = ctx.filectx(abs)
703 fctx = ctx.filectx(abs)
714 if not opts['text'] and util.binary(fctx.data()):
704 if not opts['text'] and util.binary(fctx.data()):
715 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
705 ui.write(_("%s: binary file\n") % ((pats and rel) or abs))
716 continue
706 continue
717
707
718 lines = fctx.annotate()
708 lines = fctx.annotate()
719 pieces = []
709 pieces = []
720
710
721 for o, f in opmap:
711 for o, f in opmap:
722 if opts[o]:
712 if opts[o]:
723 l = [f(n) for n, dummy in lines]
713 l = [f(n) for n, dummy in lines]
724 if l:
714 if l:
725 m = max(map(len, l))
715 m = max(map(len, l))
726 pieces.append(["%*s" % (m, x) for x in l])
716 pieces.append(["%*s" % (m, x) for x in l])
727
717
728 if pieces:
718 if pieces:
729 for p, l in zip(zip(*pieces), lines):
719 for p, l in zip(zip(*pieces), lines):
730 ui.write("%s: %s" % (" ".join(p), l[1]))
720 ui.write("%s: %s" % (" ".join(p), l[1]))
731
721
732 def archive(ui, repo, dest, **opts):
722 def archive(ui, repo, dest, **opts):
733 '''create unversioned archive of a repository revision
723 '''create unversioned archive of a repository revision
734
724
735 By default, the revision used is the parent of the working
725 By default, the revision used is the parent of the working
736 directory; use "-r" to specify a different revision.
726 directory; use "-r" to specify a different revision.
737
727
738 To specify the type of archive to create, use "-t". Valid
728 To specify the type of archive to create, use "-t". Valid
739 types are:
729 types are:
740
730
741 "files" (default): a directory full of files
731 "files" (default): a directory full of files
742 "tar": tar archive, uncompressed
732 "tar": tar archive, uncompressed
743 "tbz2": tar archive, compressed using bzip2
733 "tbz2": tar archive, compressed using bzip2
744 "tgz": tar archive, compressed using gzip
734 "tgz": tar archive, compressed using gzip
745 "uzip": zip archive, uncompressed
735 "uzip": zip archive, uncompressed
746 "zip": zip archive, compressed using deflate
736 "zip": zip archive, compressed using deflate
747
737
748 The exact name of the destination archive or directory is given
738 The exact name of the destination archive or directory is given
749 using a format string; see "hg help export" for details.
739 using a format string; see "hg help export" for details.
750
740
751 Each member added to an archive file has a directory prefix
741 Each member added to an archive file has a directory prefix
752 prepended. Use "-p" to specify a format string for the prefix.
742 prepended. Use "-p" to specify a format string for the prefix.
753 The default is the basename of the archive, with suffixes removed.
743 The default is the basename of the archive, with suffixes removed.
754 '''
744 '''
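Hypothetical invocations of the command described above (illustrative only):

#   $ hg archive ../snapshot                  # directory of plain files
#   $ hg archive -t tgz -r 10 ../rev10.tgz    # gzipped tarball of revision 10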
755
745
756 if opts['rev']:
746 if opts['rev']:
757 node = repo.lookup(opts['rev'])
747 node = repo.lookup(opts['rev'])
758 else:
748 else:
759 node, p2 = repo.dirstate.parents()
749 node, p2 = repo.dirstate.parents()
760 if p2 != nullid:
750 if p2 != nullid:
761 raise util.Abort(_('uncommitted merge - please provide a '
751 raise util.Abort(_('uncommitted merge - please provide a '
762 'specific revision'))
752 'specific revision'))
763
753
764 dest = cmdutil.make_filename(repo, dest, node)
754 dest = cmdutil.make_filename(repo, dest, node)
765 if os.path.realpath(dest) == repo.root:
755 if os.path.realpath(dest) == repo.root:
766 raise util.Abort(_('repository root cannot be destination'))
756 raise util.Abort(_('repository root cannot be destination'))
767 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
757 dummy, matchfn, dummy = cmdutil.matchpats(repo, [], opts)
768 kind = opts.get('type') or 'files'
758 kind = opts.get('type') or 'files'
769 prefix = opts['prefix']
759 prefix = opts['prefix']
770 if dest == '-':
760 if dest == '-':
771 if kind == 'files':
761 if kind == 'files':
772 raise util.Abort(_('cannot archive plain files to stdout'))
762 raise util.Abort(_('cannot archive plain files to stdout'))
773 dest = sys.stdout
763 dest = sys.stdout
774 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
764 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
775 prefix = cmdutil.make_filename(repo, prefix, node)
765 prefix = cmdutil.make_filename(repo, prefix, node)
776 archival.archive(repo, dest, node, kind, not opts['no_decode'],
766 archival.archive(repo, dest, node, kind, not opts['no_decode'],
777 matchfn, prefix)
767 matchfn, prefix)
778
768
779 def backout(ui, repo, rev, **opts):
769 def backout(ui, repo, rev, **opts):
780 '''reverse effect of earlier changeset
770 '''reverse effect of earlier changeset
781
771
782 Commit the backed out changes as a new changeset. The new
772 Commit the backed out changes as a new changeset. The new
783 changeset is a child of the backed out changeset.
773 changeset is a child of the backed out changeset.
784
774
785 If you back out a changeset other than the tip, a new head is
775 If you back out a changeset other than the tip, a new head is
786 created. This head is the parent of the working directory. If
776 created. This head is the parent of the working directory. If
787 you back out an old changeset, your working directory will appear
777 you back out an old changeset, your working directory will appear
788 old after the backout. You should merge the backout changeset
778 old after the backout. You should merge the backout changeset
789 with another head.
779 with another head.
790
780
791 The --merge option remembers the parent of the working directory
781 The --merge option remembers the parent of the working directory
792 before starting the backout, then merges the new head with that
782 before starting the backout, then merges the new head with that
793 changeset afterwards. This saves you from doing the merge by
783 changeset afterwards. This saves you from doing the merge by
794 hand. The result of this merge is not committed, as for a normal
784 hand. The result of this merge is not committed, as for a normal
795 merge.'''
785 merge.'''
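Hypothetical session illustrating the workflow described above (not part of this changeset):

#   $ hg backout --merge 117                   # commit the reversal, then merge it
#   $ hg commit -m 'merge backout of rev 117'  # the merge itself is not committed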
796
786
797 bail_if_changed(repo)
787 bail_if_changed(repo)
798 op1, op2 = repo.dirstate.parents()
788 op1, op2 = repo.dirstate.parents()
799 if op2 != nullid:
789 if op2 != nullid:
800 raise util.Abort(_('outstanding uncommitted merge'))
790 raise util.Abort(_('outstanding uncommitted merge'))
801 node = repo.lookup(rev)
791 node = repo.lookup(rev)
802 p1, p2 = repo.changelog.parents(node)
792 p1, p2 = repo.changelog.parents(node)
803 if p1 == nullid:
793 if p1 == nullid:
804 raise util.Abort(_('cannot back out a change with no parents'))
794 raise util.Abort(_('cannot back out a change with no parents'))
805 if p2 != nullid:
795 if p2 != nullid:
806 if not opts['parent']:
796 if not opts['parent']:
807 raise util.Abort(_('cannot back out a merge changeset without '
797 raise util.Abort(_('cannot back out a merge changeset without '
808 '--parent'))
798 '--parent'))
809 p = repo.lookup(opts['parent'])
799 p = repo.lookup(opts['parent'])
810 if p not in (p1, p2):
800 if p not in (p1, p2):
811 raise util.Abort(_('%s is not a parent of %s') %
801 raise util.Abort(_('%s is not a parent of %s') %
812 (short(p), short(node)))
802 (short(p), short(node)))
813 parent = p
803 parent = p
814 else:
804 else:
815 if opts['parent']:
805 if opts['parent']:
816 raise util.Abort(_('cannot use --parent on non-merge changeset'))
806 raise util.Abort(_('cannot use --parent on non-merge changeset'))
817 parent = p1
807 parent = p1
818 hg.clean(repo, node, show_stats=False)
808 hg.clean(repo, node, show_stats=False)
819 revert_opts = opts.copy()
809 revert_opts = opts.copy()
820 revert_opts['rev'] = hex(parent)
810 revert_opts['rev'] = hex(parent)
821 revert(ui, repo, **revert_opts)
811 revert(ui, repo, **revert_opts)
822 commit_opts = opts.copy()
812 commit_opts = opts.copy()
823 commit_opts['addremove'] = False
813 commit_opts['addremove'] = False
824 if not commit_opts['message'] and not commit_opts['logfile']:
814 if not commit_opts['message'] and not commit_opts['logfile']:
825 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
815 commit_opts['message'] = _("Backed out changeset %s") % (hex(node))
826 commit_opts['force_editor'] = True
816 commit_opts['force_editor'] = True
827 commit(ui, repo, **commit_opts)
817 commit(ui, repo, **commit_opts)
828 def nice(node):
818 def nice(node):
829 return '%d:%s' % (repo.changelog.rev(node), short(node))
819 return '%d:%s' % (repo.changelog.rev(node), short(node))
830 ui.status(_('changeset %s backs out changeset %s\n') %
820 ui.status(_('changeset %s backs out changeset %s\n') %
831 (nice(repo.changelog.tip()), nice(node)))
821 (nice(repo.changelog.tip()), nice(node)))
832 if op1 != node:
822 if op1 != node:
833 if opts['merge']:
823 if opts['merge']:
834 ui.status(_('merging with changeset %s\n') % nice(op1))
824 ui.status(_('merging with changeset %s\n') % nice(op1))
835 n = _lookup(repo, hex(op1))
825 n = _lookup(repo, hex(op1))
836 hg.merge(repo, n)
826 hg.merge(repo, n)
837 else:
827 else:
838 ui.status(_('the backout changeset is a new head - '
828 ui.status(_('the backout changeset is a new head - '
839 'do not forget to merge\n'))
829 'do not forget to merge\n'))
840 ui.status(_('(use "backout --merge" '
830 ui.status(_('(use "backout --merge" '
841 'if you want to auto-merge)\n'))
831 'if you want to auto-merge)\n'))
842
832
843 def bundle(ui, repo, fname, dest=None, **opts):
833 def bundle(ui, repo, fname, dest=None, **opts):
844 """create a changegroup file
834 """create a changegroup file
845
835
846 Generate a compressed changegroup file collecting all changesets
836 Generate a compressed changegroup file collecting all changesets
847 not found in the other repository.
837 not found in the other repository.
848
838
849 This file can then be transferred using conventional means and
839 This file can then be transferred using conventional means and
850 applied to another repository with the unbundle command. This is
840 applied to another repository with the unbundle command. This is
851 useful when native push and pull are not available or when
841 useful when native push and pull are not available or when
852 exporting an entire repository is undesirable. The standard file
842 exporting an entire repository is undesirable. The standard file
853 extension is ".hg".
843 extension is ".hg".
854
844
855 Unlike import/export, this exactly preserves all changeset
845 Unlike import/export, this exactly preserves all changeset
856 contents including permissions, rename data, and revision history.
846 contents including permissions, rename data, and revision history.
857 """
847 """
858 dest = ui.expandpath(dest or 'default-push', dest or 'default')
848 dest = ui.expandpath(dest or 'default-push', dest or 'default')
859 other = hg.repository(ui, dest)
849 other = hg.repository(ui, dest)
860 o = repo.findoutgoing(other, force=opts['force'])
850 o = repo.findoutgoing(other, force=opts['force'])
861 cg = repo.changegroup(o, 'bundle')
851 cg = repo.changegroup(o, 'bundle')
862 write_bundle(cg, fname)
852 write_bundle(cg, fname)
863
853
864 def cat(ui, repo, file1, *pats, **opts):
854 def cat(ui, repo, file1, *pats, **opts):
865 """output the latest or given revisions of files
855 """output the latest or given revisions of files
866
856
867 Print the specified files as they were at the given revision.
857 Print the specified files as they were at the given revision.
868 If no revision is given then the tip is used.
858 If no revision is given then the tip is used.
869
859
870 Output may be to a file, in which case the name of the file is
860 Output may be to a file, in which case the name of the file is
871 given using a format string. The formatting rules are the same as
861 given using a format string. The formatting rules are the same as
872 for the export command, with the following additions:
862 for the export command, with the following additions:
873
863
874 %s basename of file being printed
864 %s basename of file being printed
875 %d dirname of file being printed, or '.' if in repo root
865 %d dirname of file being printed, or '.' if in repo root
876 %p root-relative path name of file being printed
866 %p root-relative path name of file being printed
877 """
867 """
878 ctx = repo.changectx(opts['rev'] or "-1")
868 ctx = repo.changectx(opts['rev'] or "-1")
879 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
869 for src, abs, rel, exact in cmdutil.walk(repo, (file1,) + pats, opts,
880 ctx.node()):
870 ctx.node()):
881 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
871 fp = cmdutil.make_file(repo, opts['output'], ctx.node(), pathname=abs)
882 fp.write(ctx.filectx(abs).data())
872 fp.write(ctx.filectx(abs).data())
883
873
884 def clone(ui, source, dest=None, **opts):
874 def clone(ui, source, dest=None, **opts):
885 """make a copy of an existing repository
875 """make a copy of an existing repository
886
876
887 Create a copy of an existing repository in a new directory.
877 Create a copy of an existing repository in a new directory.
888
878
889 If no destination directory name is specified, it defaults to the
879 If no destination directory name is specified, it defaults to the
890 basename of the source.
880 basename of the source.
891
881
892 The location of the source is added to the new repository's
882 The location of the source is added to the new repository's
893 .hg/hgrc file, as the default to be used for future pulls.
883 .hg/hgrc file, as the default to be used for future pulls.
894
884
895 For efficiency, hardlinks are used for cloning whenever the source
885 For efficiency, hardlinks are used for cloning whenever the source
896 and destination are on the same filesystem (note this applies only
886 and destination are on the same filesystem (note this applies only
897 to the repository data, not to the checked out files). Some
887 to the repository data, not to the checked out files). Some
898 filesystems, such as AFS, implement hardlinking incorrectly, but
888 filesystems, such as AFS, implement hardlinking incorrectly, but
899 do not report errors. In these cases, use the --pull option to
889 do not report errors. In these cases, use the --pull option to
900 avoid hardlinking.
890 avoid hardlinking.
901
891
902 You can safely clone repositories and checked out files using full
892 You can safely clone repositories and checked out files using full
903 hardlinks with
893 hardlinks with
904
894
905 $ cp -al REPO REPOCLONE
895 $ cp -al REPO REPOCLONE
906
896
907 which is the fastest way to clone. However, the operation is not
897 which is the fastest way to clone. However, the operation is not
908 atomic (making sure REPO is not modified during the operation is
898 atomic (making sure REPO is not modified during the operation is
909 up to you) and you have to make sure your editor breaks hardlinks
899 up to you) and you have to make sure your editor breaks hardlinks
910 (Emacs and most Linux Kernel tools do so).
900 (Emacs and most Linux Kernel tools do so).
911
901
912 If you use the -r option to clone up to a specific revision, no
902 If you use the -r option to clone up to a specific revision, no
913 subsequent revisions will be present in the cloned repository.
903 subsequent revisions will be present in the cloned repository.
914 This option implies --pull, even on local repositories.
904 This option implies --pull, even on local repositories.
915
905
916 See pull for valid source format details.
906 See pull for valid source format details.
917
907
918 It is possible to specify an ssh:// URL as the destination, but no
908 It is possible to specify an ssh:// URL as the destination, but no
919 .hg/hgrc will be created on the remote side. Look at the help text
909 .hg/hgrc will be created on the remote side. Look at the help text
920 for the pull command for important details about ssh:// URLs.
910 for the pull command for important details about ssh:// URLs.
921 """
911 """
922 setremoteconfig(ui, opts)
912 setremoteconfig(ui, opts)
923 hg.clone(ui, ui.expandpath(source), dest,
913 hg.clone(ui, ui.expandpath(source), dest,
924 pull=opts['pull'],
914 pull=opts['pull'],
925 stream=opts['uncompressed'],
915 stream=opts['uncompressed'],
926 rev=opts['rev'],
916 rev=opts['rev'],
927 update=not opts['noupdate'])
917 update=not opts['noupdate'])
928
918
929 def commit(ui, repo, *pats, **opts):
919 def commit(ui, repo, *pats, **opts):
930 """commit the specified files or all outstanding changes
920 """commit the specified files or all outstanding changes
931
921
932 Commit changes to the given files into the repository.
922 Commit changes to the given files into the repository.
933
923
934 If a list of files is omitted, all changes reported by "hg status"
924 If a list of files is omitted, all changes reported by "hg status"
935 will be committed.
925 will be committed.
936
926
937 If no commit message is specified, the editor configured in your hgrc
927 If no commit message is specified, the editor configured in your hgrc
938 or in the EDITOR environment variable is started to enter a message.
928 or in the EDITOR environment variable is started to enter a message.
939 """
929 """
940 message = logmessage(opts)
930 message = logmessage(opts)
941
931
942 if opts['addremove']:
932 if opts['addremove']:
943 cmdutil.addremove(repo, pats, opts)
933 cmdutil.addremove(repo, pats, opts)
944 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
934 fns, match, anypats = cmdutil.matchpats(repo, pats, opts)
945 if pats:
935 if pats:
946 modified, added, removed = repo.status(files=fns, match=match)[:3]
936 modified, added, removed = repo.status(files=fns, match=match)[:3]
947 files = modified + added + removed
937 files = modified + added + removed
948 else:
938 else:
949 files = []
939 files = []
950 try:
940 try:
951 repo.commit(files, message, opts['user'], opts['date'], match,
941 repo.commit(files, message, opts['user'], opts['date'], match,
952 force_editor=opts.get('force_editor'))
942 force_editor=opts.get('force_editor'))
953 except ValueError, inst:
943 except ValueError, inst:
954 raise util.Abort(str(inst))
944 raise util.Abort(str(inst))
955
945
956 def docopy(ui, repo, pats, opts, wlock):
946 def docopy(ui, repo, pats, opts, wlock):
957 # called with the repo lock held
947 # called with the repo lock held
958 cwd = repo.getcwd()
948 cwd = repo.getcwd()
959 errors = 0
949 errors = 0
960 copied = []
950 copied = []
961 targets = {}
951 targets = {}
962
952
963 def okaytocopy(abs, rel, exact):
953 def okaytocopy(abs, rel, exact):
964 reasons = {'?': _('is not managed'),
954 reasons = {'?': _('is not managed'),
965 'a': _('has been marked for add'),
955 'a': _('has been marked for add'),
966 'r': _('has been marked for remove')}
956 'r': _('has been marked for remove')}
967 state = repo.dirstate.state(abs)
957 state = repo.dirstate.state(abs)
968 reason = reasons.get(state)
958 reason = reasons.get(state)
969 if reason:
959 if reason:
970 if state == 'a':
960 if state == 'a':
971 origsrc = repo.dirstate.copied(abs)
961 origsrc = repo.dirstate.copied(abs)
972 if origsrc is not None:
962 if origsrc is not None:
973 return origsrc
963 return origsrc
974 if exact:
964 if exact:
975 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
965 ui.warn(_('%s: not copying - file %s\n') % (rel, reason))
976 else:
966 else:
977 return abs
967 return abs
978
968
979 def copy(origsrc, abssrc, relsrc, target, exact):
969 def copy(origsrc, abssrc, relsrc, target, exact):
980 abstarget = util.canonpath(repo.root, cwd, target)
970 abstarget = util.canonpath(repo.root, cwd, target)
981 reltarget = util.pathto(cwd, abstarget)
971 reltarget = util.pathto(cwd, abstarget)
982 prevsrc = targets.get(abstarget)
972 prevsrc = targets.get(abstarget)
983 if prevsrc is not None:
973 if prevsrc is not None:
984 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
974 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
985 (reltarget, abssrc, prevsrc))
975 (reltarget, abssrc, prevsrc))
986 return
976 return
987 if (not opts['after'] and os.path.exists(reltarget) or
977 if (not opts['after'] and os.path.exists(reltarget) or
988 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
978 opts['after'] and repo.dirstate.state(abstarget) not in '?r'):
989 if not opts['force']:
979 if not opts['force']:
990 ui.warn(_('%s: not overwriting - file exists\n') %
980 ui.warn(_('%s: not overwriting - file exists\n') %
991 reltarget)
981 reltarget)
992 return
982 return
993 if not opts['after'] and not opts.get('dry_run'):
983 if not opts['after'] and not opts.get('dry_run'):
994 os.unlink(reltarget)
984 os.unlink(reltarget)
995 if opts['after']:
985 if opts['after']:
996 if not os.path.exists(reltarget):
986 if not os.path.exists(reltarget):
997 return
987 return
998 else:
988 else:
999 targetdir = os.path.dirname(reltarget) or '.'
989 targetdir = os.path.dirname(reltarget) or '.'
1000 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
990 if not os.path.isdir(targetdir) and not opts.get('dry_run'):
1001 os.makedirs(targetdir)
991 os.makedirs(targetdir)
1002 try:
992 try:
1003 restore = repo.dirstate.state(abstarget) == 'r'
993 restore = repo.dirstate.state(abstarget) == 'r'
1004 if restore and not opts.get('dry_run'):
994 if restore and not opts.get('dry_run'):
1005 repo.undelete([abstarget], wlock)
995 repo.undelete([abstarget], wlock)
1006 try:
996 try:
1007 if not opts.get('dry_run'):
997 if not opts.get('dry_run'):
1008 shutil.copyfile(relsrc, reltarget)
998 shutil.copyfile(relsrc, reltarget)
1009 shutil.copymode(relsrc, reltarget)
999 shutil.copymode(relsrc, reltarget)
1010 restore = False
1000 restore = False
1011 finally:
1001 finally:
1012 if restore:
1002 if restore:
1013 repo.remove([abstarget], wlock)
1003 repo.remove([abstarget], wlock)
1014 except shutil.Error, inst:
1004 except shutil.Error, inst:
1015 raise util.Abort(str(inst))
1005 raise util.Abort(str(inst))
1016 except IOError, inst:
1006 except IOError, inst:
1017 if inst.errno == errno.ENOENT:
1007 if inst.errno == errno.ENOENT:
1018 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1008 ui.warn(_('%s: deleted in working copy\n') % relsrc)
1019 else:
1009 else:
1020 ui.warn(_('%s: cannot copy - %s\n') %
1010 ui.warn(_('%s: cannot copy - %s\n') %
1021 (relsrc, inst.strerror))
1011 (relsrc, inst.strerror))
1022 errors += 1
1012 errors += 1
1023 return
1013 return
1024 if ui.verbose or not exact:
1014 if ui.verbose or not exact:
1025 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1015 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
1026 targets[abstarget] = abssrc
1016 targets[abstarget] = abssrc
1027 if abstarget != origsrc and not opts.get('dry_run'):
1017 if abstarget != origsrc and not opts.get('dry_run'):
1028 repo.copy(origsrc, abstarget, wlock)
1018 repo.copy(origsrc, abstarget, wlock)
1029 copied.append((abssrc, relsrc, exact))
1019 copied.append((abssrc, relsrc, exact))
1030
1020
1031 def targetpathfn(pat, dest, srcs):
1021 def targetpathfn(pat, dest, srcs):
1032 if os.path.isdir(pat):
1022 if os.path.isdir(pat):
1033 abspfx = util.canonpath(repo.root, cwd, pat)
1023 abspfx = util.canonpath(repo.root, cwd, pat)
1034 if destdirexists:
1024 if destdirexists:
1035 striplen = len(os.path.split(abspfx)[0])
1025 striplen = len(os.path.split(abspfx)[0])
1036 else:
1026 else:
1037 striplen = len(abspfx)
1027 striplen = len(abspfx)
1038 if striplen:
1028 if striplen:
1039 striplen += len(os.sep)
1029 striplen += len(os.sep)
1040 res = lambda p: os.path.join(dest, p[striplen:])
1030 res = lambda p: os.path.join(dest, p[striplen:])
1041 elif destdirexists:
1031 elif destdirexists:
1042 res = lambda p: os.path.join(dest, os.path.basename(p))
1032 res = lambda p: os.path.join(dest, os.path.basename(p))
1043 else:
1033 else:
1044 res = lambda p: dest
1034 res = lambda p: dest
1045 return res
1035 return res
1046
1036
1047 def targetpathafterfn(pat, dest, srcs):
1037 def targetpathafterfn(pat, dest, srcs):
1048 if util.patkind(pat, None)[0]:
1038 if util.patkind(pat, None)[0]:
1049 # a mercurial pattern
1039 # a mercurial pattern
1050 res = lambda p: os.path.join(dest, os.path.basename(p))
1040 res = lambda p: os.path.join(dest, os.path.basename(p))
1051 else:
1041 else:
1052 abspfx = util.canonpath(repo.root, cwd, pat)
1042 abspfx = util.canonpath(repo.root, cwd, pat)
1053 if len(abspfx) < len(srcs[0][0]):
1043 if len(abspfx) < len(srcs[0][0]):
1054 # A directory. Either the target path contains the last
1044 # A directory. Either the target path contains the last
1055 # component of the source path or it does not.
1045 # component of the source path or it does not.
1056 def evalpath(striplen):
1046 def evalpath(striplen):
1057 score = 0
1047 score = 0
1058 for s in srcs:
1048 for s in srcs:
1059 t = os.path.join(dest, s[0][striplen:])
1049 t = os.path.join(dest, s[0][striplen:])
1060 if os.path.exists(t):
1050 if os.path.exists(t):
1061 score += 1
1051 score += 1
1062 return score
1052 return score
1063
1053
1064 striplen = len(abspfx)
1054 striplen = len(abspfx)
1065 if striplen:
1055 if striplen:
1066 striplen += len(os.sep)
1056 striplen += len(os.sep)
1067 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1057 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
1068 score = evalpath(striplen)
1058 score = evalpath(striplen)
1069 striplen1 = len(os.path.split(abspfx)[0])
1059 striplen1 = len(os.path.split(abspfx)[0])
1070 if striplen1:
1060 if striplen1:
1071 striplen1 += len(os.sep)
1061 striplen1 += len(os.sep)
1072 if evalpath(striplen1) > score:
1062 if evalpath(striplen1) > score:
1073 striplen = striplen1
1063 striplen = striplen1
1074 res = lambda p: os.path.join(dest, p[striplen:])
1064 res = lambda p: os.path.join(dest, p[striplen:])
1075 else:
1065 else:
1076 # a file
1066 # a file
1077 if destdirexists:
1067 if destdirexists:
1078 res = lambda p: os.path.join(dest, os.path.basename(p))
1068 res = lambda p: os.path.join(dest, os.path.basename(p))
1079 else:
1069 else:
1080 res = lambda p: dest
1070 res = lambda p: dest
1081 return res
1071 return res
1082
1072
1083
1073
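Both target-path helpers come down to the same computation: pick striplen, the length of the source prefix to drop (plus one path separator), and join whatever remains onto dest. targetpathafterfn merely tries two candidate prefixes and keeps the one that matches more files that already exist at the destination. A small sketch of the rebasing itself, with made-up paths:

    import os

    def rebase(path, prefix, dest, keep_last_component):
        """Map a source path under prefix to the corresponding path under dest.

        With keep_last_component the final directory of the prefix is kept
        (the destination directory already exists); otherwise the whole
        prefix is stripped.
        """
        if keep_last_component:
            striplen = len(os.path.split(prefix)[0])
        else:
            striplen = len(prefix)
        if striplen:
            striplen += len(os.sep)
        return os.path.join(dest, path[striplen:])

    # copying directory src/pkg into an existing directory out/
    # (the expected outputs below assume a POSIX os.sep)
    print(rebase('src/pkg/mod/a.py', 'src/pkg', 'out', True))    # out/pkg/mod/a.py
    print(rebase('src/pkg/mod/a.py', 'src/pkg', 'out', False))   # out/mod/a.py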
1084 pats = list(pats)
1074 pats = list(pats)
1085 if not pats:
1075 if not pats:
1086 raise util.Abort(_('no source or destination specified'))
1076 raise util.Abort(_('no source or destination specified'))
1087 if len(pats) == 1:
1077 if len(pats) == 1:
1088 raise util.Abort(_('no destination specified'))
1078 raise util.Abort(_('no destination specified'))
1089 dest = pats.pop()
1079 dest = pats.pop()
1090 destdirexists = os.path.isdir(dest)
1080 destdirexists = os.path.isdir(dest)
1091 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1081 if (len(pats) > 1 or util.patkind(pats[0], None)[0]) and not destdirexists:
1092 raise util.Abort(_('with multiple sources, destination must be an '
1082 raise util.Abort(_('with multiple sources, destination must be an '
1093 'existing directory'))
1083 'existing directory'))
1094 if opts['after']:
1084 if opts['after']:
1095 tfn = targetpathafterfn
1085 tfn = targetpathafterfn
1096 else:
1086 else:
1097 tfn = targetpathfn
1087 tfn = targetpathfn
1098 copylist = []
1088 copylist = []
1099 for pat in pats:
1089 for pat in pats:
1100 srcs = []
1090 srcs = []
1101 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1091 for tag, abssrc, relsrc, exact in cmdutil.walk(repo, [pat], opts):
1102 origsrc = okaytocopy(abssrc, relsrc, exact)
1092 origsrc = okaytocopy(abssrc, relsrc, exact)
1103 if origsrc:
1093 if origsrc:
1104 srcs.append((origsrc, abssrc, relsrc, exact))
1094 srcs.append((origsrc, abssrc, relsrc, exact))
1105 if not srcs:
1095 if not srcs:
1106 continue
1096 continue
1107 copylist.append((tfn(pat, dest, srcs), srcs))
1097 copylist.append((tfn(pat, dest, srcs), srcs))
1108 if not copylist:
1098 if not copylist:
1109 raise util.Abort(_('no files to copy'))
1099 raise util.Abort(_('no files to copy'))
1110
1100
1111 for targetpath, srcs in copylist:
1101 for targetpath, srcs in copylist:
1112 for origsrc, abssrc, relsrc, exact in srcs:
1102 for origsrc, abssrc, relsrc, exact in srcs:
1113 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1103 copy(origsrc, abssrc, relsrc, targetpath(abssrc), exact)
1114
1104
1115 if errors:
1105 if errors:
1116 ui.warn(_('(consider using --after)\n'))
1106 ui.warn(_('(consider using --after)\n'))
1117 return errors, copied
1107 return errors, copied
1118
1108
1119 def copy(ui, repo, *pats, **opts):
1109 def copy(ui, repo, *pats, **opts):
1120 """mark files as copied for the next commit
1110 """mark files as copied for the next commit
1121
1111
1122 Mark dest as having copies of source files. If dest is a
1112 Mark dest as having copies of source files. If dest is a
1123 directory, copies are put in that directory. If dest is a file,
1113 directory, copies are put in that directory. If dest is a file,
1124 there can only be one source.
1114 there can only be one source.
1125
1115
1126 By default, this command copies the contents of files as they
1116 By default, this command copies the contents of files as they
1127 stand in the working directory. If invoked with --after, the
1117 stand in the working directory. If invoked with --after, the
1128 operation is recorded, but no copying is performed.
1118 operation is recorded, but no copying is performed.
1129
1119
1130 This command takes effect in the next commit.
1120 This command takes effect in the next commit.
1131
1121
1132 NOTE: This command should be treated as experimental. While it
1122 NOTE: This command should be treated as experimental. While it
1133 should properly record copied files, this information is not yet
1123 should properly record copied files, this information is not yet
1134 fully used by merge, nor fully reported by log.
1124 fully used by merge, nor fully reported by log.
1135 """
1125 """
1136 wlock = repo.wlock(0)
1126 wlock = repo.wlock(0)
1137 errs, copied = docopy(ui, repo, pats, opts, wlock)
1127 errs, copied = docopy(ui, repo, pats, opts, wlock)
1138 return errs
1128 return errs
1139
1129
1140 def debugancestor(ui, index, rev1, rev2):
1130 def debugancestor(ui, index, rev1, rev2):
1141 """find the ancestor revision of two revisions in a given index"""
1131 """find the ancestor revision of two revisions in a given index"""
1142 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1132 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index, "", 0)
1143 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1133 a = r.ancestor(r.lookup(rev1), r.lookup(rev2))
1144 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1134 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
1145
1135
1146 def debugcomplete(ui, cmd='', **opts):
1136 def debugcomplete(ui, cmd='', **opts):
1147 """returns the completion list associated with the given command"""
1137 """returns the completion list associated with the given command"""
1148
1138
1149 if opts['options']:
1139 if opts['options']:
1150 options = []
1140 options = []
1151 otables = [globalopts]
1141 otables = [globalopts]
1152 if cmd:
1142 if cmd:
1153 aliases, entry = findcmd(cmd)
1143 aliases, entry = findcmd(cmd)
1154 otables.append(entry[1])
1144 otables.append(entry[1])
1155 for t in otables:
1145 for t in otables:
1156 for o in t:
1146 for o in t:
1157 if o[0]:
1147 if o[0]:
1158 options.append('-%s' % o[0])
1148 options.append('-%s' % o[0])
1159 options.append('--%s' % o[1])
1149 options.append('--%s' % o[1])
1160 ui.write("%s\n" % "\n".join(options))
1150 ui.write("%s\n" % "\n".join(options))
1161 return
1151 return
1162
1152
1163 clist = findpossible(cmd).keys()
1153 clist = findpossible(cmd).keys()
1164 clist.sort()
1154 clist.sort()
1165 ui.write("%s\n" % "\n".join(clist))
1155 ui.write("%s\n" % "\n".join(clist))
1166
1156
1167 def debugrebuildstate(ui, repo, rev=None):
1157 def debugrebuildstate(ui, repo, rev=None):
1168 """rebuild the dirstate as it would look like for the given revision"""
1158 """rebuild the dirstate as it would look like for the given revision"""
1169 if not rev:
1159 if not rev:
1170 rev = repo.changelog.tip()
1160 rev = repo.changelog.tip()
1171 else:
1161 else:
1172 rev = repo.lookup(rev)
1162 rev = repo.lookup(rev)
1173 change = repo.changelog.read(rev)
1163 change = repo.changelog.read(rev)
1174 n = change[0]
1164 n = change[0]
1175 files = repo.manifest.read(n)
1165 files = repo.manifest.read(n)
1176 wlock = repo.wlock()
1166 wlock = repo.wlock()
1177 repo.dirstate.rebuild(rev, files)
1167 repo.dirstate.rebuild(rev, files)
1178
1168
1179 def debugcheckstate(ui, repo):
1169 def debugcheckstate(ui, repo):
1180 """validate the correctness of the current dirstate"""
1170 """validate the correctness of the current dirstate"""
1181 parent1, parent2 = repo.dirstate.parents()
1171 parent1, parent2 = repo.dirstate.parents()
1182 repo.dirstate.read()
1172 repo.dirstate.read()
1183 dc = repo.dirstate.map
1173 dc = repo.dirstate.map
1184 keys = dc.keys()
1174 keys = dc.keys()
1185 keys.sort()
1175 keys.sort()
1186 m1n = repo.changelog.read(parent1)[0]
1176 m1n = repo.changelog.read(parent1)[0]
1187 m2n = repo.changelog.read(parent2)[0]
1177 m2n = repo.changelog.read(parent2)[0]
1188 m1 = repo.manifest.read(m1n)
1178 m1 = repo.manifest.read(m1n)
1189 m2 = repo.manifest.read(m2n)
1179 m2 = repo.manifest.read(m2n)
1190 errors = 0
1180 errors = 0
1191 for f in dc:
1181 for f in dc:
1192 state = repo.dirstate.state(f)
1182 state = repo.dirstate.state(f)
1193 if state in "nr" and f not in m1:
1183 if state in "nr" and f not in m1:
1194 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1184 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
1195 errors += 1
1185 errors += 1
1196 if state in "a" and f in m1:
1186 if state in "a" and f in m1:
1197 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1187 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
1198 errors += 1
1188 errors += 1
1199 if state in "m" and f not in m1 and f not in m2:
1189 if state in "m" and f not in m1 and f not in m2:
1200 ui.warn(_("%s in state %s, but not in either manifest\n") %
1190 ui.warn(_("%s in state %s, but not in either manifest\n") %
1201 (f, state))
1191 (f, state))
1202 errors += 1
1192 errors += 1
1203 for f in m1:
1193 for f in m1:
1204 state = repo.dirstate.state(f)
1194 state = repo.dirstate.state(f)
1205 if state not in "nrm":
1195 if state not in "nrm":
1206 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1196 ui.warn(_("%s in manifest1, but listed as state %s\n") % (f, state))
1207 errors += 1
1197 errors += 1
1208 if errors:
1198 if errors:
1209 error = _(".hg/dirstate inconsistent with current parent's manifest")
1199 error = _(".hg/dirstate inconsistent with current parent's manifest")
1210 raise util.Abort(error)
1200 raise util.Abort(error)
1211
1201
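debugcheckstate cross-checks in both directions: every tracked or removed dirstate entry must exist in the first parent manifest, added files must not, merged files must appear in at least one parent, and every manifest entry must be tracked. A toy version of that cross-check over plain dicts and sets (the file names and states are invented):

    def crosscheck(dirstate, m1, m2):
        """Count the same kinds of inconsistencies debugcheckstate looks for.

        dirstate maps file name -> one-letter state ('n' normal, 'a' added,
        'r' removed, 'm' merged); m1 and m2 are sets of file names taken
        from the two parent manifests.
        """
        errors = 0
        for f, state in dirstate.items():
            if state in 'nr' and f not in m1:
                errors += 1        # tracked/removed but missing from manifest1
            if state == 'a' and f in m1:
                errors += 1        # scheduled for add yet already in manifest1
            if state == 'm' and f not in m1 and f not in m2:
                errors += 1        # merged but present in neither manifest
        for f in m1:
            if dirstate.get(f, '?') not in 'nrm':
                errors += 1        # in manifest1 but not tracked at all
        return errors

    # invented data: c.txt is in the manifest but unknown to the dirstate
    print(crosscheck({'a.txt': 'n', 'b.txt': 'a'}, {'a.txt', 'c.txt'}, set()))   # 1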
1212 def debugconfig(ui, repo, *values):
1202 def debugconfig(ui, repo, *values):
1213 """show combined config settings from all hgrc files
1203 """show combined config settings from all hgrc files
1214
1204
1215 With no args, print names and values of all config items.
1205 With no args, print names and values of all config items.
1216
1206
1217 With one arg of the form section.name, print just the value of
1207 With one arg of the form section.name, print just the value of
1218 that config item.
1208 that config item.
1219
1209
1220 With multiple args, print names and values of all config items
1210 With multiple args, print names and values of all config items
1221 with matching section names."""
1211 with matching section names."""
1222
1212
1223 if values:
1213 if values:
1224 if len([v for v in values if '.' in v]) > 1:
1214 if len([v for v in values if '.' in v]) > 1:
1225 raise util.Abort(_('only one config item permitted'))
1215 raise util.Abort(_('only one config item permitted'))
1226 for section, name, value in ui.walkconfig():
1216 for section, name, value in ui.walkconfig():
1227 sectname = section + '.' + name
1217 sectname = section + '.' + name
1228 if values:
1218 if values:
1229 for v in values:
1219 for v in values:
1230 if v == section:
1220 if v == section:
1231 ui.write('%s=%s\n' % (sectname, value))
1221 ui.write('%s=%s\n' % (sectname, value))
1232 elif v == sectname:
1222 elif v == sectname:
1233 ui.write(value, '\n')
1223 ui.write(value, '\n')
1234 else:
1224 else:
1235 ui.write('%s=%s\n' % (sectname, value))
1225 ui.write('%s=%s\n' % (sectname, value))
1236
1226
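The selection logic in debugconfig is small: with no arguments every item is printed as section.name=value, an argument naming a whole section prints that section's items in the same form, and an argument naming an exact section.name prints only the value. A sketch over a hard-coded list of (section, name, value) triples (the sample values are invented, not real configuration):

    def show_config(items, args):
        """Mimic the three output modes of the config dump above."""
        out = []
        for section, name, value in items:
            sectname = section + '.' + name
            if not args:
                out.append('%s=%s' % (sectname, value))
                continue
            for a in args:
                if a == section:                 # whole section requested
                    out.append('%s=%s' % (sectname, value))
                elif a == sectname:              # exact item: value only
                    out.append(value)
        return out

    items = [('ui', 'username', 'Jane <jane@example.com>'), ('ui', 'verbose', 'true')]
    print(show_config(items, []))                # both items, as name=value
    print(show_config(items, ['ui.verbose']))    # ['true']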
1237 def debugsetparents(ui, repo, rev1, rev2=None):
1227 def debugsetparents(ui, repo, rev1, rev2=None):
1238 """manually set the parents of the current working directory
1228 """manually set the parents of the current working directory
1239
1229
1240 This is useful for writing repository conversion tools, but should
1230 This is useful for writing repository conversion tools, but should
1241 be used with care.
1231 be used with care.
1242 """
1232 """
1243
1233
1244 if not rev2:
1234 if not rev2:
1245 rev2 = hex(nullid)
1235 rev2 = hex(nullid)
1246
1236
1247 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1237 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
1248
1238
1249 def debugstate(ui, repo):
1239 def debugstate(ui, repo):
1250 """show the contents of the current dirstate"""
1240 """show the contents of the current dirstate"""
1251 repo.dirstate.read()
1241 repo.dirstate.read()
1252 dc = repo.dirstate.map
1242 dc = repo.dirstate.map
1253 keys = dc.keys()
1243 keys = dc.keys()
1254 keys.sort()
1244 keys.sort()
1255 for file_ in keys:
1245 for file_ in keys:
1256 ui.write("%c %3o %10d %s %s\n"
1246 ui.write("%c %3o %10d %s %s\n"
1257 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1247 % (dc[file_][0], dc[file_][1] & 0777, dc[file_][2],
1258 time.strftime("%x %X",
1248 time.strftime("%x %X",
1259 time.localtime(dc[file_][3])), file_))
1249 time.localtime(dc[file_][3])), file_))
1260 for f in repo.dirstate.copies:
1250 for f in repo.dirstate.copies:
1261 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1251 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copies[f], f))
1262
1252
1263 def debugdata(ui, file_, rev):
1253 def debugdata(ui, file_, rev):
1264 """dump the contents of an data file revision"""
1254 """dump the contents of an data file revision"""
1265 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1255 r = revlog.revlog(util.opener(os.getcwd(), audit=False),
1266 file_[:-2] + ".i", file_, 0)
1256 file_[:-2] + ".i", file_, 0)
1267 try:
1257 try:
1268 ui.write(r.revision(r.lookup(rev)))
1258 ui.write(r.revision(r.lookup(rev)))
1269 except KeyError:
1259 except KeyError:
1270 raise util.Abort(_('invalid revision identifier %s'), rev)
1260 raise util.Abort(_('invalid revision identifier %s'), rev)
1271
1261
1272 def debugindex(ui, file_):
1262 def debugindex(ui, file_):
1273 """dump the contents of an index file"""
1263 """dump the contents of an index file"""
1274 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1264 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1275 ui.write(" rev offset length base linkrev" +
1265 ui.write(" rev offset length base linkrev" +
1276 " nodeid p1 p2\n")
1266 " nodeid p1 p2\n")
1277 for i in range(r.count()):
1267 for i in range(r.count()):
1278 node = r.node(i)
1268 node = r.node(i)
1279 pp = r.parents(node)
1269 pp = r.parents(node)
1280 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1270 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
1281 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1271 i, r.start(i), r.length(i), r.base(i), r.linkrev(node),
1282 short(node), short(pp[0]), short(pp[1])))
1272 short(node), short(pp[0]), short(pp[1])))
1283
1273
1284 def debugindexdot(ui, file_):
1274 def debugindexdot(ui, file_):
1285 """dump an index DAG as a .dot file"""
1275 """dump an index DAG as a .dot file"""
1286 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1276 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_, "", 0)
1287 ui.write("digraph G {\n")
1277 ui.write("digraph G {\n")
1288 for i in range(r.count()):
1278 for i in range(r.count()):
1289 node = r.node(i)
1279 node = r.node(i)
1290 pp = r.parents(node)
1280 pp = r.parents(node)
1291 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1281 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
1292 if pp[1] != nullid:
1282 if pp[1] != nullid:
1293 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1283 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
1294 ui.write("}\n")
1284 ui.write("}\n")
1295
1285
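debugindexdot writes a Graphviz digraph with an edge from each parent revision to its child, which makes the revlog DAG easy to visualize. A toy rendering of the same idea over a made-up parent table (null parents are simply skipped here):

    def todot(parents):
        """Render a {rev: [parent revs]} map as Graphviz dot text."""
        lines = ['digraph G {']
        for rev in sorted(parents):
            for p in parents[rev]:
                lines.append('\t%d -> %d' % (p, rev))   # edge: parent -> child
        lines.append('}')
        return '\n'.join(lines)

    # a tiny history: 1 and 2 branch off 0, and 3 merges them back together
    print(todot({0: [], 1: [0], 2: [0], 3: [1, 2]}))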
1296 def debugrename(ui, repo, file, rev=None):
1286 def debugrename(ui, repo, file, rev=None):
1297 """dump rename information"""
1287 """dump rename information"""
1298 r = repo.file(relpath(repo, [file])[0])
1288 r = repo.file(relpath(repo, [file])[0])
1299 if rev:
1289 if rev:
1300 try:
1290 try:
1301 # assume all revision numbers are for changesets
1291 # assume all revision numbers are for changesets
1302 n = repo.lookup(rev)
1292 n = repo.lookup(rev)
1303 change = repo.changelog.read(n)
1293 change = repo.changelog.read(n)
1304 m = repo.manifest.read(change[0])
1294 m = repo.manifest.read(change[0])
1305 n = m[relpath(repo, [file])[0]]
1295 n = m[relpath(repo, [file])[0]]
1306 except (hg.RepoError, KeyError):
1296 except (hg.RepoError, KeyError):
1307 n = r.lookup(rev)
1297 n = r.lookup(rev)
1308 else:
1298 else:
1309 n = r.tip()
1299 n = r.tip()
1310 m = r.renamed(n)
1300 m = r.renamed(n)
1311 if m:
1301 if m:
1312 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1302 ui.write(_("renamed from %s:%s\n") % (m[0], hex(m[1])))
1313 else:
1303 else:
1314 ui.write(_("not renamed\n"))
1304 ui.write(_("not renamed\n"))
1315
1305
1316 def debugwalk(ui, repo, *pats, **opts):
1306 def debugwalk(ui, repo, *pats, **opts):
1317 """show how files match on given patterns"""
1307 """show how files match on given patterns"""
1318 items = list(cmdutil.walk(repo, pats, opts))
1308 items = list(cmdutil.walk(repo, pats, opts))
1319 if not items:
1309 if not items:
1320 return
1310 return
1321 fmt = '%%s %%-%ds %%-%ds %%s' % (
1311 fmt = '%%s %%-%ds %%-%ds %%s' % (
1322 max([len(abs) for (src, abs, rel, exact) in items]),
1312 max([len(abs) for (src, abs, rel, exact) in items]),
1323 max([len(rel) for (src, abs, rel, exact) in items]))
1313 max([len(rel) for (src, abs, rel, exact) in items]))
1324 for src, abs, rel, exact in items:
1314 for src, abs, rel, exact in items:
1325 line = fmt % (src, abs, rel, exact and 'exact' or '')
1315 line = fmt % (src, abs, rel, exact and 'exact' or '')
1326 ui.write("%s\n" % line.rstrip())
1316 ui.write("%s\n" % line.rstrip())
1327
1317
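debugwalk builds its output format at run time: the longest absolute and relative names determine the column widths, which are interpolated into doubled-percent placeholders to produce the final format string. The same trick in isolation, with invented rows:

    rows = [('f', 'dir/deeply/nested.txt', 'nested.txt', True),
            ('f', 'short.txt', 'short.txt', False)]

    # doubled percent signs survive the first interpolation and become
    # the real conversion specifiers of the final format string
    fmt = '%%s %%-%ds %%-%ds %%s' % (
        max(len(abs_) for (src, abs_, rel, exact) in rows),
        max(len(rel) for (src, abs_, rel, exact) in rows))

    for src, abs_, rel, exact in rows:
        print((fmt % (src, abs_, rel, exact and 'exact' or '')).rstrip())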
1328 def diff(ui, repo, *pats, **opts):
1318 def diff(ui, repo, *pats, **opts):
1329 """diff repository (or selected files)
1319 """diff repository (or selected files)
1330
1320
1331 Show differences between revisions for the specified files.
1321 Show differences between revisions for the specified files.
1332
1322
1333 Differences between files are shown using the unified diff format.
1323 Differences between files are shown using the unified diff format.
1334
1324
1335 When two revision arguments are given, then changes are shown
1325 When two revision arguments are given, then changes are shown
1336 between those revisions. If only one revision is specified then
1326 between those revisions. If only one revision is specified then
1337 that revision is compared to the working directory, and, when no
1327 that revision is compared to the working directory, and, when no
1338 revisions are specified, the working directory files are compared
1328 revisions are specified, the working directory files are compared
1339 to its parent.
1329 to its parent.
1340
1330
1341 Without the -a option, diff will avoid generating diffs of files
1331 Without the -a option, diff will avoid generating diffs of files
1342 it detects as binary. With -a, diff will generate a diff anyway,
1332 it detects as binary. With -a, diff will generate a diff anyway,
1343 probably with undesirable results.
1333 probably with undesirable results.
1344 """
1334 """
1345 node1, node2 = revpair(ui, repo, opts['rev'])
1335 node1, node2 = revpair(ui, repo, opts['rev'])
1346
1336
1347 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1337 fns, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
1348
1338
1349 patch.diff(repo, node1, node2, fns, match=matchfn,
1339 patch.diff(repo, node1, node2, fns, match=matchfn,
1350 opts=ui.diffopts(opts))
1340 opts=patch.diffopts(ui, opts))
1351
1341
1352 def export(ui, repo, *changesets, **opts):
1342 def export(ui, repo, *changesets, **opts):
1353 """dump the header and diffs for one or more changesets
1343 """dump the header and diffs for one or more changesets
1354
1344
1355 Print the changeset header and diffs for one or more revisions.
1345 Print the changeset header and diffs for one or more revisions.
1356
1346
1357 The information shown in the changeset header is: author,
1347 The information shown in the changeset header is: author,
1358 changeset hash, parent and commit comment.
1348 changeset hash, parent and commit comment.
1359
1349
1360 Output may be to a file, in which case the name of the file is
1350 Output may be to a file, in which case the name of the file is
1361 given using a format string. The formatting rules are as follows:
1351 given using a format string. The formatting rules are as follows:
1362
1352
1363 %% literal "%" character
1353 %% literal "%" character
1364 %H changeset hash (40 hexadecimal digits)
1354 %H changeset hash (40 hexadecimal digits)
1365 %N number of patches being generated
1355 %N number of patches being generated
1366 %R changeset revision number
1356 %R changeset revision number
1367 %b basename of the exporting repository
1357 %b basename of the exporting repository
1368 %h short-form changeset hash (12 hexadecimal digits)
1358 %h short-form changeset hash (12 hexadecimal digits)
1369 %n zero-padded sequence number, starting at 1
1359 %n zero-padded sequence number, starting at 1
1370 %r zero-padded changeset revision number
1360 %r zero-padded changeset revision number
1371
1361
1372 Without the -a option, export will avoid generating diffs of files
1362 Without the -a option, export will avoid generating diffs of files
1373 it detects as binary. With -a, export will generate a diff anyway,
1363 it detects as binary. With -a, export will generate a diff anyway,
1374 probably with undesirable results.
1364 probably with undesirable results.
1375
1365
1376 With the --switch-parent option, the diff will be against the second
1366 With the --switch-parent option, the diff will be against the second
1377 parent. This can be useful for reviewing a merge.
1367 parent. This can be useful for reviewing a merge.
1378 """
1368 """
1379 if not changesets:
1369 if not changesets:
1380 raise util.Abort(_("export requires at least one changeset"))
1370 raise util.Abort(_("export requires at least one changeset"))
1381 revs = list(revrange(ui, repo, changesets))
1371 revs = list(revrange(ui, repo, changesets))
1382 if len(revs) > 1:
1372 if len(revs) > 1:
1383 ui.note(_('exporting patches:\n'))
1373 ui.note(_('exporting patches:\n'))
1384 else:
1374 else:
1385 ui.note(_('exporting patch:\n'))
1375 ui.note(_('exporting patch:\n'))
1386 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1376 patch.export(repo, map(repo.lookup, revs), template=opts['output'],
1387 switch_parent=opts['switch_parent'], opts=ui.diffopts(opts))
1377 switch_parent=opts['switch_parent'],
1378 opts=patch.diffopts(ui, opts))
1388
1379
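The export docstring above lists the % escapes allowed in the output file name. The sketch below shows how such a template might be expanded for one patch in a series; the expand helper, its zero-padding rule and the sample values are assumptions for illustration, not Mercurial's actual implementation:

    def expand(template, total, seq, rev, node, reponame):
        """Expand the documented escapes for one patch in a series."""
        width = len(str(total))             # padding width: a simplification
        subs = {
            '%': '%',
            'H': node,                      # full 40-digit hash
            'h': node[:12],                 # short hash
            'R': str(rev),                  # revision number
            'r': str(rev).zfill(width),     # zero-padded revision number
            'N': str(total),                # number of patches generated
            'n': str(seq).zfill(width),     # zero-padded sequence number, from 1
            'b': reponame,                  # basename of the repository
        }
        out, i = [], 0
        while i < len(template):
            c = template[i]
            if c == '%' and i + 1 < len(template) and template[i + 1] in subs:
                out.append(subs[template[i + 1]])
                i += 2
            else:
                out.append(c)
                i += 1
        return ''.join(out)

    print(expand('%b-%r.patch', 12, 3, 7, 'a' * 40, 'myrepo'))   # myrepo-07.patch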
1389 def forget(ui, repo, *pats, **opts):
1380 def forget(ui, repo, *pats, **opts):
1390 """don't add the specified files on the next commit (DEPRECATED)
1381 """don't add the specified files on the next commit (DEPRECATED)
1391
1382
1392 (DEPRECATED)
1383 (DEPRECATED)
1393 Undo an 'hg add' scheduled for the next commit.
1384 Undo an 'hg add' scheduled for the next commit.
1394
1385
1395 This command is now deprecated and will be removed in a future
1386 This command is now deprecated and will be removed in a future
1396 release. Please use revert instead.
1387 release. Please use revert instead.
1397 """
1388 """
1398 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1389 ui.warn(_("(the forget command is deprecated; use revert instead)\n"))
1399 forget = []
1390 forget = []
1400 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1391 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
1401 if repo.dirstate.state(abs) == 'a':
1392 if repo.dirstate.state(abs) == 'a':
1402 forget.append(abs)
1393 forget.append(abs)
1403 if ui.verbose or not exact:
1394 if ui.verbose or not exact:
1404 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1395 ui.status(_('forgetting %s\n') % ((pats and rel) or abs))
1405 repo.forget(forget)
1396 repo.forget(forget)
1406
1397
1407 def grep(ui, repo, pattern, *pats, **opts):
1398 def grep(ui, repo, pattern, *pats, **opts):
1408 """search for a pattern in specified files and revisions
1399 """search for a pattern in specified files and revisions
1409
1400
1410 Search revisions of files for a regular expression.
1401 Search revisions of files for a regular expression.
1411
1402
1412 This command behaves differently from Unix grep. It only accepts
1403 This command behaves differently from Unix grep. It only accepts
1413 Python/Perl regexps. It searches repository history, not the
1404 Python/Perl regexps. It searches repository history, not the
1414 working directory. It always prints the revision number in which
1405 working directory. It always prints the revision number in which
1415 a match appears.
1406 a match appears.
1416
1407
1417 By default, grep only prints output for the first revision of a
1408 By default, grep only prints output for the first revision of a
1418 file in which it finds a match. To get it to print every revision
1409 file in which it finds a match. To get it to print every revision
1419 that contains a change in match status ("-" for a match that
1410 that contains a change in match status ("-" for a match that
1420 becomes a non-match, or "+" for a non-match that becomes a match),
1411 becomes a non-match, or "+" for a non-match that becomes a match),
1421 use the --all flag.
1412 use the --all flag.
1422 """
1413 """
1423 reflags = 0
1414 reflags = 0
1424 if opts['ignore_case']:
1415 if opts['ignore_case']:
1425 reflags |= re.I
1416 reflags |= re.I
1426 regexp = re.compile(pattern, reflags)
1417 regexp = re.compile(pattern, reflags)
1427 sep, eol = ':', '\n'
1418 sep, eol = ':', '\n'
1428 if opts['print0']:
1419 if opts['print0']:
1429 sep = eol = '\0'
1420 sep = eol = '\0'
1430
1421
1431 fcache = {}
1422 fcache = {}
1432 def getfile(fn):
1423 def getfile(fn):
1433 if fn not in fcache:
1424 if fn not in fcache:
1434 fcache[fn] = repo.file(fn)
1425 fcache[fn] = repo.file(fn)
1435 return fcache[fn]
1426 return fcache[fn]
1436
1427
1437 def matchlines(body):
1428 def matchlines(body):
1438 begin = 0
1429 begin = 0
1439 linenum = 0
1430 linenum = 0
1440 while True:
1431 while True:
1441 match = regexp.search(body, begin)
1432 match = regexp.search(body, begin)
1442 if not match:
1433 if not match:
1443 break
1434 break
1444 mstart, mend = match.span()
1435 mstart, mend = match.span()
1445 linenum += body.count('\n', begin, mstart) + 1
1436 linenum += body.count('\n', begin, mstart) + 1
1446 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1437 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1447 lend = body.find('\n', mend)
1438 lend = body.find('\n', mend)
1448 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1439 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1449 begin = lend + 1
1440 begin = lend + 1
1450
1441
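matchlines above advances one regexp match at a time through the whole file body, keeping a running line number and converting the match span into column offsets within the matched line. Lifted out of grep so it can be run on its own:

    import re

    def matchlines(regexp, body):
        """Yield (linenum, colstart, colend, line) for every match in body."""
        begin = 0
        linenum = 0
        while True:
            match = regexp.search(body, begin)
            if not match:
                break
            mstart, mend = match.span()
            # count the newlines skipped since the previous match
            linenum += body.count('\n', begin, mstart) + 1
            lstart = body.rfind('\n', begin, mstart) + 1 or begin
            lend = body.find('\n', mend)
            yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
            begin = lend + 1

    body = 'alpha\nbeta gamma\ndelta gamma\n'
    for hit in matchlines(re.compile('gamma'), body):
        print(hit)    # (2, 5, 10, 'beta gamma') then (3, 6, 11, 'delta gamma')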
1451 class linestate(object):
1442 class linestate(object):
1452 def __init__(self, line, linenum, colstart, colend):
1443 def __init__(self, line, linenum, colstart, colend):
1453 self.line = line
1444 self.line = line
1454 self.linenum = linenum
1445 self.linenum = linenum
1455 self.colstart = colstart
1446 self.colstart = colstart
1456 self.colend = colend
1447 self.colend = colend
1457
1448
1458 def __eq__(self, other):
1449 def __eq__(self, other):
1459 return self.line == other.line
1450 return self.line == other.line
1460
1451
1461 matches = {}
1452 matches = {}
1462 copies = {}
1453 copies = {}
1463 def grepbody(fn, rev, body):
1454 def grepbody(fn, rev, body):
1464 matches[rev].setdefault(fn, [])
1455 matches[rev].setdefault(fn, [])
1465 m = matches[rev][fn]
1456 m = matches[rev][fn]
1466 for lnum, cstart, cend, line in matchlines(body):
1457 for lnum, cstart, cend, line in matchlines(body):
1467 s = linestate(line, lnum, cstart, cend)
1458 s = linestate(line, lnum, cstart, cend)
1468 m.append(s)
1459 m.append(s)
1469
1460
1470 def difflinestates(a, b):
1461 def difflinestates(a, b):
1471 sm = difflib.SequenceMatcher(None, a, b)
1462 sm = difflib.SequenceMatcher(None, a, b)
1472 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1463 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1473 if tag == 'insert':
1464 if tag == 'insert':
1474 for i in range(blo, bhi):
1465 for i in range(blo, bhi):
1475 yield ('+', b[i])
1466 yield ('+', b[i])
1476 elif tag == 'delete':
1467 elif tag == 'delete':
1477 for i in range(alo, ahi):
1468 for i in range(alo, ahi):
1478 yield ('-', a[i])
1469 yield ('-', a[i])
1479 elif tag == 'replace':
1470 elif tag == 'replace':
1480 for i in range(alo, ahi):
1471 for i in range(alo, ahi):
1481 yield ('-', a[i])
1472 yield ('-', a[i])
1482 for i in range(blo, bhi):
1473 for i in range(blo, bhi):
1483 yield ('+', b[i])
1474 yield ('+', b[i])
1484
1475
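difflinestates feeds the previous and current lists of matching lines to difflib.SequenceMatcher and translates the insert, delete and replace opcodes into '+' and '-' rows; this is what --all uses to report lines whose match status changed between revisions. Run over two plain lists of strings it looks like this:

    import difflib

    def difflinestates(a, b):
        """Yield ('-', old) / ('+', new) pairs for entries that changed."""
        sm = difflib.SequenceMatcher(None, a, b)
        for tag, alo, ahi, blo, bhi in sm.get_opcodes():
            if tag == 'insert':
                for i in range(blo, bhi):
                    yield ('+', b[i])
            elif tag == 'delete':
                for i in range(alo, ahi):
                    yield ('-', a[i])
            elif tag == 'replace':
                for i in range(alo, ahi):
                    yield ('-', a[i])
                for i in range(blo, bhi):
                    yield ('+', b[i])

    old = ['def f(x):', 'return x']
    new = ['def f(x):', 'return x + 1']
    print(list(difflinestates(old, new)))
    # [('-', 'return x'), ('+', 'return x + 1')]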
1485 prev = {}
1476 prev = {}
1486 ucache = {}
1477 ucache = {}
1487 def display(fn, rev, states, prevstates):
1478 def display(fn, rev, states, prevstates):
1488 counts = {'-': 0, '+': 0}
1479 counts = {'-': 0, '+': 0}
1489 filerevmatches = {}
1480 filerevmatches = {}
1490 if incrementing or not opts['all']:
1481 if incrementing or not opts['all']:
1491 a, b = prevstates, states
1482 a, b = prevstates, states
1492 else:
1483 else:
1493 a, b = states, prevstates
1484 a, b = states, prevstates
1494 for change, l in difflinestates(a, b):
1485 for change, l in difflinestates(a, b):
1495 if incrementing or not opts['all']:
1486 if incrementing or not opts['all']:
1496 r = rev
1487 r = rev
1497 else:
1488 else:
1498 r = prev[fn]
1489 r = prev[fn]
1499 cols = [fn, str(r)]
1490 cols = [fn, str(r)]
1500 if opts['line_number']:
1491 if opts['line_number']:
1501 cols.append(str(l.linenum))
1492 cols.append(str(l.linenum))
1502 if opts['all']:
1493 if opts['all']:
1503 cols.append(change)
1494 cols.append(change)
1504 if opts['user']:
1495 if opts['user']:
1505 cols.append(trimuser(ui, getchange(r)[1], rev,
1496 cols.append(trimuser(ui, getchange(r)[1], rev,
1506 ucache))
1497 ucache))
1507 if opts['files_with_matches']:
1498 if opts['files_with_matches']:
1508 c = (fn, rev)
1499 c = (fn, rev)
1509 if c in filerevmatches:
1500 if c in filerevmatches:
1510 continue
1501 continue
1511 filerevmatches[c] = 1
1502 filerevmatches[c] = 1
1512 else:
1503 else:
1513 cols.append(l.line)
1504 cols.append(l.line)
1514 ui.write(sep.join(cols), eol)
1505 ui.write(sep.join(cols), eol)
1515 counts[change] += 1
1506 counts[change] += 1
1516 return counts['+'], counts['-']
1507 return counts['+'], counts['-']
1517
1508
1518 fstate = {}
1509 fstate = {}
1519 skip = {}
1510 skip = {}
1520 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1511 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1521 count = 0
1512 count = 0
1522 incrementing = False
1513 incrementing = False
1523 follow = opts.get('follow')
1514 follow = opts.get('follow')
1524 for st, rev, fns in changeiter:
1515 for st, rev, fns in changeiter:
1525 if st == 'window':
1516 if st == 'window':
1526 incrementing = rev
1517 incrementing = rev
1527 matches.clear()
1518 matches.clear()
1528 copies.clear()
1519 copies.clear()
1529 elif st == 'add':
1520 elif st == 'add':
1530 change = repo.changelog.read(repo.lookup(str(rev)))
1521 change = repo.changelog.read(repo.lookup(str(rev)))
1531 mf = repo.manifest.read(change[0])
1522 mf = repo.manifest.read(change[0])
1532 matches[rev] = {}
1523 matches[rev] = {}
1533 for fn in fns:
1524 for fn in fns:
1534 if fn in skip:
1525 if fn in skip:
1535 continue
1526 continue
1536 fstate.setdefault(fn, {})
1527 fstate.setdefault(fn, {})
1537 copies.setdefault(rev, {})
1528 copies.setdefault(rev, {})
1538 try:
1529 try:
1539 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1530 grepbody(fn, rev, getfile(fn).read(mf[fn]))
1540 if follow:
1531 if follow:
1541 copied = getfile(fn).renamed(mf[fn])
1532 copied = getfile(fn).renamed(mf[fn])
1542 if copied:
1533 if copied:
1543 copies[rev][fn] = copied[0]
1534 copies[rev][fn] = copied[0]
1544 except KeyError:
1535 except KeyError:
1545 pass
1536 pass
1546 elif st == 'iter':
1537 elif st == 'iter':
1547 states = matches[rev].items()
1538 states = matches[rev].items()
1548 states.sort()
1539 states.sort()
1549 for fn, m in states:
1540 for fn, m in states:
1550 copy = copies[rev].get(fn)
1541 copy = copies[rev].get(fn)
1551 if fn in skip:
1542 if fn in skip:
1552 if copy:
1543 if copy:
1553 skip[copy] = True
1544 skip[copy] = True
1554 continue
1545 continue
1555 if incrementing or not opts['all'] or fstate[fn]:
1546 if incrementing or not opts['all'] or fstate[fn]:
1556 pos, neg = display(fn, rev, m, fstate[fn])
1547 pos, neg = display(fn, rev, m, fstate[fn])
1557 count += pos + neg
1548 count += pos + neg
1558 if pos and not opts['all']:
1549 if pos and not opts['all']:
1559 skip[fn] = True
1550 skip[fn] = True
1560 if copy:
1551 if copy:
1561 skip[copy] = True
1552 skip[copy] = True
1562 fstate[fn] = m
1553 fstate[fn] = m
1563 if copy:
1554 if copy:
1564 fstate[copy] = m
1555 fstate[copy] = m
1565 prev[fn] = rev
1556 prev[fn] = rev
1566
1557
1567 if not incrementing:
1558 if not incrementing:
1568 fstate = fstate.items()
1559 fstate = fstate.items()
1569 fstate.sort()
1560 fstate.sort()
1570 for fn, state in fstate:
1561 for fn, state in fstate:
1571 if fn in skip:
1562 if fn in skip:
1572 continue
1563 continue
1573 if fn not in copies[prev[fn]]:
1564 if fn not in copies[prev[fn]]:
1574 display(fn, rev, {}, state)
1565 display(fn, rev, {}, state)
1575 return (count == 0 and 1) or 0
1566 return (count == 0 and 1) or 0
1576
1567
1577 def heads(ui, repo, **opts):
1568 def heads(ui, repo, **opts):
1578 """show current repository heads
1569 """show current repository heads
1579
1570
1580 Show all repository head changesets.
1571 Show all repository head changesets.
1581
1572
1582 Repository "heads" are changesets that have no child changesets.
1573 Repository "heads" are changesets that have no child changesets.
1583 They are where development generally takes place and
1574 They are where development generally takes place and
1584 are the usual targets for update and merge operations.
1575 are the usual targets for update and merge operations.
1585 """
1576 """
1586 if opts['rev']:
1577 if opts['rev']:
1587 heads = repo.heads(repo.lookup(opts['rev']))
1578 heads = repo.heads(repo.lookup(opts['rev']))
1588 else:
1579 else:
1589 heads = repo.heads()
1580 heads = repo.heads()
1590 br = None
1581 br = None
1591 if opts['branches']:
1582 if opts['branches']:
1592 br = repo.branchlookup(heads)
1583 br = repo.branchlookup(heads)
1593 displayer = show_changeset(ui, repo, opts)
1584 displayer = show_changeset(ui, repo, opts)
1594 for n in heads:
1585 for n in heads:
1595 displayer.show(changenode=n, brinfo=br)
1586 displayer.show(changenode=n, brinfo=br)
1596
1587
1597 def identify(ui, repo):
1588 def identify(ui, repo):
1598 """print information about the working copy
1589 """print information about the working copy
1599
1590
1600 Print a short summary of the current state of the repo.
1591 Print a short summary of the current state of the repo.
1601
1592
1602 This summary identifies the repository state using one or two parent
1593 This summary identifies the repository state using one or two parent
1603 hash identifiers, followed by a "+" if there are uncommitted changes
1594 hash identifiers, followed by a "+" if there are uncommitted changes
1604 in the working directory, followed by a list of tags for this revision.
1595 in the working directory, followed by a list of tags for this revision.
1605 """
1596 """
1606 parents = [p for p in repo.dirstate.parents() if p != nullid]
1597 parents = [p for p in repo.dirstate.parents() if p != nullid]
1607 if not parents:
1598 if not parents:
1608 ui.write(_("unknown\n"))
1599 ui.write(_("unknown\n"))
1609 return
1600 return
1610
1601
1611 hexfunc = ui.verbose and hex or short
1602 hexfunc = ui.verbose and hex or short
1612 modified, added, removed, deleted = repo.status()[:4]
1603 modified, added, removed, deleted = repo.status()[:4]
1613 output = ["%s%s" %
1604 output = ["%s%s" %
1614 ('+'.join([hexfunc(parent) for parent in parents]),
1605 ('+'.join([hexfunc(parent) for parent in parents]),
1615 (modified or added or removed or deleted) and "+" or "")]
1606 (modified or added or removed or deleted) and "+" or "")]
1616
1607
1617 if not ui.quiet:
1608 if not ui.quiet:
1618 # multiple tags for a single parent separated by '/'
1609 # multiple tags for a single parent separated by '/'
1619 parenttags = ['/'.join(tags)
1610 parenttags = ['/'.join(tags)
1620 for tags in map(repo.nodetags, parents) if tags]
1611 for tags in map(repo.nodetags, parents) if tags]
1621 # tags for multiple parents separated by ' + '
1612 # tags for multiple parents separated by ' + '
1622 if parenttags:
1613 if parenttags:
1623 output.append(' + '.join(parenttags))
1614 output.append(' + '.join(parenttags))
1624
1615
1625 ui.write("%s\n" % ' '.join(output))
1616 ui.write("%s\n" % ' '.join(output))
1626
1617
1627 def import_(ui, repo, patch1, *patches, **opts):
1618 def import_(ui, repo, patch1, *patches, **opts):
1628 """import an ordered set of patches
1619 """import an ordered set of patches
1629
1620
1630 Import a list of patches and commit them individually.
1621 Import a list of patches and commit them individually.
1631
1622
1632 If there are outstanding changes in the working directory, import
1623 If there are outstanding changes in the working directory, import
1633 will abort unless given the -f flag.
1624 will abort unless given the -f flag.
1634
1625
1635 You can import a patch straight from a mail message. Even patches
1626 You can import a patch straight from a mail message. Even patches
1636 sent as attachments work (the body part must be of type text/plain or
1627 sent as attachments work (the body part must be of type text/plain or
1637 text/x-patch to be used). The From and Subject headers of the email
1628 text/x-patch to be used). The From and Subject headers of the email
1638 message are used as the default committer and commit message. All
1629 message are used as the default committer and commit message. All
1639 text/plain body parts before the first diff are added to the commit
1630 text/plain body parts before the first diff are added to the commit
1640 message.
1631 message.
1641
1632
1642 If the imported patch was generated by hg export, the user and
1633 If the imported patch was generated by hg export, the user and
1643 description from the patch override values from the message headers
1634 description from the patch override values from the message headers
1644 and body. Values given on the command line with -m and -u override these.
1635 and body. Values given on the command line with -m and -u override these.
1645
1636
1646 To read a patch from standard input, use patch name "-".
1637 To read a patch from standard input, use patch name "-".
1647 """
1638 """
1648 patches = (patch1,) + patches
1639 patches = (patch1,) + patches
1649
1640
1650 if not opts['force']:
1641 if not opts['force']:
1651 bail_if_changed(repo)
1642 bail_if_changed(repo)
1652
1643
1653 d = opts["base"]
1644 d = opts["base"]
1654 strip = opts["strip"]
1645 strip = opts["strip"]
1655
1646
1656 wlock = repo.wlock()
1647 wlock = repo.wlock()
1657 lock = repo.lock()
1648 lock = repo.lock()
1658
1649
1659 for p in patches:
1650 for p in patches:
1660 pf = os.path.join(d, p)
1651 pf = os.path.join(d, p)
1661
1652
1662 if pf == '-':
1653 if pf == '-':
1663 ui.status(_("applying patch from stdin\n"))
1654 ui.status(_("applying patch from stdin\n"))
1664 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1655 tmpname, message, user, date = patch.extract(ui, sys.stdin)
1665 else:
1656 else:
1666 ui.status(_("applying %s\n") % p)
1657 ui.status(_("applying %s\n") % p)
1667 tmpname, message, user, date = patch.extract(ui, file(pf))
1658 tmpname, message, user, date = patch.extract(ui, file(pf))
1668
1659
1669 if tmpname is None:
1660 if tmpname is None:
1670 raise util.Abort(_('no diffs found'))
1661 raise util.Abort(_('no diffs found'))
1671
1662
1672 try:
1663 try:
1673 if opts['message']:
1664 if opts['message']:
1674 # pickup the cmdline msg
1665 # pickup the cmdline msg
1675 message = opts['message']
1666 message = opts['message']
1676 elif message:
1667 elif message:
1677 # pickup the patch msg
1668 # pickup the patch msg
1678 message = message.strip()
1669 message = message.strip()
1679 else:
1670 else:
1680 # launch the editor
1671 # launch the editor
1681 message = None
1672 message = None
1682 ui.debug(_('message:\n%s\n') % message)
1673 ui.debug(_('message:\n%s\n') % message)
1683
1674
1684 files, fuzz = patch.patch(tmpname, ui, strip=strip, cwd=repo.root)
1675 files = patch.patch(strip, tmpname, ui, cwd=repo.root)
1685 removes = []
1676 removes = []
1686 if len(files) > 0:
1677 if len(files) > 0:
1687 cfiles = files.keys()
1678 cfiles = files.keys()
1688 copies = []
1679 copies = []
1689 copts = {'after': False, 'force': False}
1680 copts = {'after': False, 'force': False}
1690 cwd = repo.getcwd()
1681 cwd = repo.getcwd()
1691 if cwd:
1682 if cwd:
1692 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1683 cfiles = [util.pathto(cwd, f) for f in files.keys()]
1693 for f in files:
1684 for f in files:
1694 ctype, gp = files[f]
1685 ctype, gp = files[f]
1695 if ctype == 'RENAME':
1686 if ctype == 'RENAME':
1696 copies.append((gp.oldpath, gp.path, gp.copymod))
1687 copies.append((gp.oldpath, gp.path, gp.copymod))
1697 removes.append(gp.oldpath)
1688 removes.append(gp.oldpath)
1698 elif ctype == 'COPY':
1689 elif ctype == 'COPY':
1699 copies.append((gp.oldpath, gp.path, gp.copymod))
1690 copies.append((gp.oldpath, gp.path, gp.copymod))
1700 elif ctype == 'DELETE':
1691 elif ctype == 'DELETE':
1701 removes.append(gp.path)
1692 removes.append(gp.path)
1702 for src, dst, after in copies:
1693 for src, dst, after in copies:
1703 absdst = os.path.join(repo.root, dst)
1694 absdst = os.path.join(repo.root, dst)
1704 if not after and os.path.exists(absdst):
1695 if not after and os.path.exists(absdst):
1705 raise util.Abort(_('patch creates existing file %s') % dst)
1696 raise util.Abort(_('patch creates existing file %s') % dst)
1706 if cwd:
1697 if cwd:
1707 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1698 src, dst = [util.pathto(cwd, f) for f in (src, dst)]
1708 copts['after'] = after
1699 copts['after'] = after
1709 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1700 errs, copied = docopy(ui, repo, (src, dst), copts, wlock=wlock)
1710 if errs:
1701 if errs:
1711 raise util.Abort(errs)
1702 raise util.Abort(errs)
1712 if removes:
1703 if removes:
1713 repo.remove(removes, True, wlock=wlock)
1704 repo.remove(removes, True, wlock=wlock)
1714 for f in files:
1705 for f in files:
1715 ctype, gp = files[f]
1706 ctype, gp = files[f]
1716 if gp and gp.mode:
1707 if gp and gp.mode:
1717 x = gp.mode & 0100 != 0
1708 x = gp.mode & 0100 != 0
1718 dst = os.path.join(repo.root, gp.path)
1709 dst = os.path.join(repo.root, gp.path)
1719 util.set_exec(dst, x)
1710 util.set_exec(dst, x)
1720 cmdutil.addremove(repo, cfiles, wlock=wlock)
1711 cmdutil.addremove(repo, cfiles, wlock=wlock)
1721 files = files.keys()
1712 files = files.keys()
1722 files.extend([r for r in removes if r not in files])
1713 files.extend([r for r in removes if r not in files])
1723 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1714 repo.commit(files, message, user, date, wlock=wlock, lock=lock)
1724 finally:
1715 finally:
1725 os.unlink(tmpname)
1716 os.unlink(tmpname)
1726
1717
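After the patch is applied, the import code above turns the change records into copy and remove operations: a RENAME becomes a copy plus removal of the old path, a COPY is just a copy, and a DELETE is a removal. A toy classification over plain tuples (the records are invented stand-ins for the real git-patch metadata):

    def classify(changes):
        """Split (ctype, oldpath, path) records into copy and remove lists."""
        copies, removes = [], []
        for ctype, oldpath, path in changes:
            if ctype == 'RENAME':
                copies.append((oldpath, path))
                removes.append(oldpath)
            elif ctype == 'COPY':
                copies.append((oldpath, path))
            elif ctype == 'DELETE':
                removes.append(path)
        return copies, removes

    changes = [('RENAME', 'old.c', 'new.c'),
               ('COPY', 'a.h', 'b.h'),
               ('DELETE', None, 'junk.txt')]
    print(classify(changes))
    # ([('old.c', 'new.c'), ('a.h', 'b.h')], ['old.c', 'junk.txt'])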
1727 def incoming(ui, repo, source="default", **opts):
1718 def incoming(ui, repo, source="default", **opts):
1728 """show new changesets found in source
1719 """show new changesets found in source
1729
1720
1730 Show new changesets found in the specified path/URL or the default
1721 Show new changesets found in the specified path/URL or the default
1731 pull location. These are the changesets that would be pulled if a pull
1722 pull location. These are the changesets that would be pulled if a pull
1732 was requested.
1723 was requested.
1733
1724
1734 For a remote repository, using --bundle avoids downloading the changesets
1725 For a remote repository, using --bundle avoids downloading the changesets
1735 twice if the incoming command is followed by a pull.
1726 twice if the incoming command is followed by a pull.
1736
1727
1737 See pull for valid source format details.
1728 See pull for valid source format details.
1738 """
1729 """
1739 source = ui.expandpath(source)
1730 source = ui.expandpath(source)
1740 setremoteconfig(ui, opts)
1731 setremoteconfig(ui, opts)
1741
1732
1742 other = hg.repository(ui, source)
1733 other = hg.repository(ui, source)
1743 incoming = repo.findincoming(other, force=opts["force"])
1734 incoming = repo.findincoming(other, force=opts["force"])
1744 if not incoming:
1735 if not incoming:
1745 ui.status(_("no changes found\n"))
1736 ui.status(_("no changes found\n"))
1746 return
1737 return
1747
1738
1748 cleanup = None
1739 cleanup = None
1749 try:
1740 try:
1750 fname = opts["bundle"]
1741 fname = opts["bundle"]
1751 if fname or not other.local():
1742 if fname or not other.local():
1752 # create a bundle (uncompressed if other repo is not local)
1743 # create a bundle (uncompressed if other repo is not local)
1753 cg = other.changegroup(incoming, "incoming")
1744 cg = other.changegroup(incoming, "incoming")
1754 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1745 fname = cleanup = write_bundle(cg, fname, compress=other.local())
1755 # keep written bundle?
1746 # keep written bundle?
1756 if opts["bundle"]:
1747 if opts["bundle"]:
1757 cleanup = None
1748 cleanup = None
1758 if not other.local():
1749 if not other.local():
1759 # use the created uncompressed bundlerepo
1750 # use the created uncompressed bundlerepo
1760 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1751 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1761
1752
1762 revs = None
1753 revs = None
1763 if opts['rev']:
1754 if opts['rev']:
1764 revs = [other.lookup(rev) for rev in opts['rev']]
1755 revs = [other.lookup(rev) for rev in opts['rev']]
1765 o = other.changelog.nodesbetween(incoming, revs)[0]
1756 o = other.changelog.nodesbetween(incoming, revs)[0]
1766 if opts['newest_first']:
1757 if opts['newest_first']:
1767 o.reverse()
1758 o.reverse()
1768 displayer = show_changeset(ui, other, opts)
1759 displayer = show_changeset(ui, other, opts)
1769 for n in o:
1760 for n in o:
1770 parents = [p for p in other.changelog.parents(n) if p != nullid]
1761 parents = [p for p in other.changelog.parents(n) if p != nullid]
1771 if opts['no_merges'] and len(parents) == 2:
1762 if opts['no_merges'] and len(parents) == 2:
1772 continue
1763 continue
1773 displayer.show(changenode=n)
1764 displayer.show(changenode=n)
1774 if opts['patch']:
1765 if opts['patch']:
1775 prev = (parents and parents[0]) or nullid
1766 prev = (parents and parents[0]) or nullid
1776 patch.diff(repo, other, prev, n)
1767 patch.diff(repo, other, prev, n)
1777 ui.write("\n")
1768 ui.write("\n")
1778 finally:
1769 finally:
1779 if hasattr(other, 'close'):
1770 if hasattr(other, 'close'):
1780 other.close()
1771 other.close()
1781 if cleanup:
1772 if cleanup:
1782 os.unlink(cleanup)
1773 os.unlink(cleanup)
1783
1774
1784 def init(ui, dest=".", **opts):
1775 def init(ui, dest=".", **opts):
1785 """create a new repository in the given directory
1776 """create a new repository in the given directory
1786
1777
1787 Initialize a new repository in the given directory. If the given
1778 Initialize a new repository in the given directory. If the given
1788 directory does not exist, it is created.
1779 directory does not exist, it is created.
1789
1780
1790 If no directory is given, the current directory is used.
1781 If no directory is given, the current directory is used.
1791
1782
1792 It is possible to specify an ssh:// URL as the destination.
1783 It is possible to specify an ssh:// URL as the destination.
1793 Look at the help text for the pull command for important details
1784 Look at the help text for the pull command for important details
1794 about ssh:// URLs.
1785 about ssh:// URLs.
1795 """
1786 """
1796 setremoteconfig(ui, opts)
1787 setremoteconfig(ui, opts)
1797 hg.repository(ui, dest, create=1)
1788 hg.repository(ui, dest, create=1)
1798
1789
1799 def locate(ui, repo, *pats, **opts):
1790 def locate(ui, repo, *pats, **opts):
1800 """locate files matching specific patterns
1791 """locate files matching specific patterns
1801
1792
1802 Print all files under Mercurial control whose names match the
1793 Print all files under Mercurial control whose names match the
1803 given patterns.
1794 given patterns.
1804
1795
1805 This command searches the current directory and its
1796 This command searches the current directory and its
1806 subdirectories. To search an entire repository, move to the root
1797 subdirectories. To search an entire repository, move to the root
1807 of the repository.
1798 of the repository.
1808
1799
1809 If no patterns are given to match, this command prints all file
1800 If no patterns are given to match, this command prints all file
1810 names.
1801 names.
1811
1802
1812 If you want to feed the output of this command into the "xargs"
1803 If you want to feed the output of this command into the "xargs"
1813 command, use the "-0" option to both this command and "xargs".
1804 command, use the "-0" option to both this command and "xargs".
1814 This will avoid the problem of "xargs" treating single filenames
1805 This will avoid the problem of "xargs" treating single filenames
1815 that contain white space as multiple filenames.
1806 that contain white space as multiple filenames.
1816 """
1807 """
1817 end = opts['print0'] and '\0' or '\n'
1808 end = opts['print0'] and '\0' or '\n'
1818 rev = opts['rev']
1809 rev = opts['rev']
1819 if rev:
1810 if rev:
1820 node = repo.lookup(rev)
1811 node = repo.lookup(rev)
1821 else:
1812 else:
1822 node = None
1813 node = None
1823
1814
1824 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1815 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
1825 head='(?:.*/|)'):
1816 head='(?:.*/|)'):
1826 if not node and repo.dirstate.state(abs) == '?':
1817 if not node and repo.dirstate.state(abs) == '?':
1827 continue
1818 continue
1828 if opts['fullpath']:
1819 if opts['fullpath']:
1829 ui.write(os.path.join(repo.root, abs), end)
1820 ui.write(os.path.join(repo.root, abs), end)
1830 else:
1821 else:
1831 ui.write(((pats and rel) or abs), end)
1822 ui.write(((pats and rel) or abs), end)
1832
1823
1833 def log(ui, repo, *pats, **opts):
1824 def log(ui, repo, *pats, **opts):
1834 """show revision history of entire repository or files
1825 """show revision history of entire repository or files
1835
1826
1836 Print the revision history of the specified files or the entire
1827 Print the revision history of the specified files or the entire
1837 project.
1828 project.
1838
1829
1839 File history is shown without following rename or copy history of
1830 File history is shown without following rename or copy history of
1840 files. Use -f/--follow with a file name to follow history across
1831 files. Use -f/--follow with a file name to follow history across
1841 renames and copies. --follow without a file name will only show
1832 renames and copies. --follow without a file name will only show
1842 ancestors or descendants of the starting revision. --follow-first
1833 ancestors or descendants of the starting revision. --follow-first
1843 only follows the first parent of merge revisions.
1834 only follows the first parent of merge revisions.
1844
1835
1845 If no revision range is specified, the default is tip:0 unless
1836 If no revision range is specified, the default is tip:0 unless
1846 --follow is set, in which case the working directory parent is
1837 --follow is set, in which case the working directory parent is
1847 used as the starting revision.
1838 used as the starting revision.
1848
1839
1849 By default this command outputs: changeset id and hash, tags,
1840 By default this command outputs: changeset id and hash, tags,
1850 non-trivial parents, user, date and time, and a summary for each
1841 non-trivial parents, user, date and time, and a summary for each
1851 commit. When the -v/--verbose switch is used, the list of changed
1842 commit. When the -v/--verbose switch is used, the list of changed
1852 files and full commit message is shown.
1843 files and full commit message is shown.
1853 """
1844 """
1854 class dui(object):
1845 class dui(object):
1855 # Implement and delegate some ui protocol. Save hunks of
1846 # Implement and delegate some ui protocol. Save hunks of
1856 # output for later display in the desired order.
1847 # output for later display in the desired order.
1857 def __init__(self, ui):
1848 def __init__(self, ui):
1858 self.ui = ui
1849 self.ui = ui
1859 self.hunk = {}
1850 self.hunk = {}
1860 self.header = {}
1851 self.header = {}
1861 def bump(self, rev):
1852 def bump(self, rev):
1862 self.rev = rev
1853 self.rev = rev
1863 self.hunk[rev] = []
1854 self.hunk[rev] = []
1864 self.header[rev] = []
1855 self.header[rev] = []
1865 def note(self, *args):
1856 def note(self, *args):
1866 if self.verbose:
1857 if self.verbose:
1867 self.write(*args)
1858 self.write(*args)
1868 def status(self, *args):
1859 def status(self, *args):
1869 if not self.quiet:
1860 if not self.quiet:
1870 self.write(*args)
1861 self.write(*args)
1871 def write(self, *args):
1862 def write(self, *args):
1872 self.hunk[self.rev].append(args)
1863 self.hunk[self.rev].append(args)
1873 def write_header(self, *args):
1864 def write_header(self, *args):
1874 self.header[self.rev].append(args)
1865 self.header[self.rev].append(args)
1875 def debug(self, *args):
1866 def debug(self, *args):
1876 if self.debugflag:
1867 if self.debugflag:
1877 self.write(*args)
1868 self.write(*args)
1878 def __getattr__(self, key):
1869 def __getattr__(self, key):
1879 return getattr(self.ui, key)
1870 return getattr(self.ui, key)
1880
1871
1881 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1872 changeiter, getchange, matchfn = walkchangerevs(ui, repo, pats, opts)
1882
1873
1883 if opts['limit']:
1874 if opts['limit']:
1884 try:
1875 try:
1885 limit = int(opts['limit'])
1876 limit = int(opts['limit'])
1886 except ValueError:
1877 except ValueError:
1887 raise util.Abort(_('limit must be a positive integer'))
1878 raise util.Abort(_('limit must be a positive integer'))
1888 if limit <= 0: raise util.Abort(_('limit must be positive'))
1879 if limit <= 0: raise util.Abort(_('limit must be positive'))
1889 else:
1880 else:
1890 limit = sys.maxint
1881 limit = sys.maxint
1891 count = 0
1882 count = 0
1892
1883
1893 displayer = show_changeset(ui, repo, opts)
1884 displayer = show_changeset(ui, repo, opts)
1894 for st, rev, fns in changeiter:
1885 for st, rev, fns in changeiter:
1895 if st == 'window':
1886 if st == 'window':
1896 du = dui(ui)
1887 du = dui(ui)
1897 displayer.ui = du
1888 displayer.ui = du
1898 elif st == 'add':
1889 elif st == 'add':
1899 du.bump(rev)
1890 du.bump(rev)
1900 changenode = repo.changelog.node(rev)
1891 changenode = repo.changelog.node(rev)
1901 parents = [p for p in repo.changelog.parents(changenode)
1892 parents = [p for p in repo.changelog.parents(changenode)
1902 if p != nullid]
1893 if p != nullid]
1903 if opts['no_merges'] and len(parents) == 2:
1894 if opts['no_merges'] and len(parents) == 2:
1904 continue
1895 continue
1905 if opts['only_merges'] and len(parents) != 2:
1896 if opts['only_merges'] and len(parents) != 2:
1906 continue
1897 continue
1907
1898
1908 if opts['keyword']:
1899 if opts['keyword']:
1909 changes = getchange(rev)
1900 changes = getchange(rev)
1910 miss = 0
1901 miss = 0
1911 for k in [kw.lower() for kw in opts['keyword']]:
1902 for k in [kw.lower() for kw in opts['keyword']]:
1912 if not (k in changes[1].lower() or
1903 if not (k in changes[1].lower() or
1913 k in changes[4].lower() or
1904 k in changes[4].lower() or
1914 k in " ".join(changes[3][:20]).lower()):
1905 k in " ".join(changes[3][:20]).lower()):
1915 miss = 1
1906 miss = 1
1916 break
1907 break
1917 if miss:
1908 if miss:
1918 continue
1909 continue
1919
1910
1920 br = None
1911 br = None
1921 if opts['branches']:
1912 if opts['branches']:
1922 br = repo.branchlookup([repo.changelog.node(rev)])
1913 br = repo.branchlookup([repo.changelog.node(rev)])
1923
1914
1924 displayer.show(rev, brinfo=br)
1915 displayer.show(rev, brinfo=br)
1925 if opts['patch']:
1916 if opts['patch']:
1926 prev = (parents and parents[0]) or nullid
1917 prev = (parents and parents[0]) or nullid
1927 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1918 patch.diff(repo, prev, changenode, match=matchfn, fp=du)
1928 du.write("\n\n")
1919 du.write("\n\n")
1929 elif st == 'iter':
1920 elif st == 'iter':
1930 if count == limit: break
1921 if count == limit: break
1931 if du.header[rev]:
1922 if du.header[rev]:
1932 for args in du.header[rev]:
1923 for args in du.header[rev]:
1933 ui.write_header(*args)
1924 ui.write_header(*args)
1934 if du.hunk[rev]:
1925 if du.hunk[rev]:
1935 count += 1
1926 count += 1
1936 for args in du.hunk[rev]:
1927 for args in du.hunk[rev]:
1937 ui.write(*args)
1928 ui.write(*args)
1938
1929
1939 def manifest(ui, repo, rev=None):
1930 def manifest(ui, repo, rev=None):
1940 """output the latest or given revision of the project manifest
1931 """output the latest or given revision of the project manifest
1941
1932
1942 Print a list of version controlled files for the given revision.
1933 Print a list of version controlled files for the given revision.
1943
1934
1944 The manifest is the list of files being version controlled. If no revision
1935 The manifest is the list of files being version controlled. If no revision
1945 is given then the tip is used.
1936 is given then the tip is used.
1946 """
1937 """
1947 if rev:
1938 if rev:
1948 try:
1939 try:
1949 # assume all revision numbers are for changesets
1940 # assume all revision numbers are for changesets
1950 n = repo.lookup(rev)
1941 n = repo.lookup(rev)
1951 change = repo.changelog.read(n)
1942 change = repo.changelog.read(n)
1952 n = change[0]
1943 n = change[0]
1953 except hg.RepoError:
1944 except hg.RepoError:
1954 n = repo.manifest.lookup(rev)
1945 n = repo.manifest.lookup(rev)
1955 else:
1946 else:
1956 n = repo.manifest.tip()
1947 n = repo.manifest.tip()
1957 m = repo.manifest.read(n)
1948 m = repo.manifest.read(n)
1958 files = m.keys()
1949 files = m.keys()
1959 files.sort()
1950 files.sort()
1960
1951
1961 for f in files:
1952 for f in files:
1962 ui.write("%40s %3s %s\n" % (hex(m[f]),
1953 ui.write("%40s %3s %s\n" % (hex(m[f]),
1963 m.execf(f) and "755" or "644", f))
1954 m.execf(f) and "755" or "644", f))
1964
1955
1965 def merge(ui, repo, node=None, force=None, branch=None):
1956 def merge(ui, repo, node=None, force=None, branch=None):
1966 """Merge working directory with another revision
1957 """Merge working directory with another revision
1967
1958
1968 Merge the contents of the current working directory and the
1959 Merge the contents of the current working directory and the
1969 requested revision. Files that changed between either parent are
1960 requested revision. Files that changed between either parent are
1970 marked as changed for the next commit, and a commit must be
1961 marked as changed for the next commit, and a commit must be
1971 performed before any further updates are allowed.
1962 performed before any further updates are allowed.
1972
1973 If no revision is specified, the working directory's parent is a
1974 head revision, and the repository contains exactly one other head,
1975 the other head is merged with by default. Otherwise, an explicit
1976 revision to merge with must be provided.
1977 """
1963 """
1978
1964
1979 if node:
1965 node = _lookup(repo, node, branch)
1980 node = _lookup(repo, node, branch)
1981 else:
1982 heads = repo.heads()
1983 if len(heads) > 2:
1984 raise util.Abort(_('repo has %d heads - '
1985 'please merge with an explicit rev') %
1986 len(heads))
1987 if len(heads) == 1:
1988 raise util.Abort(_('there is nothing to merge - '
1989 'use "hg update" instead'))
1990 parent = repo.dirstate.parents()[0]
1991 if parent not in heads:
1992 raise util.Abort(_('working dir not at a head rev - '
1993 'use "hg update" or merge with an explicit rev'))
1994 node = parent == heads[0] and heads[-1] or heads[0]
1995 return hg.merge(repo, node, force=force)
1966 return hg.merge(repo, node, force=force)
1996
1967
1997 def outgoing(ui, repo, dest=None, **opts):
1968 def outgoing(ui, repo, dest=None, **opts):
1998 """show changesets not found in destination
1969 """show changesets not found in destination
1999
1970
2000 Show changesets not found in the specified destination repository or
1971 Show changesets not found in the specified destination repository or
2001 the default push location. These are the changesets that would be pushed
1972 the default push location. These are the changesets that would be pushed
2002 if a push was requested.
1973 if a push was requested.
2003
1974
2004 See pull for valid destination format details.
1975 See pull for valid destination format details.
2005 """
1976 """
2006 dest = ui.expandpath(dest or 'default-push', dest or 'default')
1977 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2007 setremoteconfig(ui, opts)
1978 setremoteconfig(ui, opts)
2008 revs = None
1979 revs = None
2009 if opts['rev']:
1980 if opts['rev']:
2010 revs = [repo.lookup(rev) for rev in opts['rev']]
1981 revs = [repo.lookup(rev) for rev in opts['rev']]
2011
1982
2012 other = hg.repository(ui, dest)
1983 other = hg.repository(ui, dest)
2013 o = repo.findoutgoing(other, force=opts['force'])
1984 o = repo.findoutgoing(other, force=opts['force'])
2014 if not o:
1985 if not o:
2015 ui.status(_("no changes found\n"))
1986 ui.status(_("no changes found\n"))
2016 return
1987 return
2017 o = repo.changelog.nodesbetween(o, revs)[0]
1988 o = repo.changelog.nodesbetween(o, revs)[0]
2018 if opts['newest_first']:
1989 if opts['newest_first']:
2019 o.reverse()
1990 o.reverse()
2020 displayer = show_changeset(ui, repo, opts)
1991 displayer = show_changeset(ui, repo, opts)
2021 for n in o:
1992 for n in o:
2022 parents = [p for p in repo.changelog.parents(n) if p != nullid]
1993 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2023 if opts['no_merges'] and len(parents) == 2:
1994 if opts['no_merges'] and len(parents) == 2:
2024 continue
1995 continue
2025 displayer.show(changenode=n)
1996 displayer.show(changenode=n)
2026 if opts['patch']:
1997 if opts['patch']:
2027 prev = (parents and parents[0]) or nullid
1998 prev = (parents and parents[0]) or nullid
2028 patch.diff(repo, prev, n)
1999 patch.diff(repo, prev, n)
2029 ui.write("\n")
2000 ui.write("\n")
2030
2001
2031 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2002 def parents(ui, repo, file_=None, rev=None, branches=None, **opts):
2032 """show the parents of the working dir or revision
2003 """show the parents of the working dir or revision
2033
2004
2034 Print the working directory's parent revisions.
2005 Print the working directory's parent revisions.
2035 """
2006 """
2036 # legacy
2007 # legacy
2037 if file_ and not rev:
2008 if file_ and not rev:
2038 try:
2009 try:
2039 rev = repo.lookup(file_)
2010 rev = repo.lookup(file_)
2040 file_ = None
2011 file_ = None
2041 except hg.RepoError:
2012 except hg.RepoError:
2042 pass
2013 pass
2043 else:
2014 else:
2044 ui.warn(_("'hg parent REV' is deprecated, "
2015 ui.warn(_("'hg parent REV' is deprecated, "
2045 "please use 'hg parents -r REV instead\n"))
2016 "please use 'hg parents -r REV instead\n"))
2046
2017
2047 if rev:
2018 if rev:
2048 if file_:
2019 if file_:
2049 ctx = repo.filectx(file_, changeid=rev)
2020 ctx = repo.filectx(file_, changeid=rev)
2050 else:
2021 else:
2051 ctx = repo.changectx(rev)
2022 ctx = repo.changectx(rev)
2052 p = [cp.node() for cp in ctx.parents()]
2023 p = [cp.node() for cp in ctx.parents()]
2053 else:
2024 else:
2054 p = repo.dirstate.parents()
2025 p = repo.dirstate.parents()
2055
2026
2056 br = None
2027 br = None
2057 if branches is not None:
2028 if branches is not None:
2058 br = repo.branchlookup(p)
2029 br = repo.branchlookup(p)
2059 displayer = show_changeset(ui, repo, opts)
2030 displayer = show_changeset(ui, repo, opts)
2060 for n in p:
2031 for n in p:
2061 if n != nullid:
2032 if n != nullid:
2062 displayer.show(changenode=n, brinfo=br)
2033 displayer.show(changenode=n, brinfo=br)
2063
2034
2064 def paths(ui, repo, search=None):
2035 def paths(ui, repo, search=None):
2065 """show definition of symbolic path names
2036 """show definition of symbolic path names
2066
2037
2067 Show definition of symbolic path name NAME. If no name is given, show
2038 Show definition of symbolic path name NAME. If no name is given, show
2068 definition of available names.
2039 definition of available names.
2069
2040
2070 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2041 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2071 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2042 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2072 """
2043 """
2073 if search:
2044 if search:
2074 for name, path in ui.configitems("paths"):
2045 for name, path in ui.configitems("paths"):
2075 if name == search:
2046 if name == search:
2076 ui.write("%s\n" % path)
2047 ui.write("%s\n" % path)
2077 return
2048 return
2078 ui.warn(_("not found!\n"))
2049 ui.warn(_("not found!\n"))
2079 return 1
2050 return 1
2080 else:
2051 else:
2081 for name, path in ui.configitems("paths"):
2052 for name, path in ui.configitems("paths"):
2082 ui.write("%s = %s\n" % (name, path))
2053 ui.write("%s = %s\n" % (name, path))
2083
2054
2084 def postincoming(ui, repo, modheads, optupdate):
2055 def postincoming(ui, repo, modheads, optupdate):
2085 if modheads == 0:
2056 if modheads == 0:
2086 return
2057 return
2087 if optupdate:
2058 if optupdate:
2088 if modheads == 1:
2059 if modheads == 1:
2089 return hg.update(repo, repo.changelog.tip()) # update
2060 return hg.update(repo, repo.changelog.tip()) # update
2090 else:
2061 else:
2091 ui.status(_("not updating, since new heads added\n"))
2062 ui.status(_("not updating, since new heads added\n"))
2092 if modheads > 1:
2063 if modheads > 1:
2093 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2064 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2094 else:
2065 else:
2095 ui.status(_("(run 'hg update' to get a working copy)\n"))
2066 ui.status(_("(run 'hg update' to get a working copy)\n"))
2096
2067
2097 def pull(ui, repo, source="default", **opts):
2068 def pull(ui, repo, source="default", **opts):
2098 """pull changes from the specified source
2069 """pull changes from the specified source
2099
2070
2100 Pull changes from a remote repository to a local one.
2071 Pull changes from a remote repository to a local one.
2101
2072
2102 This finds all changes from the repository at the specified path
2073 This finds all changes from the repository at the specified path
2103 or URL and adds them to the local repository. By default, this
2074 or URL and adds them to the local repository. By default, this
2104 does not update the copy of the project in the working directory.
2075 does not update the copy of the project in the working directory.
2105
2076
2106 Valid URLs are of the form:
2077 Valid URLs are of the form:
2107
2078
2108 local/filesystem/path
2079 local/filesystem/path
2109 http://[user@]host[:port]/[path]
2080 http://[user@]host[:port]/[path]
2110 https://[user@]host[:port]/[path]
2081 https://[user@]host[:port]/[path]
2111 ssh://[user@]host[:port]/[path]
2082 ssh://[user@]host[:port]/[path]
2112
2083
2113 Some notes about using SSH with Mercurial:
2084 Some notes about using SSH with Mercurial:
2114 - SSH requires an accessible shell account on the destination machine
2085 - SSH requires an accessible shell account on the destination machine
2115 and a copy of hg in the remote path, or specified with the --remotecmd option.
2086 and a copy of hg in the remote path, or specified with the --remotecmd option.
2116 - path is relative to the remote user's home directory by default.
2087 - path is relative to the remote user's home directory by default.
2117 Use an extra slash at the start of a path to specify an absolute path:
2088 Use an extra slash at the start of a path to specify an absolute path:
2118 ssh://example.com//tmp/repository
2089 ssh://example.com//tmp/repository
2119 - Mercurial doesn't use its own compression via SSH; the right thing
2090 - Mercurial doesn't use its own compression via SSH; the right thing
2120 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2091 to do is to configure it in your ~/.ssh/ssh_config, e.g.:
2121 Host *.mylocalnetwork.example.com
2092 Host *.mylocalnetwork.example.com
2122 Compression off
2093 Compression off
2123 Host *
2094 Host *
2124 Compression on
2095 Compression on
2125 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2096 Alternatively specify "ssh -C" as your ssh command in your hgrc or
2126 with the --ssh command line option.
2097 with the --ssh command line option.
2127 """
2098 """
2128 source = ui.expandpath(source)
2099 source = ui.expandpath(source)
2129 setremoteconfig(ui, opts)
2100 setremoteconfig(ui, opts)
2130
2101
2131 other = hg.repository(ui, source)
2102 other = hg.repository(ui, source)
2132 ui.status(_('pulling from %s\n') % (source))
2103 ui.status(_('pulling from %s\n') % (source))
2133 revs = None
2104 revs = None
2134 if opts['rev'] and not other.local():
2105 if opts['rev'] and not other.local():
2135 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2106 raise util.Abort(_("pull -r doesn't work for remote repositories yet"))
2136 elif opts['rev']:
2107 elif opts['rev']:
2137 revs = [other.lookup(rev) for rev in opts['rev']]
2108 revs = [other.lookup(rev) for rev in opts['rev']]
2138 modheads = repo.pull(other, heads=revs, force=opts['force'])
2109 modheads = repo.pull(other, heads=revs, force=opts['force'])
2139 return postincoming(ui, repo, modheads, opts['update'])
2110 return postincoming(ui, repo, modheads, opts['update'])
2140
2111
2141 def push(ui, repo, dest=None, **opts):
2112 def push(ui, repo, dest=None, **opts):
2142 """push changes to the specified destination
2113 """push changes to the specified destination
2143
2114
2144 Push changes from the local repository to the given destination.
2115 Push changes from the local repository to the given destination.
2145
2116
2146 This is the symmetrical operation for pull. It helps to move
2117 This is the symmetrical operation for pull. It helps to move
2147 changes from the current repository to a different one. If the
2118 changes from the current repository to a different one. If the
2148 destination is local this is identical to a pull in that directory
2119 destination is local this is identical to a pull in that directory
2149 from the current one.
2120 from the current one.
2150
2121
2151 By default, push will refuse to run if it detects the result would
2122 By default, push will refuse to run if it detects the result would
2152 increase the number of remote heads. This generally indicates the
2123 increase the number of remote heads. This generally indicates the
2153 the client has forgotten to sync and merge before pushing.
2153 client has forgotten to sync and merge before pushing.
2124 client has forgotten to sync and merge before pushing.
2125
2155 Valid URLs are of the form:
2126 Valid URLs are of the form:
2156
2127
2157 local/filesystem/path
2128 local/filesystem/path
2158 ssh://[user@]host[:port]/[path]
2129 ssh://[user@]host[:port]/[path]
2159
2130
2160 Look at the help text for the pull command for important details
2131 Look at the help text for the pull command for important details
2161 about ssh:// URLs.
2132 about ssh:// URLs.
2162
2133
2163 Pushing to http:// and https:// URLs is possible, too, if this
2134 Pushing to http:// and https:// URLs is possible, too, if this
2164 feature is enabled on the remote Mercurial server.
2135 feature is enabled on the remote Mercurial server.
2165 """
2136 """
2166 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2137 dest = ui.expandpath(dest or 'default-push', dest or 'default')
2167 setremoteconfig(ui, opts)
2138 setremoteconfig(ui, opts)
2168
2139
2169 other = hg.repository(ui, dest)
2140 other = hg.repository(ui, dest)
2170 ui.status('pushing to %s\n' % (dest))
2141 ui.status('pushing to %s\n' % (dest))
2171 revs = None
2142 revs = None
2172 if opts['rev']:
2143 if opts['rev']:
2173 revs = [repo.lookup(rev) for rev in opts['rev']]
2144 revs = [repo.lookup(rev) for rev in opts['rev']]
2174 r = repo.push(other, opts['force'], revs=revs)
2145 r = repo.push(other, opts['force'], revs=revs)
2175 return r == 0
2146 return r == 0
2176
2147
2177 def rawcommit(ui, repo, *flist, **rc):
2148 def rawcommit(ui, repo, *flist, **rc):
2178 """raw commit interface (DEPRECATED)
2149 """raw commit interface (DEPRECATED)
2179
2150
2180 (DEPRECATED)
2151 (DEPRECATED)
2181 Lowlevel commit, for use in helper scripts.
2152 Lowlevel commit, for use in helper scripts.
2182
2153
2183 This command is not intended to be used by normal users, as it is
2154 This command is not intended to be used by normal users, as it is
2184 primarily useful for importing from other SCMs.
2155 primarily useful for importing from other SCMs.
2185
2156
2186 This command is now deprecated and will be removed in a future
2157 This command is now deprecated and will be removed in a future
2187 release, please use debugsetparents and commit instead.
2158 release, please use debugsetparents and commit instead.
2188 """
2159 """
2189
2160
2190 ui.warn(_("(the rawcommit command is deprecated)\n"))
2161 ui.warn(_("(the rawcommit command is deprecated)\n"))
2191
2162
2192 message = rc['message']
2163 message = rc['message']
2193 if not message and rc['logfile']:
2164 if not message and rc['logfile']:
2194 try:
2165 try:
2195 message = open(rc['logfile']).read()
2166 message = open(rc['logfile']).read()
2196 except IOError:
2167 except IOError:
2197 pass
2168 pass
2198 if not message and not rc['logfile']:
2169 if not message and not rc['logfile']:
2199 raise util.Abort(_("missing commit message"))
2170 raise util.Abort(_("missing commit message"))
2200
2171
2201 files = relpath(repo, list(flist))
2172 files = relpath(repo, list(flist))
2202 if rc['files']:
2173 if rc['files']:
2203 files += open(rc['files']).read().splitlines()
2174 files += open(rc['files']).read().splitlines()
2204
2175
2205 rc['parent'] = map(repo.lookup, rc['parent'])
2176 rc['parent'] = map(repo.lookup, rc['parent'])
2206
2177
2207 try:
2178 try:
2208 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2179 repo.rawcommit(files, message, rc['user'], rc['date'], *rc['parent'])
2209 except ValueError, inst:
2180 except ValueError, inst:
2210 raise util.Abort(str(inst))
2181 raise util.Abort(str(inst))
2211
2182
2212 def recover(ui, repo):
2183 def recover(ui, repo):
2213 """roll back an interrupted transaction
2184 """roll back an interrupted transaction
2214
2185
2215 Recover from an interrupted commit or pull.
2186 Recover from an interrupted commit or pull.
2216
2187
2217 This command tries to fix the repository status after an interrupted
2188 This command tries to fix the repository status after an interrupted
2218 operation. It should only be necessary when Mercurial suggests it.
2189 operation. It should only be necessary when Mercurial suggests it.
2219 """
2190 """
2220 if repo.recover():
2191 if repo.recover():
2221 return hg.verify(repo)
2192 return hg.verify(repo)
2222 return 1
2193 return 1
2223
2194
2224 def remove(ui, repo, *pats, **opts):
2195 def remove(ui, repo, *pats, **opts):
2225 """remove the specified files on the next commit
2196 """remove the specified files on the next commit
2226
2197
2227 Schedule the indicated files for removal from the repository.
2198 Schedule the indicated files for removal from the repository.
2228
2199
2229 This command schedules the files to be removed at the next commit.
2200 This command schedules the files to be removed at the next commit.
2230 This only removes files from the current branch, not from the
2201 This only removes files from the current branch, not from the
2231 entire project history. If the files still exist in the working
2202 entire project history. If the files still exist in the working
2232 directory, they will be deleted from it. If invoked with --after,
2203 directory, they will be deleted from it. If invoked with --after,
2233 files that have been manually deleted are marked as removed.
2204 files that have been manually deleted are marked as removed.
2234
2205
2235 Modified files and added files are not removed by default. To
2206 Modified files and added files are not removed by default. To
2236 remove them, use the -f/--force option.
2207 remove them, use the -f/--force option.
2237 """
2208 """
2238 names = []
2209 names = []
2239 if not opts['after'] and not pats:
2210 if not opts['after'] and not pats:
2240 raise util.Abort(_('no files specified'))
2211 raise util.Abort(_('no files specified'))
2241 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2212 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2242 exact = dict.fromkeys(files)
2213 exact = dict.fromkeys(files)
2243 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2214 mardu = map(dict.fromkeys, repo.status(files=files, match=matchfn))[:5]
2244 modified, added, removed, deleted, unknown = mardu
2215 modified, added, removed, deleted, unknown = mardu
2245 remove, forget = [], []
2216 remove, forget = [], []
2246 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2217 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts):
2247 reason = None
2218 reason = None
2248 if abs not in deleted and opts['after']:
2219 if abs not in deleted and opts['after']:
2249 reason = _('is still present')
2220 reason = _('is still present')
2250 elif abs in modified and not opts['force']:
2221 elif abs in modified and not opts['force']:
2251 reason = _('is modified (use -f to force removal)')
2222 reason = _('is modified (use -f to force removal)')
2252 elif abs in added:
2223 elif abs in added:
2253 if opts['force']:
2224 if opts['force']:
2254 forget.append(abs)
2225 forget.append(abs)
2255 continue
2226 continue
2256 reason = _('has been marked for add (use -f to force removal)')
2227 reason = _('has been marked for add (use -f to force removal)')
2257 elif abs in unknown:
2228 elif abs in unknown:
2258 reason = _('is not managed')
2229 reason = _('is not managed')
2259 elif abs in removed:
2230 elif abs in removed:
2260 continue
2231 continue
2261 if reason:
2232 if reason:
2262 if exact:
2233 if exact:
2263 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2234 ui.warn(_('not removing %s: file %s\n') % (rel, reason))
2264 else:
2235 else:
2265 if ui.verbose or not exact:
2236 if ui.verbose or not exact:
2266 ui.status(_('removing %s\n') % rel)
2237 ui.status(_('removing %s\n') % rel)
2267 remove.append(abs)
2238 remove.append(abs)
2268 repo.forget(forget)
2239 repo.forget(forget)
2269 repo.remove(remove, unlink=not opts['after'])
2240 repo.remove(remove, unlink=not opts['after'])
2270
2241
2271 def rename(ui, repo, *pats, **opts):
2242 def rename(ui, repo, *pats, **opts):
2272 """rename files; equivalent of copy + remove
2243 """rename files; equivalent of copy + remove
2273
2244
2274 Mark dest as copies of sources; mark sources for deletion. If
2245 Mark dest as copies of sources; mark sources for deletion. If
2275 dest is a directory, copies are put in that directory. If dest is
2246 dest is a directory, copies are put in that directory. If dest is
2276 a file, there can only be one source.
2247 a file, there can only be one source.
2277
2248
2278 By default, this command copies the contents of files as they
2249 By default, this command copies the contents of files as they
2279 stand in the working directory. If invoked with --after, the
2250 stand in the working directory. If invoked with --after, the
2280 operation is recorded, but no copying is performed.
2251 operation is recorded, but no copying is performed.
2281
2252
2282 This command takes effect in the next commit.
2253 This command takes effect in the next commit.
2283
2254
2284 NOTE: This command should be treated as experimental. While it
2255 NOTE: This command should be treated as experimental. While it
2285 should properly record renamed files, this information is not yet
2256 should properly record renamed files, this information is not yet
2286 fully used by merge, nor fully reported by log.
2257 fully used by merge, nor fully reported by log.
2287 """
2258 """
2288 wlock = repo.wlock(0)
2259 wlock = repo.wlock(0)
2289 errs, copied = docopy(ui, repo, pats, opts, wlock)
2260 errs, copied = docopy(ui, repo, pats, opts, wlock)
2290 names = []
2261 names = []
2291 for abs, rel, exact in copied:
2262 for abs, rel, exact in copied:
2292 if ui.verbose or not exact:
2263 if ui.verbose or not exact:
2293 ui.status(_('removing %s\n') % rel)
2264 ui.status(_('removing %s\n') % rel)
2294 names.append(abs)
2265 names.append(abs)
2295 if not opts.get('dry_run'):
2266 if not opts.get('dry_run'):
2296 repo.remove(names, True, wlock)
2267 repo.remove(names, True, wlock)
2297 return errs
2268 return errs
2298
2269
2299 def revert(ui, repo, *pats, **opts):
2270 def revert(ui, repo, *pats, **opts):
2300 """revert files or dirs to their states as of some revision
2271 """revert files or dirs to their states as of some revision
2301
2272
2302 With no revision specified, revert the named files or directories
2273 With no revision specified, revert the named files or directories
2303 to the contents they had in the parent of the working directory.
2274 to the contents they had in the parent of the working directory.
2304 This restores the contents of the affected files to an unmodified
2275 This restores the contents of the affected files to an unmodified
2305 state. If the working directory has two parents, you must
2276 state. If the working directory has two parents, you must
2306 explicitly specify the revision to revert to.
2277 explicitly specify the revision to revert to.
2307
2278
2308 Modified files are saved with a .orig suffix before reverting.
2279 Modified files are saved with a .orig suffix before reverting.
2309 To disable these backups, use --no-backup.
2280 To disable these backups, use --no-backup.
2310
2281
2311 Using the -r option, revert the given files or directories to
2282 Using the -r option, revert the given files or directories to
2312 their contents as of a specific revision. This can be helpful to "roll
2283 their contents as of a specific revision. This can be helpful to "roll
2313 back" some or all of a change that should not have been committed.
2284 back" some or all of a change that should not have been committed.
2314
2285
2315 Revert modifies the working directory. It does not commit any
2286 Revert modifies the working directory. It does not commit any
2316 changes, or change the parent of the working directory. If you
2287 changes, or change the parent of the working directory. If you
2317 revert to a revision other than the parent of the working
2288 revert to a revision other than the parent of the working
2318 directory, the reverted files will thus appear modified
2289 directory, the reverted files will thus appear modified
2319 afterwards.
2290 afterwards.
2320
2291
2321 If a file has been deleted, it is recreated. If the executable
2292 If a file has been deleted, it is recreated. If the executable
2322 mode of a file was changed, it is reset.
2293 mode of a file was changed, it is reset.
2323
2294
2324 If names are given, all files matching the names are reverted.
2295 If names are given, all files matching the names are reverted.
2325
2296
2326 If no arguments are given, all files in the repository are reverted.
2297 If no arguments are given, all files in the repository are reverted.
2327 """
2298 """
2328 parent, p2 = repo.dirstate.parents()
2299 parent, p2 = repo.dirstate.parents()
2329 if opts['rev']:
2300 if opts['rev']:
2330 node = repo.lookup(opts['rev'])
2301 node = repo.lookup(opts['rev'])
2331 elif p2 != nullid:
2302 elif p2 != nullid:
2332 raise util.Abort(_('working dir has two parents; '
2303 raise util.Abort(_('working dir has two parents; '
2333 'you must specify the revision to revert to'))
2304 'you must specify the revision to revert to'))
2334 else:
2305 else:
2335 node = parent
2306 node = parent
2336 mf = repo.manifest.read(repo.changelog.read(node)[0])
2307 mf = repo.manifest.read(repo.changelog.read(node)[0])
2337 if node == parent:
2308 if node == parent:
2338 pmf = mf
2309 pmf = mf
2339 else:
2310 else:
2340 pmf = None
2311 pmf = None
2341
2312
2342 wlock = repo.wlock()
2313 wlock = repo.wlock()
2343
2314
2344 # need all matching names in dirstate and manifest of target rev,
2315 # need all matching names in dirstate and manifest of target rev,
2345 # so have to walk both. do not print errors if files exist in one
2316 # so have to walk both. do not print errors if files exist in one
2346 # but not other.
2317 # but not other.
2347
2318
2348 names = {}
2319 names = {}
2349 target_only = {}
2320 target_only = {}
2350
2321
2351 # walk dirstate.
2322 # walk dirstate.
2352
2323
2353 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2324 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts,
2354 badmatch=mf.has_key):
2325 badmatch=mf.has_key):
2355 names[abs] = (rel, exact)
2326 names[abs] = (rel, exact)
2356 if src == 'b':
2327 if src == 'b':
2357 target_only[abs] = True
2328 target_only[abs] = True
2358
2329
2359 # walk target manifest.
2330 # walk target manifest.
2360
2331
2361 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2332 for src, abs, rel, exact in cmdutil.walk(repo, pats, opts, node=node,
2362 badmatch=names.has_key):
2333 badmatch=names.has_key):
2363 if abs in names: continue
2334 if abs in names: continue
2364 names[abs] = (rel, exact)
2335 names[abs] = (rel, exact)
2365 target_only[abs] = True
2336 target_only[abs] = True
2366
2337
2367 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2338 changes = repo.status(match=names.has_key, wlock=wlock)[:5]
2368 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2339 modified, added, removed, deleted, unknown = map(dict.fromkeys, changes)
2369
2340
2370 revert = ([], _('reverting %s\n'))
2341 revert = ([], _('reverting %s\n'))
2371 add = ([], _('adding %s\n'))
2342 add = ([], _('adding %s\n'))
2372 remove = ([], _('removing %s\n'))
2343 remove = ([], _('removing %s\n'))
2373 forget = ([], _('forgetting %s\n'))
2344 forget = ([], _('forgetting %s\n'))
2374 undelete = ([], _('undeleting %s\n'))
2345 undelete = ([], _('undeleting %s\n'))
2375 update = {}
2346 update = {}
2376
2347
2377 disptable = (
2348 disptable = (
2378 # dispatch table:
2349 # dispatch table:
2379 # file state
2350 # file state
2380 # action if in target manifest
2351 # action if in target manifest
2381 # action if not in target manifest
2352 # action if not in target manifest
2382 # make backup if in target manifest
2353 # make backup if in target manifest
2383 # make backup if not in target manifest
2354 # make backup if not in target manifest
2384 (modified, revert, remove, True, True),
2355 (modified, revert, remove, True, True),
2385 (added, revert, forget, True, False),
2356 (added, revert, forget, True, False),
2386 (removed, undelete, None, False, False),
2357 (removed, undelete, None, False, False),
2387 (deleted, revert, remove, False, False),
2358 (deleted, revert, remove, False, False),
2388 (unknown, add, None, True, False),
2359 (unknown, add, None, True, False),
2389 (target_only, add, None, False, False),
2360 (target_only, add, None, False, False),
2390 )
2361 )
2391
2362
2392 entries = names.items()
2363 entries = names.items()
2393 entries.sort()
2364 entries.sort()
2394
2365
2395 for abs, (rel, exact) in entries:
2366 for abs, (rel, exact) in entries:
2396 mfentry = mf.get(abs)
2367 mfentry = mf.get(abs)
2397 def handle(xlist, dobackup):
2368 def handle(xlist, dobackup):
2398 xlist[0].append(abs)
2369 xlist[0].append(abs)
2399 update[abs] = 1
2370 update[abs] = 1
2400 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2371 if dobackup and not opts['no_backup'] and os.path.exists(rel):
2401 bakname = "%s.orig" % rel
2372 bakname = "%s.orig" % rel
2402 ui.note(_('saving current version of %s as %s\n') %
2373 ui.note(_('saving current version of %s as %s\n') %
2403 (rel, bakname))
2374 (rel, bakname))
2404 if not opts.get('dry_run'):
2375 if not opts.get('dry_run'):
2405 shutil.copyfile(rel, bakname)
2376 shutil.copyfile(rel, bakname)
2406 shutil.copymode(rel, bakname)
2377 shutil.copymode(rel, bakname)
2407 if ui.verbose or not exact:
2378 if ui.verbose or not exact:
2408 ui.status(xlist[1] % rel)
2379 ui.status(xlist[1] % rel)
2409 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2380 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2410 if abs not in table: continue
2381 if abs not in table: continue
2411 # file has changed in dirstate
2382 # file has changed in dirstate
2412 if mfentry:
2383 if mfentry:
2413 handle(hitlist, backuphit)
2384 handle(hitlist, backuphit)
2414 elif misslist is not None:
2385 elif misslist is not None:
2415 handle(misslist, backupmiss)
2386 handle(misslist, backupmiss)
2416 else:
2387 else:
2417 if exact: ui.warn(_('file not managed: %s\n') % rel)
2388 if exact: ui.warn(_('file not managed: %s\n') % rel)
2418 break
2389 break
2419 else:
2390 else:
2420 # file has not changed in dirstate
2391 # file has not changed in dirstate
2421 if node == parent:
2392 if node == parent:
2422 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2393 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2423 continue
2394 continue
2424 if pmf is None:
2395 if pmf is None:
2425 # only need parent manifest in this unlikely case,
2396 # only need parent manifest in this unlikely case,
2426 # so do not read by default
2397 # so do not read by default
2427 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2398 pmf = repo.manifest.read(repo.changelog.read(parent)[0])
2428 if abs in pmf:
2399 if abs in pmf:
2429 if mfentry:
2400 if mfentry:
2430 # if version of file is same in parent and target
2401 # if version of file is same in parent and target
2431 # manifests, do nothing
2402 # manifests, do nothing
2432 if pmf[abs] != mfentry:
2403 if pmf[abs] != mfentry:
2433 handle(revert, False)
2404 handle(revert, False)
2434 else:
2405 else:
2435 handle(remove, False)
2406 handle(remove, False)
2436
2407
2437 if not opts.get('dry_run'):
2408 if not opts.get('dry_run'):
2438 repo.dirstate.forget(forget[0])
2409 repo.dirstate.forget(forget[0])
2439 r = hg.revert(repo, node, update.has_key, wlock)
2410 r = hg.revert(repo, node, update.has_key, wlock)
2440 repo.dirstate.update(add[0], 'a')
2411 repo.dirstate.update(add[0], 'a')
2441 repo.dirstate.update(undelete[0], 'n')
2412 repo.dirstate.update(undelete[0], 'n')
2442 repo.dirstate.update(remove[0], 'r')
2413 repo.dirstate.update(remove[0], 'r')
2443 return r
2414 return r
2444
2415
2445 def rollback(ui, repo):
2416 def rollback(ui, repo):
2446 """roll back the last transaction in this repository
2417 """roll back the last transaction in this repository
2447
2418
2448 Roll back the last transaction in this repository, restoring the
2419 Roll back the last transaction in this repository, restoring the
2449 project to its state prior to the transaction.
2420 project to its state prior to the transaction.
2450
2421
2451 Transactions are used to encapsulate the effects of all commands
2422 Transactions are used to encapsulate the effects of all commands
2452 that create new changesets or propagate existing changesets into a
2423 that create new changesets or propagate existing changesets into a
2453 repository. For example, the following commands are transactional,
2424 repository. For example, the following commands are transactional,
2454 and their effects can be rolled back:
2425 and their effects can be rolled back:
2455
2426
2456 commit
2427 commit
2457 import
2428 import
2458 pull
2429 pull
2459 push (with this repository as destination)
2430 push (with this repository as destination)
2460 unbundle
2431 unbundle
2461
2432
2462 This command should be used with care. There is only one level of
2433 This command should be used with care. There is only one level of
2463 rollback, and there is no way to undo a rollback.
2434 rollback, and there is no way to undo a rollback.
2464
2435
2465 This command is not intended for use on public repositories. Once
2436 This command is not intended for use on public repositories. Once
2466 changes are visible for pull by other users, rolling a transaction
2437 changes are visible for pull by other users, rolling a transaction
2467 back locally is ineffective (someone else may already have pulled
2438 back locally is ineffective (someone else may already have pulled
2468 the changes). Furthermore, a race is possible with readers of the
2439 the changes). Furthermore, a race is possible with readers of the
2469 repository; for example an in-progress pull from the repository
2440 repository; for example an in-progress pull from the repository
2470 may fail if a rollback is performed.
2441 may fail if a rollback is performed.
2471 """
2442 """
2472 repo.rollback()
2443 repo.rollback()
2473
2444
2474 def root(ui, repo):
2445 def root(ui, repo):
2475 """print the root (top) of the current working dir
2446 """print the root (top) of the current working dir
2476
2447
2477 Print the root directory of the current repository.
2448 Print the root directory of the current repository.
2478 """
2449 """
2479 ui.write(repo.root + "\n")
2450 ui.write(repo.root + "\n")
2480
2451
2481 def serve(ui, repo, **opts):
2452 def serve(ui, repo, **opts):
2482 """export the repository via HTTP
2453 """export the repository via HTTP
2483
2454
2484 Start a local HTTP repository browser and pull server.
2455 Start a local HTTP repository browser and pull server.
2485
2456
2486 By default, the server logs accesses to stdout and errors to
2457 By default, the server logs accesses to stdout and errors to
2487 stderr. Use the "-A" and "-E" options to log to files.
2458 stderr. Use the "-A" and "-E" options to log to files.
2488 """
2459 """
2489
2460
2490 if opts["stdio"]:
2461 if opts["stdio"]:
2491 if repo is None:
2462 if repo is None:
2492 raise hg.RepoError(_('no repo found'))
2463 raise hg.RepoError(_('no repo found'))
2493 s = sshserver.sshserver(ui, repo)
2464 s = sshserver.sshserver(ui, repo)
2494 s.serve_forever()
2465 s.serve_forever()
2495
2466
2496 optlist = ("name templates style address port ipv6"
2467 optlist = ("name templates style address port ipv6"
2497 " accesslog errorlog webdir_conf")
2468 " accesslog errorlog webdir_conf")
2498 for o in optlist.split():
2469 for o in optlist.split():
2499 if opts[o]:
2470 if opts[o]:
2500 ui.setconfig("web", o, opts[o])
2471 ui.setconfig("web", o, opts[o])
2501
2472
2502 if repo is None and not ui.config("web", "webdir_conf"):
2473 if repo is None and not ui.config("web", "webdir_conf"):
2503 raise hg.RepoError(_('no repo found'))
2474 raise hg.RepoError(_('no repo found'))
2504
2475
2505 if opts['daemon'] and not opts['daemon_pipefds']:
2476 if opts['daemon'] and not opts['daemon_pipefds']:
2506 rfd, wfd = os.pipe()
2477 rfd, wfd = os.pipe()
2507 args = sys.argv[:]
2478 args = sys.argv[:]
2508 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2479 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
2509 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2480 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
2510 args[0], args)
2481 args[0], args)
2511 os.close(wfd)
2482 os.close(wfd)
2512 os.read(rfd, 1)
2483 os.read(rfd, 1)
2513 os._exit(0)
2484 os._exit(0)
2514
2485
2515 try:
2486 try:
2516 httpd = hgweb.server.create_server(ui, repo)
2487 httpd = hgweb.server.create_server(ui, repo)
2517 except socket.error, inst:
2488 except socket.error, inst:
2518 raise util.Abort(_('cannot start server: ') + inst.args[1])
2489 raise util.Abort(_('cannot start server: ') + inst.args[1])
2519
2490
2520 if ui.verbose:
2491 if ui.verbose:
2521 addr, port = httpd.socket.getsockname()
2492 addr, port = httpd.socket.getsockname()
2522 if addr == '0.0.0.0':
2493 if addr == '0.0.0.0':
2523 addr = socket.gethostname()
2494 addr = socket.gethostname()
2524 else:
2495 else:
2525 try:
2496 try:
2526 addr = socket.gethostbyaddr(addr)[0]
2497 addr = socket.gethostbyaddr(addr)[0]
2527 except socket.error:
2498 except socket.error:
2528 pass
2499 pass
2529 if port != 80:
2500 if port != 80:
2530 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2501 ui.status(_('listening at http://%s:%d/\n') % (addr, port))
2531 else:
2502 else:
2532 ui.status(_('listening at http://%s/\n') % addr)
2503 ui.status(_('listening at http://%s/\n') % addr)
2533
2504
2534 if opts['pid_file']:
2505 if opts['pid_file']:
2535 fp = open(opts['pid_file'], 'w')
2506 fp = open(opts['pid_file'], 'w')
2536 fp.write(str(os.getpid()) + '\n')
2507 fp.write(str(os.getpid()) + '\n')
2537 fp.close()
2508 fp.close()
2538
2509
2539 if opts['daemon_pipefds']:
2510 if opts['daemon_pipefds']:
2540 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2511 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
2541 os.close(rfd)
2512 os.close(rfd)
2542 os.write(wfd, 'y')
2513 os.write(wfd, 'y')
2543 os.close(wfd)
2514 os.close(wfd)
2544 sys.stdout.flush()
2515 sys.stdout.flush()
2545 sys.stderr.flush()
2516 sys.stderr.flush()
2546 fd = os.open(util.nulldev, os.O_RDWR)
2517 fd = os.open(util.nulldev, os.O_RDWR)
2547 if fd != 0: os.dup2(fd, 0)
2518 if fd != 0: os.dup2(fd, 0)
2548 if fd != 1: os.dup2(fd, 1)
2519 if fd != 1: os.dup2(fd, 1)
2549 if fd != 2: os.dup2(fd, 2)
2520 if fd != 2: os.dup2(fd, 2)
2550 if fd not in (0, 1, 2): os.close(fd)
2521 if fd not in (0, 1, 2): os.close(fd)
2551
2522
2552 httpd.serve_forever()
2523 httpd.serve_forever()
2553
2524
2554 def status(ui, repo, *pats, **opts):
2525 def status(ui, repo, *pats, **opts):
2555 """show changed files in the working directory
2526 """show changed files in the working directory
2556
2527
2557 Show status of files in the repository. If names are given, only
2528 Show status of files in the repository. If names are given, only
2558 files that match are shown. Files that are clean or ignored are
2529 files that match are shown. Files that are clean or ignored are
2559 not listed unless -c (clean), -i (ignored) or -A is given.
2530 not listed unless -c (clean), -i (ignored) or -A is given.
2560
2531
2561 The codes used to show the status of files are:
2532 The codes used to show the status of files are:
2562 M = modified
2533 M = modified
2563 A = added
2534 A = added
2564 R = removed
2535 R = removed
2565 C = clean
2536 C = clean
2566 ! = deleted, but still tracked
2537 ! = deleted, but still tracked
2567 ? = not tracked
2538 ? = not tracked
2568 I = ignored (not shown by default)
2539 I = ignored (not shown by default)
2569 = the previously added file was copied from here
2540 = the previously added file was copied from here
2570 """
2541 """
2571
2542
2572 all = opts['all']
2543 all = opts['all']
2573
2544
2574 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2545 files, matchfn, anypats = cmdutil.matchpats(repo, pats, opts)
2575 cwd = (pats and repo.getcwd()) or ''
2546 cwd = (pats and repo.getcwd()) or ''
2576 modified, added, removed, deleted, unknown, ignored, clean = [
2547 modified, added, removed, deleted, unknown, ignored, clean = [
2577 [util.pathto(cwd, x) for x in n]
2548 [util.pathto(cwd, x) for x in n]
2578 for n in repo.status(files=files, match=matchfn,
2549 for n in repo.status(files=files, match=matchfn,
2579 list_ignored=all or opts['ignored'],
2550 list_ignored=all or opts['ignored'],
2580 list_clean=all or opts['clean'])]
2551 list_clean=all or opts['clean'])]
2581
2552
2582 changetypes = (('modified', 'M', modified),
2553 changetypes = (('modified', 'M', modified),
2583 ('added', 'A', added),
2554 ('added', 'A', added),
2584 ('removed', 'R', removed),
2555 ('removed', 'R', removed),
2585 ('deleted', '!', deleted),
2556 ('deleted', '!', deleted),
2586 ('unknown', '?', unknown),
2557 ('unknown', '?', unknown),
2587 ('ignored', 'I', ignored))
2558 ('ignored', 'I', ignored))
2588
2559
2589 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2560 explicit_changetypes = changetypes + (('clean', 'C', clean),)
2590
2561
2591 end = opts['print0'] and '\0' or '\n'
2562 end = opts['print0'] and '\0' or '\n'
2592
2563
2593 for opt, char, changes in ([ct for ct in explicit_changetypes
2564 for opt, char, changes in ([ct for ct in explicit_changetypes
2594 if all or opts[ct[0]]]
2565 if all or opts[ct[0]]]
2595 or changetypes):
2566 or changetypes):
2596 if opts['no_status']:
2567 if opts['no_status']:
2597 format = "%%s%s" % end
2568 format = "%%s%s" % end
2598 else:
2569 else:
2599 format = "%s %%s%s" % (char, end)
2570 format = "%s %%s%s" % (char, end)
2600
2571
2601 for f in changes:
2572 for f in changes:
2602 ui.write(format % f)
2573 ui.write(format % f)
2603 if ((all or opts.get('copies')) and not opts.get('no_status')
2574 if ((all or opts.get('copies')) and not opts.get('no_status')
2604 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2575 and opt == 'added' and repo.dirstate.copies.has_key(f)):
2605 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2576 ui.write(' %s%s' % (repo.dirstate.copies[f], end))
2606
2577
2607 def tag(ui, repo, name, rev_=None, **opts):
2578 def tag(ui, repo, name, rev_=None, **opts):
2608 """add a tag for the current tip or a given revision
2579 """add a tag for the current tip or a given revision
2609
2580
2610 Name a particular revision using <name>.
2581 Name a particular revision using <name>.
2611
2582
2612 Tags are used to name particular revisions of the repository and are
2583 Tags are used to name particular revisions of the repository and are
2613 very useful to compare different revisions, to go back to significant
2584 very useful to compare different revisions, to go back to significant
2614 earlier versions or to mark branch points as releases, etc.
2585 earlier versions or to mark branch points as releases, etc.
2615
2586
2616 If no revision is given, the parent of the working directory is used.
2587 If no revision is given, the parent of the working directory is used.
2617
2588
2618 To facilitate version control, distribution, and merging of tags,
2589 To facilitate version control, distribution, and merging of tags,
2619 they are stored as a file named ".hgtags" which is managed
2590 they are stored as a file named ".hgtags" which is managed
2620 similarly to other project files and can be hand-edited if
2591 similarly to other project files and can be hand-edited if
2621 necessary. The file '.hg/localtags' is used for local tags (not
2592 necessary. The file '.hg/localtags' is used for local tags (not
2622 shared among repositories).
2593 shared among repositories).
2623 """
2594 """
2624 if name in ['tip', '.']:
2595 if name in ['tip', '.']:
2625 raise util.Abort(_("the name '%s' is reserved") % name)
2596 raise util.Abort(_("the name '%s' is reserved") % name)
2626 if rev_ is not None:
2597 if rev_ is not None:
2627 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2598 ui.warn(_("use of 'hg tag NAME [REV]' is deprecated, "
2628 "please use 'hg tag [-r REV] NAME' instead\n"))
2599 "please use 'hg tag [-r REV] NAME' instead\n"))
2629 if opts['rev']:
2600 if opts['rev']:
2630 raise util.Abort(_("use only one form to specify the revision"))
2601 raise util.Abort(_("use only one form to specify the revision"))
2631 if opts['rev']:
2602 if opts['rev']:
2632 rev_ = opts['rev']
2603 rev_ = opts['rev']
2633 if rev_:
2604 if rev_:
2634 r = hex(repo.lookup(rev_))
2605 r = hex(repo.lookup(rev_))
2635 else:
2606 else:
2636 p1, p2 = repo.dirstate.parents()
2607 p1, p2 = repo.dirstate.parents()
2637 if p1 == nullid:
2608 if p1 == nullid:
2638 raise util.Abort(_('no revision to tag'))
2609 raise util.Abort(_('no revision to tag'))
2639 if p2 != nullid:
2610 if p2 != nullid:
2640 raise util.Abort(_('outstanding uncommitted merges'))
2611 raise util.Abort(_('outstanding uncommitted merges'))
2641 r = hex(p1)
2612 r = hex(p1)
2642
2613
2643 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2614 repo.tag(name, r, opts['local'], opts['message'], opts['user'],
2644 opts['date'])
2615 opts['date'])
2645
2616
2646 def tags(ui, repo):
2617 def tags(ui, repo):
2647 """list repository tags
2618 """list repository tags
2648
2619
2649 List the repository tags.
2620 List the repository tags.
2650
2621
2651 This lists both regular and local tags.
2622 This lists both regular and local tags.
2652 """
2623 """
2653
2624
2654 l = repo.tagslist()
2625 l = repo.tagslist()
2655 l.reverse()
2626 l.reverse()
2656 for t, n in l:
2627 for t, n in l:
2657 try:
2628 try:
2658 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2629 r = "%5d:%s" % (repo.changelog.rev(n), hex(n))
2659 except KeyError:
2630 except KeyError:
2660 r = " ?:?"
2631 r = " ?:?"
2661 if ui.quiet:
2632 if ui.quiet:
2662 ui.write("%s\n" % t)
2633 ui.write("%s\n" % t)
2663 else:
2634 else:
2664 ui.write("%-30s %s\n" % (t, r))
2635 ui.write("%-30s %s\n" % (t, r))
2665
2636
2666 def tip(ui, repo, **opts):
2637 def tip(ui, repo, **opts):
2667 """show the tip revision
2638 """show the tip revision
2668
2639
2669 Show the tip revision.
2640 Show the tip revision.
2670 """
2641 """
2671 n = repo.changelog.tip()
2642 n = repo.changelog.tip()
2672 br = None
2643 br = None
2673 if opts['branches']:
2644 if opts['branches']:
2674 br = repo.branchlookup([n])
2645 br = repo.branchlookup([n])
2675 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2646 show_changeset(ui, repo, opts).show(changenode=n, brinfo=br)
2676 if opts['patch']:
2647 if opts['patch']:
2677 patch.diff(repo, repo.changelog.parents(n)[0], n)
2648 patch.diff(repo, repo.changelog.parents(n)[0], n)
2678
2649
2679 def unbundle(ui, repo, fname, **opts):
2650 def unbundle(ui, repo, fname, **opts):
2680 """apply a changegroup file
2651 """apply a changegroup file
2681
2652
2682 Apply a compressed changegroup file generated by the bundle
2653 Apply a compressed changegroup file generated by the bundle
2683 command.
2654 command.
2684 """
2655 """
2685 f = urllib.urlopen(fname)
2656 f = urllib.urlopen(fname)
2686
2657
2687 header = f.read(6)
2658 header = f.read(6)
2688 if not header.startswith("HG"):
2659 if not header.startswith("HG"):
2689 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2660 raise util.Abort(_("%s: not a Mercurial bundle file") % fname)
2690 elif not header.startswith("HG10"):
2661 elif not header.startswith("HG10"):
2691 raise util.Abort(_("%s: unknown bundle version") % fname)
2662 raise util.Abort(_("%s: unknown bundle version") % fname)
2692 elif header == "HG10BZ":
2663 elif header == "HG10BZ":
2693 def generator(f):
2664 def generator(f):
2694 zd = bz2.BZ2Decompressor()
2665 zd = bz2.BZ2Decompressor()
2695 zd.decompress("BZ")
2666 zd.decompress("BZ")
2696 for chunk in f:
2667 for chunk in f:
2697 yield zd.decompress(chunk)
2668 yield zd.decompress(chunk)
2698 elif header == "HG10UN":
2669 elif header == "HG10UN":
2699 def generator(f):
2670 def generator(f):
2700 for chunk in f:
2671 for chunk in f:
2701 yield chunk
2672 yield chunk
2702 else:
2673 else:
2703 raise util.Abort(_("%s: unknown bundle compression type")
2674 raise util.Abort(_("%s: unknown bundle compression type")
2704 % fname)
2675 % fname)
2705 gen = generator(util.filechunkiter(f, 4096))
2676 gen = generator(util.filechunkiter(f, 4096))
2706 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2677 modheads = repo.addchangegroup(util.chunkbuffer(gen), 'unbundle',
2707 'bundle:' + fname)
2678 'bundle:' + fname)
2708 return postincoming(ui, repo, modheads, opts['update'])
2679 return postincoming(ui, repo, modheads, opts['update'])
2709
2680
2710 def undo(ui, repo):
2681 def undo(ui, repo):
2711 """undo the last commit or pull (DEPRECATED)
2682 """undo the last commit or pull (DEPRECATED)
2712
2683
2713 (DEPRECATED)
2684 (DEPRECATED)
2714 This command is now deprecated and will be removed in a future
2685 This command is now deprecated and will be removed in a future
2715 release. Please use the rollback command instead. For usage
2686 release. Please use the rollback command instead. For usage
2716 instructions, see the rollback command.
2687 instructions, see the rollback command.
2717 """
2688 """
2718 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2689 ui.warn(_('(the undo command is deprecated; use rollback instead)\n'))
2719 repo.rollback()
2690 repo.rollback()
2720
2691
2721 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2692 def update(ui, repo, node=None, merge=False, clean=False, force=None,
2722 branch=None):
2693 branch=None):
2723 """update or merge working directory
2694 """update or merge working directory
2724
2695
2725 Update the working directory to the specified revision.
2696 Update the working directory to the specified revision.
2726
2697
2727 If there are no outstanding changes in the working directory and
2698 If there are no outstanding changes in the working directory and
2728 there is a linear relationship between the current version and the
2699 there is a linear relationship between the current version and the
2729 requested version, the result is the requested version.
2700 requested version, the result is the requested version.
2730
2701
2731 To merge the working directory with another revision, use the
2702 To merge the working directory with another revision, use the
2732 merge command.
2703 merge command.
2733
2704
2734 By default, update will refuse to run if doing so would require
2705 By default, update will refuse to run if doing so would require
2735 merging or discarding local changes.
2706 merging or discarding local changes.
2736 """
2707 """
2737 node = _lookup(repo, node, branch)
2708 node = _lookup(repo, node, branch)
2738 if merge:
2709 if merge:
2739 ui.warn(_('(the -m/--merge option is deprecated; '
2710 ui.warn(_('(the -m/--merge option is deprecated; '
2740 'use the merge command instead)\n'))
2711 'use the merge command instead)\n'))
2741 return hg.merge(repo, node, force=force)
2712 return hg.merge(repo, node, force=force)
2742 elif clean:
2713 elif clean:
2743 return hg.clean(repo, node)
2714 return hg.clean(repo, node)
2744 else:
2715 else:
2745 return hg.update(repo, node)
2716 return hg.update(repo, node)
2746
2717
2747 def _lookup(repo, node, branch=None):
2718 def _lookup(repo, node, branch=None):
2748 if branch:
2719 if branch:
2749 br = repo.branchlookup(branch=branch)
2720 br = repo.branchlookup(branch=branch)
2750 found = []
2721 found = []
2751 for x in br:
2722 for x in br:
2752 if branch in br[x]:
2723 if branch in br[x]:
2753 found.append(x)
2724 found.append(x)
2754 if len(found) > 1:
2725 if len(found) > 1:
2755 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2726 repo.ui.warn(_("Found multiple heads for %s\n") % branch)
2756 for x in found:
2727 for x in found:
2757 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2728 show_changeset(repo.ui, repo, {}).show(changenode=x, brinfo=br)
2758 raise util.Abort("")
2729 raise util.Abort("")
2759 if len(found) == 1:
2730 if len(found) == 1:
2760 node = found[0]
2731 node = found[0]
2761 repo.ui.warn(_("Using head %s for branch %s\n")
2732 repo.ui.warn(_("Using head %s for branch %s\n")
2762 % (short(node), branch))
2733 % (short(node), branch))
2763 else:
2734 else:
2764 raise util.Abort(_("branch %s not found\n") % (branch))
2735 raise util.Abort(_("branch %s not found\n") % (branch))
2765 else:
2736 else:
2766 node = node and repo.lookup(node) or repo.changelog.tip()
2737 node = node and repo.lookup(node) or repo.changelog.tip()
2767 return node
2738 return node
2768
2739
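# --- illustrative sketch, not part of the original changeset ---
# _lookup() above resolves a branch name to a head: it keeps every head
# whose branch list contains the name, aborts when several heads match,
# and errors out when none do.  The toy function below mirrors that
# selection on a plain dict shaped like branchlookup()'s result; the
# sample nodes and branch names are invented for this demonstration.

def pick_branch_head(branch_heads, branch):
    # branch_heads: head node -> list of branch names on that head
    found = [node for node, names in branch_heads.items() if branch in names]
    if len(found) > 1:
        raise ValueError('multiple heads for branch %r: %s' % (branch, found))
    if not found:
        raise ValueError('branch %r not found' % branch)
    return found[0]

if __name__ == '__main__':
    heads = {'aaa111': ['default'], 'bbb222': ['stable'], 'ccc333': ['stable']}
    print(pick_branch_head(heads, 'default'))   # -> 'aaa111'
    try:
        pick_branch_head(heads, 'stable')       # two heads -> error
    except ValueError as err:
        print(err)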
2769 def verify(ui, repo):
2740 def verify(ui, repo):
2770 """verify the integrity of the repository
2741 """verify the integrity of the repository
2771
2742
2772 Verify the integrity of the current repository.
2743 Verify the integrity of the current repository.
2773
2744
2774 This will perform an extensive check of the repository's
2745 This will perform an extensive check of the repository's
2775 integrity, validating the hashes and checksums of each entry in
2746 integrity, validating the hashes and checksums of each entry in
2776 the changelog, manifest, and tracked files, as well as the
2747 the changelog, manifest, and tracked files, as well as the
2777 integrity of their crosslinks and indices.
2748 integrity of their crosslinks and indices.
2778 """
2749 """
2779 return hg.verify(repo)
2750 return hg.verify(repo)
2780
2751
2781 # Command options and aliases are listed here, alphabetically
2752 # Command options and aliases are listed here, alphabetically
2782
2753
2783 table = {
2754 table = {
2784 "^add":
2755 "^add":
2785 (add,
2756 (add,
2786 [('I', 'include', [], _('include names matching the given patterns')),
2757 [('I', 'include', [], _('include names matching the given patterns')),
2787 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2758 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2788 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2759 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2789 _('hg add [OPTION]... [FILE]...')),
2760 _('hg add [OPTION]... [FILE]...')),
2790 "debugaddremove|addremove":
2761 "debugaddremove|addremove":
2791 (addremove,
2762 (addremove,
2792 [('I', 'include', [], _('include names matching the given patterns')),
2763 [('I', 'include', [], _('include names matching the given patterns')),
2793 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2764 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2794 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2765 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2795 _('hg addremove [OPTION]... [FILE]...')),
2766 _('hg addremove [OPTION]... [FILE]...')),
2796 "^annotate":
2767 "^annotate":
2797 (annotate,
2768 (annotate,
2798 [('r', 'rev', '', _('annotate the specified revision')),
2769 [('r', 'rev', '', _('annotate the specified revision')),
2799 ('a', 'text', None, _('treat all files as text')),
2770 ('a', 'text', None, _('treat all files as text')),
2800 ('u', 'user', None, _('list the author')),
2771 ('u', 'user', None, _('list the author')),
2801 ('d', 'date', None, _('list the date')),
2772 ('d', 'date', None, _('list the date')),
2802 ('n', 'number', None, _('list the revision number (default)')),
2773 ('n', 'number', None, _('list the revision number (default)')),
2803 ('c', 'changeset', None, _('list the changeset')),
2774 ('c', 'changeset', None, _('list the changeset')),
2804 ('I', 'include', [], _('include names matching the given patterns')),
2775 ('I', 'include', [], _('include names matching the given patterns')),
2805 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2776 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2806 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2777 _('hg annotate [-r REV] [-a] [-u] [-d] [-n] [-c] FILE...')),
2807 "archive":
2778 "archive":
2808 (archive,
2779 (archive,
2809 [('', 'no-decode', None, _('do not pass files through decoders')),
2780 [('', 'no-decode', None, _('do not pass files through decoders')),
2810 ('p', 'prefix', '', _('directory prefix for files in archive')),
2781 ('p', 'prefix', '', _('directory prefix for files in archive')),
2811 ('r', 'rev', '', _('revision to distribute')),
2782 ('r', 'rev', '', _('revision to distribute')),
2812 ('t', 'type', '', _('type of distribution to create')),
2783 ('t', 'type', '', _('type of distribution to create')),
2813 ('I', 'include', [], _('include names matching the given patterns')),
2784 ('I', 'include', [], _('include names matching the given patterns')),
2814 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2785 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2815 _('hg archive [OPTION]... DEST')),
2786 _('hg archive [OPTION]... DEST')),
2816 "backout":
2787 "backout":
2817 (backout,
2788 (backout,
2818 [('', 'merge', None,
2789 [('', 'merge', None,
2819 _('merge with old dirstate parent after backout')),
2790 _('merge with old dirstate parent after backout')),
2820 ('m', 'message', '', _('use <text> as commit message')),
2791 ('m', 'message', '', _('use <text> as commit message')),
2821 ('l', 'logfile', '', _('read commit message from <file>')),
2792 ('l', 'logfile', '', _('read commit message from <file>')),
2822 ('d', 'date', '', _('record datecode as commit date')),
2793 ('d', 'date', '', _('record datecode as commit date')),
2823 ('', 'parent', '', _('parent to choose when backing out merge')),
2794 ('', 'parent', '', _('parent to choose when backing out merge')),
2824 ('u', 'user', '', _('record user as committer')),
2795 ('u', 'user', '', _('record user as committer')),
2825 ('I', 'include', [], _('include names matching the given patterns')),
2796 ('I', 'include', [], _('include names matching the given patterns')),
2826 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2797 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2827 _('hg backout [OPTION]... REV')),
2798 _('hg backout [OPTION]... REV')),
2828 "bundle":
2799 "bundle":
2829 (bundle,
2800 (bundle,
2830 [('f', 'force', None,
2801 [('f', 'force', None,
2831 _('run even when remote repository is unrelated'))],
2802 _('run even when remote repository is unrelated'))],
2832 _('hg bundle FILE DEST')),
2803 _('hg bundle FILE DEST')),
2833 "cat":
2804 "cat":
2834 (cat,
2805 (cat,
2835 [('o', 'output', '', _('print output to file with formatted name')),
2806 [('o', 'output', '', _('print output to file with formatted name')),
2836 ('r', 'rev', '', _('print the given revision')),
2807 ('r', 'rev', '', _('print the given revision')),
2837 ('I', 'include', [], _('include names matching the given patterns')),
2808 ('I', 'include', [], _('include names matching the given patterns')),
2838 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2809 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2839 _('hg cat [OPTION]... FILE...')),
2810 _('hg cat [OPTION]... FILE...')),
2840 "^clone":
2811 "^clone":
2841 (clone,
2812 (clone,
2842 [('U', 'noupdate', None, _('do not update the new working directory')),
2813 [('U', 'noupdate', None, _('do not update the new working directory')),
2843 ('r', 'rev', [],
2814 ('r', 'rev', [],
2844 _('a changeset you would like to have after cloning')),
2815 _('a changeset you would like to have after cloning')),
2845 ('', 'pull', None, _('use pull protocol to copy metadata')),
2816 ('', 'pull', None, _('use pull protocol to copy metadata')),
2846 ('', 'uncompressed', None,
2817 ('', 'uncompressed', None,
2847 _('use uncompressed transfer (fast over LAN)')),
2818 _('use uncompressed transfer (fast over LAN)')),
2848 ('e', 'ssh', '', _('specify ssh command to use')),
2819 ('e', 'ssh', '', _('specify ssh command to use')),
2849 ('', 'remotecmd', '',
2820 ('', 'remotecmd', '',
2850 _('specify hg command to run on the remote side'))],
2821 _('specify hg command to run on the remote side'))],
2851 _('hg clone [OPTION]... SOURCE [DEST]')),
2822 _('hg clone [OPTION]... SOURCE [DEST]')),
2852 "^commit|ci":
2823 "^commit|ci":
2853 (commit,
2824 (commit,
2854 [('A', 'addremove', None,
2825 [('A', 'addremove', None,
2855 _('mark new/missing files as added/removed before committing')),
2826 _('mark new/missing files as added/removed before committing')),
2856 ('m', 'message', '', _('use <text> as commit message')),
2827 ('m', 'message', '', _('use <text> as commit message')),
2857 ('l', 'logfile', '', _('read the commit message from <file>')),
2828 ('l', 'logfile', '', _('read the commit message from <file>')),
2858 ('d', 'date', '', _('record datecode as commit date')),
2829 ('d', 'date', '', _('record datecode as commit date')),
2859 ('u', 'user', '', _('record user as committer')),
2830 ('u', 'user', '', _('record user as committer')),
2860 ('I', 'include', [], _('include names matching the given patterns')),
2831 ('I', 'include', [], _('include names matching the given patterns')),
2861 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2832 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2862 _('hg commit [OPTION]... [FILE]...')),
2833 _('hg commit [OPTION]... [FILE]...')),
2863 "copy|cp":
2834 "copy|cp":
2864 (copy,
2835 (copy,
2865 [('A', 'after', None, _('record a copy that has already occurred')),
2836 [('A', 'after', None, _('record a copy that has already occurred')),
2866 ('f', 'force', None,
2837 ('f', 'force', None,
2867 _('forcibly copy over an existing managed file')),
2838 _('forcibly copy over an existing managed file')),
2868 ('I', 'include', [], _('include names matching the given patterns')),
2839 ('I', 'include', [], _('include names matching the given patterns')),
2869 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2840 ('X', 'exclude', [], _('exclude names matching the given patterns')),
2870 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2841 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
2871 _('hg copy [OPTION]... [SOURCE]... DEST')),
2842 _('hg copy [OPTION]... [SOURCE]... DEST')),
2872 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2843 "debugancestor": (debugancestor, [], _('debugancestor INDEX REV1 REV2')),
2873 "debugcomplete":
2844 "debugcomplete":
2874 (debugcomplete,
2845 (debugcomplete,
2875 [('o', 'options', None, _('show the command options'))],
2846 [('o', 'options', None, _('show the command options'))],
2876 _('debugcomplete [-o] CMD')),
2847 _('debugcomplete [-o] CMD')),
2877 "debugrebuildstate":
2848 "debugrebuildstate":
2878 (debugrebuildstate,
2849 (debugrebuildstate,
2879 [('r', 'rev', '', _('revision to rebuild to'))],
2850 [('r', 'rev', '', _('revision to rebuild to'))],
2880 _('debugrebuildstate [-r REV] [REV]')),
2851 _('debugrebuildstate [-r REV] [REV]')),
2881 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2852 "debugcheckstate": (debugcheckstate, [], _('debugcheckstate')),
2882 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2853 "debugconfig": (debugconfig, [], _('debugconfig [NAME]...')),
2883 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2854 "debugsetparents": (debugsetparents, [], _('debugsetparents REV1 [REV2]')),
2884 "debugstate": (debugstate, [], _('debugstate')),
2855 "debugstate": (debugstate, [], _('debugstate')),
2885 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2856 "debugdata": (debugdata, [], _('debugdata FILE REV')),
2886 "debugindex": (debugindex, [], _('debugindex FILE')),
2857 "debugindex": (debugindex, [], _('debugindex FILE')),
2887 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2858 "debugindexdot": (debugindexdot, [], _('debugindexdot FILE')),
2888 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2859 "debugrename": (debugrename, [], _('debugrename FILE [REV]')),
2889 "debugwalk":
2860 "debugwalk":
2890 (debugwalk,
2861 (debugwalk,
2891 [('I', 'include', [], _('include names matching the given patterns')),
2862 [('I', 'include', [], _('include names matching the given patterns')),
2892 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2863 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2893 _('debugwalk [OPTION]... [FILE]...')),
2864 _('debugwalk [OPTION]... [FILE]...')),
2894 "^diff":
2865 "^diff":
2895 (diff,
2866 (diff,
2896 [('r', 'rev', [], _('revision')),
2867 [('r', 'rev', [], _('revision')),
2897 ('a', 'text', None, _('treat all files as text')),
2868 ('a', 'text', None, _('treat all files as text')),
2898 ('p', 'show-function', None,
2869 ('p', 'show-function', None,
2899 _('show which function each change is in')),
2870 _('show which function each change is in')),
2900 ('g', 'git', None, _('use git extended diff format')),
2901 ('w', 'ignore-all-space', None,
2871 ('w', 'ignore-all-space', None,
2902 _('ignore white space when comparing lines')),
2872 _('ignore white space when comparing lines')),
2903 ('b', 'ignore-space-change', None,
2873 ('b', 'ignore-space-change', None,
2904 _('ignore changes in the amount of white space')),
2874 _('ignore changes in the amount of white space')),
2905 ('B', 'ignore-blank-lines', None,
2875 ('B', 'ignore-blank-lines', None,
2906 _('ignore changes whose lines are all blank')),
2876 _('ignore changes whose lines are all blank')),
2907 ('I', 'include', [], _('include names matching the given patterns')),
2877 ('I', 'include', [], _('include names matching the given patterns')),
2908 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2878 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2909 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2879 _('hg diff [-a] [-I] [-X] [-r REV1 [-r REV2]] [FILE]...')),
2910 "^export":
2880 "^export":
2911 (export,
2881 (export,
2912 [('o', 'output', '', _('print output to file with formatted name')),
2882 [('o', 'output', '', _('print output to file with formatted name')),
2913 ('a', 'text', None, _('treat all files as text')),
2883 ('a', 'text', None, _('treat all files as text')),
2914 ('', 'switch-parent', None, _('diff against the second parent'))],
2884 ('', 'switch-parent', None, _('diff against the second parent'))],
2915 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2885 _('hg export [-a] [-o OUTFILESPEC] REV...')),
2916 "debugforget|forget":
2886 "debugforget|forget":
2917 (forget,
2887 (forget,
2918 [('I', 'include', [], _('include names matching the given patterns')),
2888 [('I', 'include', [], _('include names matching the given patterns')),
2919 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2889 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2920 _('hg forget [OPTION]... FILE...')),
2890 _('hg forget [OPTION]... FILE...')),
2921 "grep":
2891 "grep":
2922 (grep,
2892 (grep,
2923 [('0', 'print0', None, _('end fields with NUL')),
2893 [('0', 'print0', None, _('end fields with NUL')),
2924 ('', 'all', None, _('print all revisions that match')),
2894 ('', 'all', None, _('print all revisions that match')),
2925 ('f', 'follow', None,
2895 ('f', 'follow', None,
2926 _('follow changeset history, or file history across copies and renames')),
2896 _('follow changeset history, or file history across copies and renames')),
2927 ('i', 'ignore-case', None, _('ignore case when matching')),
2897 ('i', 'ignore-case', None, _('ignore case when matching')),
2928 ('l', 'files-with-matches', None,
2898 ('l', 'files-with-matches', None,
2929 _('print only filenames and revs that match')),
2899 _('print only filenames and revs that match')),
2930 ('n', 'line-number', None, _('print matching line numbers')),
2900 ('n', 'line-number', None, _('print matching line numbers')),
2931 ('r', 'rev', [], _('search in given revision range')),
2901 ('r', 'rev', [], _('search in given revision range')),
2932 ('u', 'user', None, _('print user who committed change')),
2902 ('u', 'user', None, _('print user who committed change')),
2933 ('I', 'include', [], _('include names matching the given patterns')),
2903 ('I', 'include', [], _('include names matching the given patterns')),
2934 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2904 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2935 _('hg grep [OPTION]... PATTERN [FILE]...')),
2905 _('hg grep [OPTION]... PATTERN [FILE]...')),
2936 "heads":
2906 "heads":
2937 (heads,
2907 (heads,
2938 [('b', 'branches', None, _('show branches')),
2908 [('b', 'branches', None, _('show branches')),
2939 ('', 'style', '', _('display using template map file')),
2909 ('', 'style', '', _('display using template map file')),
2940 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2910 ('r', 'rev', '', _('show only heads which are descendants of rev')),
2941 ('', 'template', '', _('display with template'))],
2911 ('', 'template', '', _('display with template'))],
2942 _('hg heads [-b] [-r <rev>]')),
2912 _('hg heads [-b] [-r <rev>]')),
2943 "help": (help_, [], _('hg help [COMMAND]')),
2913 "help": (help_, [], _('hg help [COMMAND]')),
2944 "identify|id": (identify, [], _('hg identify')),
2914 "identify|id": (identify, [], _('hg identify')),
2945 "import|patch":
2915 "import|patch":
2946 (import_,
2916 (import_,
2947 [('p', 'strip', 1,
2917 [('p', 'strip', 1,
2948 _('directory strip option for patch. This has the same\n'
2918 _('directory strip option for patch. This has the same\n'
2949 'meaning as the corresponding patch option')),
2919 'meaning as the corresponding patch option')),
2950 ('m', 'message', '', _('use <text> as commit message')),
2920 ('m', 'message', '', _('use <text> as commit message')),
2951 ('b', 'base', '', _('base path')),
2921 ('b', 'base', '', _('base path')),
2952 ('f', 'force', None,
2922 ('f', 'force', None,
2953 _('skip check for outstanding uncommitted changes'))],
2923 _('skip check for outstanding uncommitted changes'))],
2954 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2924 _('hg import [-p NUM] [-b BASE] [-m MESSAGE] [-f] PATCH...')),
2955 "incoming|in": (incoming,
2925 "incoming|in": (incoming,
2956 [('M', 'no-merges', None, _('do not show merges')),
2926 [('M', 'no-merges', None, _('do not show merges')),
2957 ('f', 'force', None,
2927 ('f', 'force', None,
2958 _('run even when remote repository is unrelated')),
2928 _('run even when remote repository is unrelated')),
2959 ('', 'style', '', _('display using template map file')),
2929 ('', 'style', '', _('display using template map file')),
2960 ('n', 'newest-first', None, _('show newest record first')),
2930 ('n', 'newest-first', None, _('show newest record first')),
2961 ('', 'bundle', '', _('file to store the bundles into')),
2931 ('', 'bundle', '', _('file to store the bundles into')),
2962 ('p', 'patch', None, _('show patch')),
2932 ('p', 'patch', None, _('show patch')),
2963 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2933 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
2964 ('', 'template', '', _('display with template')),
2934 ('', 'template', '', _('display with template')),
2965 ('e', 'ssh', '', _('specify ssh command to use')),
2935 ('e', 'ssh', '', _('specify ssh command to use')),
2966 ('', 'remotecmd', '',
2936 ('', 'remotecmd', '',
2967 _('specify hg command to run on the remote side'))],
2937 _('specify hg command to run on the remote side'))],
2968 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2938 _('hg incoming [-p] [-n] [-M] [-r REV]...'
2969 ' [--bundle FILENAME] [SOURCE]')),
2939 ' [--bundle FILENAME] [SOURCE]')),
2970 "^init":
2940 "^init":
2971 (init,
2941 (init,
2972 [('e', 'ssh', '', _('specify ssh command to use')),
2942 [('e', 'ssh', '', _('specify ssh command to use')),
2973 ('', 'remotecmd', '',
2943 ('', 'remotecmd', '',
2974 _('specify hg command to run on the remote side'))],
2944 _('specify hg command to run on the remote side'))],
2975 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2945 _('hg init [-e FILE] [--remotecmd FILE] [DEST]')),
2976 "locate":
2946 "locate":
2977 (locate,
2947 (locate,
2978 [('r', 'rev', '', _('search the repository as it stood at rev')),
2948 [('r', 'rev', '', _('search the repository as it stood at rev')),
2979 ('0', 'print0', None,
2949 ('0', 'print0', None,
2980 _('end filenames with NUL, for use with xargs')),
2950 _('end filenames with NUL, for use with xargs')),
2981 ('f', 'fullpath', None,
2951 ('f', 'fullpath', None,
2982 _('print complete paths from the filesystem root')),
2952 _('print complete paths from the filesystem root')),
2983 ('I', 'include', [], _('include names matching the given patterns')),
2953 ('I', 'include', [], _('include names matching the given patterns')),
2984 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2954 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2985 _('hg locate [OPTION]... [PATTERN]...')),
2955 _('hg locate [OPTION]... [PATTERN]...')),
2986 "^log|history":
2956 "^log|history":
2987 (log,
2957 (log,
2988 [('b', 'branches', None, _('show branches')),
2958 [('b', 'branches', None, _('show branches')),
2989 ('f', 'follow', None,
2959 ('f', 'follow', None,
2990 _('follow changeset history, or file history across copies and renames')),
2960 _('follow changeset history, or file history across copies and renames')),
2991 ('', 'follow-first', None,
2961 ('', 'follow-first', None,
2992 _('only follow the first parent of merge changesets')),
2962 _('only follow the first parent of merge changesets')),
2993 ('k', 'keyword', [], _('search for a keyword')),
2963 ('k', 'keyword', [], _('search for a keyword')),
2994 ('l', 'limit', '', _('limit number of changes displayed')),
2964 ('l', 'limit', '', _('limit number of changes displayed')),
2995 ('r', 'rev', [], _('show the specified revision or range')),
2965 ('r', 'rev', [], _('show the specified revision or range')),
2996 ('M', 'no-merges', None, _('do not show merges')),
2966 ('M', 'no-merges', None, _('do not show merges')),
2997 ('', 'style', '', _('display using template map file')),
2967 ('', 'style', '', _('display using template map file')),
2998 ('m', 'only-merges', None, _('show only merges')),
2968 ('m', 'only-merges', None, _('show only merges')),
2999 ('p', 'patch', None, _('show patch')),
2969 ('p', 'patch', None, _('show patch')),
3000 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3001 ('', 'template', '', _('display with template')),
2970 ('', 'template', '', _('display with template')),
3002 ('I', 'include', [], _('include names matching the given patterns')),
2971 ('I', 'include', [], _('include names matching the given patterns')),
3003 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
2972 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3004 _('hg log [OPTION]... [FILE]')),
2973 _('hg log [OPTION]... [FILE]')),
3005 "manifest": (manifest, [], _('hg manifest [REV]')),
2974 "manifest": (manifest, [], _('hg manifest [REV]')),
3006 "merge":
2975 "merge":
3007 (merge,
2976 (merge,
3008 [('b', 'branch', '', _('merge with head of a specific branch')),
2977 [('b', 'branch', '', _('merge with head of a specific branch')),
3009 ('f', 'force', None, _('force a merge with outstanding changes'))],
2978 ('f', 'force', None, _('force a merge with outstanding changes'))],
3010 _('hg merge [-b TAG] [-f] [REV]')),
2979 _('hg merge [-b TAG] [-f] [REV]')),
3011 "outgoing|out": (outgoing,
2980 "outgoing|out": (outgoing,
3012 [('M', 'no-merges', None, _('do not show merges')),
2981 [('M', 'no-merges', None, _('do not show merges')),
3013 ('f', 'force', None,
2982 ('f', 'force', None,
3014 _('run even when remote repository is unrelated')),
2983 _('run even when remote repository is unrelated')),
3015 ('p', 'patch', None, _('show patch')),
2984 ('p', 'patch', None, _('show patch')),
3016 ('', 'style', '', _('display using template map file')),
2985 ('', 'style', '', _('display using template map file')),
3017 ('r', 'rev', [], _('a specific revision you would like to push')),
2986 ('r', 'rev', [], _('a specific revision you would like to push')),
3018 ('n', 'newest-first', None, _('show newest record first')),
2987 ('n', 'newest-first', None, _('show newest record first')),
3019 ('', 'template', '', _('display with template')),
2988 ('', 'template', '', _('display with template')),
3020 ('e', 'ssh', '', _('specify ssh command to use')),
2989 ('e', 'ssh', '', _('specify ssh command to use')),
3021 ('', 'remotecmd', '',
2990 ('', 'remotecmd', '',
3022 _('specify hg command to run on the remote side'))],
2991 _('specify hg command to run on the remote side'))],
3023 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
2992 _('hg outgoing [-M] [-p] [-n] [-r REV]... [DEST]')),
3024 "^parents":
2993 "^parents":
3025 (parents,
2994 (parents,
3026 [('b', 'branches', None, _('show branches')),
2995 [('b', 'branches', None, _('show branches')),
3027 ('r', 'rev', '', _('show parents from the specified rev')),
2996 ('r', 'rev', '', _('show parents from the specified rev')),
3028 ('', 'style', '', _('display using template map file')),
2997 ('', 'style', '', _('display using template map file')),
3029 ('', 'template', '', _('display with template'))],
2998 ('', 'template', '', _('display with template'))],
3030 _('hg parents [-b] [-r REV] [FILE]')),
2999 _('hg parents [-b] [-r REV] [FILE]')),
3031 "paths": (paths, [], _('hg paths [NAME]')),
3000 "paths": (paths, [], _('hg paths [NAME]')),
3032 "^pull":
3001 "^pull":
3033 (pull,
3002 (pull,
3034 [('u', 'update', None,
3003 [('u', 'update', None,
3035 _('update the working directory to tip after pull')),
3004 _('update the working directory to tip after pull')),
3036 ('e', 'ssh', '', _('specify ssh command to use')),
3005 ('e', 'ssh', '', _('specify ssh command to use')),
3037 ('f', 'force', None,
3006 ('f', 'force', None,
3038 _('run even when remote repository is unrelated')),
3007 _('run even when remote repository is unrelated')),
3039 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3008 ('r', 'rev', [], _('a specific revision up to which you would like to pull')),
3040 ('', 'remotecmd', '',
3009 ('', 'remotecmd', '',
3041 _('specify hg command to run on the remote side'))],
3010 _('specify hg command to run on the remote side'))],
3042 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3011 _('hg pull [-u] [-r REV]... [-e FILE] [--remotecmd FILE] [SOURCE]')),
3043 "^push":
3012 "^push":
3044 (push,
3013 (push,
3045 [('f', 'force', None, _('force push')),
3014 [('f', 'force', None, _('force push')),
3046 ('e', 'ssh', '', _('specify ssh command to use')),
3015 ('e', 'ssh', '', _('specify ssh command to use')),
3047 ('r', 'rev', [], _('a specific revision you would like to push')),
3016 ('r', 'rev', [], _('a specific revision you would like to push')),
3048 ('', 'remotecmd', '',
3017 ('', 'remotecmd', '',
3049 _('specify hg command to run on the remote side'))],
3018 _('specify hg command to run on the remote side'))],
3050 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3019 _('hg push [-f] [-r REV]... [-e FILE] [--remotecmd FILE] [DEST]')),
3051 "debugrawcommit|rawcommit":
3020 "debugrawcommit|rawcommit":
3052 (rawcommit,
3021 (rawcommit,
3053 [('p', 'parent', [], _('parent')),
3022 [('p', 'parent', [], _('parent')),
3054 ('d', 'date', '', _('date code')),
3023 ('d', 'date', '', _('date code')),
3055 ('u', 'user', '', _('user')),
3024 ('u', 'user', '', _('user')),
3056 ('F', 'files', '', _('file list')),
3025 ('F', 'files', '', _('file list')),
3057 ('m', 'message', '', _('commit message')),
3026 ('m', 'message', '', _('commit message')),
3058 ('l', 'logfile', '', _('commit message file'))],
3027 ('l', 'logfile', '', _('commit message file'))],
3059 _('hg debugrawcommit [OPTION]... [FILE]...')),
3028 _('hg debugrawcommit [OPTION]... [FILE]...')),
3060 "recover": (recover, [], _('hg recover')),
3029 "recover": (recover, [], _('hg recover')),
3061 "^remove|rm":
3030 "^remove|rm":
3062 (remove,
3031 (remove,
3063 [('A', 'after', None, _('record a remove that has already occurred')),
3032 [('A', 'after', None, _('record a remove that has already occurred')),
3064 ('f', 'force', None, _('remove file even if modified')),
3033 ('f', 'force', None, _('remove file even if modified')),
3065 ('I', 'include', [], _('include names matching the given patterns')),
3034 ('I', 'include', [], _('include names matching the given patterns')),
3066 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3035 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3067 _('hg remove [OPTION]... FILE...')),
3036 _('hg remove [OPTION]... FILE...')),
3068 "rename|mv":
3037 "rename|mv":
3069 (rename,
3038 (rename,
3070 [('A', 'after', None, _('record a rename that has already occurred')),
3039 [('A', 'after', None, _('record a rename that has already occurred')),
3071 ('f', 'force', None,
3040 ('f', 'force', None,
3072 _('forcibly copy over an existing managed file')),
3041 _('forcibly copy over an existing managed file')),
3073 ('I', 'include', [], _('include names matching the given patterns')),
3042 ('I', 'include', [], _('include names matching the given patterns')),
3074 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3043 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3075 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3044 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3076 _('hg rename [OPTION]... SOURCE... DEST')),
3045 _('hg rename [OPTION]... SOURCE... DEST')),
3077 "^revert":
3046 "^revert":
3078 (revert,
3047 (revert,
3079 [('r', 'rev', '', _('revision to revert to')),
3048 [('r', 'rev', '', _('revision to revert to')),
3080 ('', 'no-backup', None, _('do not save backup copies of files')),
3049 ('', 'no-backup', None, _('do not save backup copies of files')),
3081 ('I', 'include', [], _('include names matching given patterns')),
3050 ('I', 'include', [], _('include names matching given patterns')),
3082 ('X', 'exclude', [], _('exclude names matching given patterns')),
3051 ('X', 'exclude', [], _('exclude names matching given patterns')),
3083 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3052 ('n', 'dry-run', None, _('do not perform actions, just print output'))],
3084 _('hg revert [-r REV] [NAME]...')),
3053 _('hg revert [-r REV] [NAME]...')),
3085 "rollback": (rollback, [], _('hg rollback')),
3054 "rollback": (rollback, [], _('hg rollback')),
3086 "root": (root, [], _('hg root')),
3055 "root": (root, [], _('hg root')),
3087 "^serve":
3056 "^serve":
3088 (serve,
3057 (serve,
3089 [('A', 'accesslog', '', _('name of access log file to write to')),
3058 [('A', 'accesslog', '', _('name of access log file to write to')),
3090 ('d', 'daemon', None, _('run server in background')),
3059 ('d', 'daemon', None, _('run server in background')),
3091 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3060 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3092 ('E', 'errorlog', '', _('name of error log file to write to')),
3061 ('E', 'errorlog', '', _('name of error log file to write to')),
3093 ('p', 'port', 0, _('port to use (default: 8000)')),
3062 ('p', 'port', 0, _('port to use (default: 8000)')),
3094 ('a', 'address', '', _('address to use')),
3063 ('a', 'address', '', _('address to use')),
3095 ('n', 'name', '',
3064 ('n', 'name', '',
3096 _('name to show in web pages (default: working dir)')),
3065 _('name to show in web pages (default: working dir)')),
3097 ('', 'webdir-conf', '', _('name of the webdir config file'
3066 ('', 'webdir-conf', '', _('name of the webdir config file'
3098 ' (serve more than one repo)')),
3067 ' (serve more than one repo)')),
3099 ('', 'pid-file', '', _('name of file to write process ID to')),
3068 ('', 'pid-file', '', _('name of file to write process ID to')),
3100 ('', 'stdio', None, _('for remote clients')),
3069 ('', 'stdio', None, _('for remote clients')),
3101 ('t', 'templates', '', _('web templates to use')),
3070 ('t', 'templates', '', _('web templates to use')),
3102 ('', 'style', '', _('template style to use')),
3071 ('', 'style', '', _('template style to use')),
3103 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3072 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4'))],
3104 _('hg serve [OPTION]...')),
3073 _('hg serve [OPTION]...')),
3105 "^status|st":
3074 "^status|st":
3106 (status,
3075 (status,
3107 [('A', 'all', None, _('show status of all files')),
3076 [('A', 'all', None, _('show status of all files')),
3108 ('m', 'modified', None, _('show only modified files')),
3077 ('m', 'modified', None, _('show only modified files')),
3109 ('a', 'added', None, _('show only added files')),
3078 ('a', 'added', None, _('show only added files')),
3110 ('r', 'removed', None, _('show only removed files')),
3079 ('r', 'removed', None, _('show only removed files')),
3111 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3080 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3112 ('c', 'clean', None, _('show only files without changes')),
3081 ('c', 'clean', None, _('show only files without changes')),
3113 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3082 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3114 ('i', 'ignored', None, _('show ignored files')),
3083 ('i', 'ignored', None, _('show ignored files')),
3115 ('n', 'no-status', None, _('hide status prefix')),
3084 ('n', 'no-status', None, _('hide status prefix')),
3116 ('C', 'copies', None, _('show source of copied files')),
3085 ('C', 'copies', None, _('show source of copied files')),
3117 ('0', 'print0', None,
3086 ('0', 'print0', None,
3118 _('end filenames with NUL, for use with xargs')),
3087 _('end filenames with NUL, for use with xargs')),
3119 ('I', 'include', [], _('include names matching the given patterns')),
3088 ('I', 'include', [], _('include names matching the given patterns')),
3120 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3089 ('X', 'exclude', [], _('exclude names matching the given patterns'))],
3121 _('hg status [OPTION]... [FILE]...')),
3090 _('hg status [OPTION]... [FILE]...')),
3122 "tag":
3091 "tag":
3123 (tag,
3092 (tag,
3124 [('l', 'local', None, _('make the tag local')),
3093 [('l', 'local', None, _('make the tag local')),
3125 ('m', 'message', '', _('message for tag commit log entry')),
3094 ('m', 'message', '', _('message for tag commit log entry')),
3126 ('d', 'date', '', _('record datecode as commit date')),
3095 ('d', 'date', '', _('record datecode as commit date')),
3127 ('u', 'user', '', _('record user as committer')),
3096 ('u', 'user', '', _('record user as committer')),
3128 ('r', 'rev', '', _('revision to tag'))],
3097 ('r', 'rev', '', _('revision to tag'))],
3129 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3098 _('hg tag [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME')),
3130 "tags": (tags, [], _('hg tags')),
3099 "tags": (tags, [], _('hg tags')),
3131 "tip":
3100 "tip":
3132 (tip,
3101 (tip,
3133 [('b', 'branches', None, _('show branches')),
3102 [('b', 'branches', None, _('show branches')),
3134 ('', 'style', '', _('display using template map file')),
3103 ('', 'style', '', _('display using template map file')),
3135 ('p', 'patch', None, _('show patch')),
3104 ('p', 'patch', None, _('show patch')),
3136 ('', 'template', '', _('display with template'))],
3105 ('', 'template', '', _('display with template'))],
3137 _('hg tip [-b] [-p]')),
3106 _('hg tip [-b] [-p]')),
3138 "unbundle":
3107 "unbundle":
3139 (unbundle,
3108 (unbundle,
3140 [('u', 'update', None,
3109 [('u', 'update', None,
3141 _('update the working directory to tip after unbundle'))],
3110 _('update the working directory to tip after unbundle'))],
3142 _('hg unbundle [-u] FILE')),
3111 _('hg unbundle [-u] FILE')),
3143 "debugundo|undo": (undo, [], _('hg undo')),
3112 "debugundo|undo": (undo, [], _('hg undo')),
3144 "^update|up|checkout|co":
3113 "^update|up|checkout|co":
3145 (update,
3114 (update,
3146 [('b', 'branch', '', _('checkout the head of a specific branch')),
3115 [('b', 'branch', '', _('checkout the head of a specific branch')),
3147 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3116 ('m', 'merge', None, _('allow merging of branches (DEPRECATED)')),
3148 ('C', 'clean', None, _('overwrite locally modified files')),
3117 ('C', 'clean', None, _('overwrite locally modified files')),
3149 ('f', 'force', None, _('force a merge with outstanding changes'))],
3118 ('f', 'force', None, _('force a merge with outstanding changes'))],
3150 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3119 _('hg update [-b TAG] [-m] [-C] [-f] [REV]')),
3151 "verify": (verify, [], _('hg verify')),
3120 "verify": (verify, [], _('hg verify')),
3152 "version": (show_version, [], _('hg version')),
3121 "version": (show_version, [], _('hg version')),
3153 }
3122 }
3154
3123
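# --- illustrative sketch, not part of the original changeset ---
# Each entry of the command table above maps an alias key to a tuple of
# (function, option list, usage synopsis); a leading '^' marks commands
# for the short help list and '|' separates aliases.  The miniature
# table and the 'hello' command below are invented only to show how an
# entry is keyed and unpacked; they are not real Mercurial commands.

import sys

def hello(write, name='world'):
    write('hello, %s\n' % name)

mini_table = {
    "^hello|hi":
        (hello,
         [('n', 'name', 'world', 'name to greet')],
         'hg hello [-n NAME]'),
}

if __name__ == '__main__':
    key = "^hello|hi"
    aliases = key.lstrip("^").split("|")        # -> ['hello', 'hi']
    func, opts, synopsis = mini_table[key]
    print(aliases)
    print(synopsis)
    print(opts)
    func(sys.stdout.write, name='mercurial')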
3155 globalopts = [
3124 globalopts = [
3156 ('R', 'repository', '',
3125 ('R', 'repository', '',
3157 _('repository root directory or symbolic path name')),
3126 _('repository root directory or symbolic path name')),
3158 ('', 'cwd', '', _('change working directory')),
3127 ('', 'cwd', '', _('change working directory')),
3159 ('y', 'noninteractive', None,
3128 ('y', 'noninteractive', None,
3160 _('do not prompt, assume \'yes\' for any required answers')),
3129 _('do not prompt, assume \'yes\' for any required answers')),
3161 ('q', 'quiet', None, _('suppress output')),
3130 ('q', 'quiet', None, _('suppress output')),
3162 ('v', 'verbose', None, _('enable additional output')),
3131 ('v', 'verbose', None, _('enable additional output')),
3163 ('', 'config', [], _('set/override config option')),
3132 ('', 'config', [], _('set/override config option')),
3164 ('', 'debug', None, _('enable debugging output')),
3133 ('', 'debug', None, _('enable debugging output')),
3165 ('', 'debugger', None, _('start debugger')),
3134 ('', 'debugger', None, _('start debugger')),
3166 ('', 'lsprof', None, _('print improved command execution profile')),
3135 ('', 'lsprof', None, _('print improved command execution profile')),
3167 ('', 'traceback', None, _('print traceback on exception')),
3136 ('', 'traceback', None, _('print traceback on exception')),
3168 ('', 'time', None, _('time how long the command takes')),
3137 ('', 'time', None, _('time how long the command takes')),
3169 ('', 'profile', None, _('print command execution profile')),
3138 ('', 'profile', None, _('print command execution profile')),
3170 ('', 'version', None, _('output version information and exit')),
3139 ('', 'version', None, _('output version information and exit')),
3171 ('h', 'help', None, _('display help and exit')),
3140 ('h', 'help', None, _('display help and exit')),
3172 ]
3141 ]
3173
3142
3174 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3143 norepo = ("clone init version help debugancestor debugcomplete debugdata"
3175 " debugindex debugindexdot")
3144 " debugindex debugindexdot")
3176 optionalrepo = ("paths serve debugconfig")
3145 optionalrepo = ("paths serve debugconfig")
3177
3146
3178 def findpossible(cmd):
3147 def findpossible(cmd):
3179 """
3148 """
3180 Return cmd -> (aliases, command table entry)
3149 Return cmd -> (aliases, command table entry)
3181 for each matching command.
3150 for each matching command.
3182 Return debug commands (or their aliases) only if no normal command matches.
3151 Return debug commands (or their aliases) only if no normal command matches.
3183 """
3152 """
3184 choice = {}
3153 choice = {}
3185 debugchoice = {}
3154 debugchoice = {}
3186 for e in table.keys():
3155 for e in table.keys():
3187 aliases = e.lstrip("^").split("|")
3156 aliases = e.lstrip("^").split("|")
3188 found = None
3157 found = None
3189 if cmd in aliases:
3158 if cmd in aliases:
3190 found = cmd
3159 found = cmd
3191 else:
3160 else:
3192 for a in aliases:
3161 for a in aliases:
3193 if a.startswith(cmd):
3162 if a.startswith(cmd):
3194 found = a
3163 found = a
3195 break
3164 break
3196 if found is not None:
3165 if found is not None:
3197 if aliases[0].startswith("debug"):
3166 if aliases[0].startswith("debug"):
3198 debugchoice[found] = (aliases, table[e])
3167 debugchoice[found] = (aliases, table[e])
3199 else:
3168 else:
3200 choice[found] = (aliases, table[e])
3169 choice[found] = (aliases, table[e])
3201
3170
3202 if not choice and debugchoice:
3171 if not choice and debugchoice:
3203 choice = debugchoice
3172 choice = debugchoice
3204
3173
3205 return choice
3174 return choice
3206
3175
3207 def findcmd(cmd):
3176 def findcmd(cmd):
3208 """Return (aliases, command table entry) for command string."""
3177 """Return (aliases, command table entry) for command string."""
3209 choice = findpossible(cmd)
3178 choice = findpossible(cmd)
3210
3179
3211 if choice.has_key(cmd):
3180 if choice.has_key(cmd):
3212 return choice[cmd]
3181 return choice[cmd]
3213
3182
3214 if len(choice) > 1:
3183 if len(choice) > 1:
3215 clist = choice.keys()
3184 clist = choice.keys()
3216 clist.sort()
3185 clist.sort()
3217 raise AmbiguousCommand(cmd, clist)
3186 raise AmbiguousCommand(cmd, clist)
3218
3187
3219 if choice:
3188 if choice:
3220 return choice.values()[0]
3189 return choice.values()[0]
3221
3190
3222 raise UnknownCommand(cmd)
3191 raise UnknownCommand(cmd)
3223
3192
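# --- illustrative sketch, not part of the original changeset ---
# findpossible()/findcmd() above resolve a possibly abbreviated command:
# an exact alias wins, otherwise every alias starting with the typed
# prefix is a candidate, debug commands are considered only when nothing
# else matches, and several candidates mean the name is ambiguous.  The
# stand-alone resolver below reproduces that behaviour over a made-up,
# flat list of command names.

def resolve(cmd, names):
    if cmd in names:                            # exact match wins outright
        return cmd
    matches = sorted(n for n in names if n.startswith(cmd))
    nondebug = [n for n in matches if not n.startswith('debug')]
    matches = nondebug or matches               # debug names only as a fallback
    if not matches:
        raise ValueError("unknown command '%s'" % cmd)
    if len(matches) > 1:
        raise ValueError("command '%s' is ambiguous: %s"
                         % (cmd, ' '.join(matches)))
    return matches[0]

if __name__ == '__main__':
    names = ['status', 'serve', 'diff', 'debugdata']
    print(resolve('st', names))     # -> 'status'
    print(resolve('d', names))      # -> 'diff' ('debugdata' is only a fallback)
    try:
        resolve('s', names)         # 'serve' vs 'status' -> ambiguous
    except ValueError as err:
        print(err)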
3224 def catchterm(*args):
3193 def catchterm(*args):
3225 raise util.SignalInterrupt
3194 raise util.SignalInterrupt
3226
3195
3227 def run():
3196 def run():
3228 sys.exit(dispatch(sys.argv[1:]))
3197 sys.exit(dispatch(sys.argv[1:]))
3229
3198
3230 class ParseError(Exception):
3199 class ParseError(Exception):
3231 """Exception raised on errors in parsing the command line."""
3200 """Exception raised on errors in parsing the command line."""
3232
3201
3233 def parse(ui, args):
3202 def parse(ui, args):
3234 options = {}
3203 options = {}
3235 cmdoptions = {}
3204 cmdoptions = {}
3236
3205
3237 try:
3206 try:
3238 args = fancyopts.fancyopts(args, globalopts, options)
3207 args = fancyopts.fancyopts(args, globalopts, options)
3239 except fancyopts.getopt.GetoptError, inst:
3208 except fancyopts.getopt.GetoptError, inst:
3240 raise ParseError(None, inst)
3209 raise ParseError(None, inst)
3241
3210
3242 if args:
3211 if args:
3243 cmd, args = args[0], args[1:]
3212 cmd, args = args[0], args[1:]
3244 aliases, i = findcmd(cmd)
3213 aliases, i = findcmd(cmd)
3245 cmd = aliases[0]
3214 cmd = aliases[0]
3246 defaults = ui.config("defaults", cmd)
3215 defaults = ui.config("defaults", cmd)
3247 if defaults:
3216 if defaults:
3248 args = defaults.split() + args
3217 args = defaults.split() + args
3249 c = list(i[1])
3218 c = list(i[1])
3250 else:
3219 else:
3251 cmd = None
3220 cmd = None
3252 c = []
3221 c = []
3253
3222
3254 # combine global options into local
3223 # combine global options into local
3255 for o in globalopts:
3224 for o in globalopts:
3256 c.append((o[0], o[1], options[o[1]], o[3]))
3225 c.append((o[0], o[1], options[o[1]], o[3]))
3257
3226
3258 try:
3227 try:
3259 args = fancyopts.fancyopts(args, c, cmdoptions)
3228 args = fancyopts.fancyopts(args, c, cmdoptions)
3260 except fancyopts.getopt.GetoptError, inst:
3229 except fancyopts.getopt.GetoptError, inst:
3261 raise ParseError(cmd, inst)
3230 raise ParseError(cmd, inst)
3262
3231
3263 # separate global options back out
3232 # separate global options back out
3264 for o in globalopts:
3233 for o in globalopts:
3265 n = o[1]
3234 n = o[1]
3266 options[n] = cmdoptions[n]
3235 options[n] = cmdoptions[n]
3267 del cmdoptions[n]
3236 del cmdoptions[n]
3268
3237
3269 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3238 return (cmd, cmd and i[0] or None, args, options, cmdoptions)
3270
3239
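# --- illustrative sketch, not part of the original changeset ---
# parse() above runs two passes: it first consumes global options up to
# the command name, prepends the command's [defaults] arguments from the
# configuration, re-parses the rest against the command's options merged
# with the global ones (so globals may also follow the command), and then
# separates the global values back out.  The cut-down version below shows
# the same shape with the standard getopt module instead of fancyopts;
# the option tables and the DEFAULTS dict are invented for the demonstration.

import getopt

GLOBAL_LONGS = ['verbose', 'quiet']
CMD_LONGS = {'log': ['limit=', 'patch']}
DEFAULTS = {'log': ['--limit', '10']}    # stand-in for ui.config('defaults', cmd)

def parse_cmdline(args):
    # pass 1: global options only; getopt stops at the first non-option
    globalvals, rest = getopt.getopt(args, '', GLOBAL_LONGS)
    options = dict(globalvals)
    if not rest:
        return None, options, {}, []
    cmd, args = rest[0], DEFAULTS.get(rest[0], []) + list(rest[1:])
    # pass 2: command options plus any trailing global options
    cmdvals, positional = getopt.getopt(args, '',
                                        CMD_LONGS.get(cmd, []) + GLOBAL_LONGS)
    cmdoptions = dict(cmdvals)
    for name in GLOBAL_LONGS:                # separate globals back out
        flag = '--' + name
        if flag in cmdoptions:
            options[flag] = cmdoptions.pop(flag)
    return cmd, options, cmdoptions, positional

if __name__ == '__main__':
    print(parse_cmdline(['--verbose', 'log', '--patch', 'somefile']))
    # roughly: ('log', {'--verbose': ''}, {'--limit': '10', '--patch': ''}, ['somefile'])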
3271 external = {}
3240 external = {}
3272
3241
3273 def findext(name):
3242 def findext(name):
3274 '''return module with given extension name'''
3243 '''return module with given extension name'''
3275 try:
3244 try:
3276 return sys.modules[external[name]]
3245 return sys.modules[external[name]]
3277 except KeyError:
3246 except KeyError:
3278 for k, v in external.iteritems():
3247 for k, v in external.iteritems():
3279 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3248 if k.endswith('.' + name) or k.endswith('/' + name) or v == name:
3280 return sys.modules[v]
3249 return sys.modules[v]
3281 raise KeyError(name)
3250 raise KeyError(name)
3282
3251
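# --- illustrative sketch, not part of the original changeset ---
# findext() above maps an extension name back to its loaded module: it
# first tries the name recorded in `external` directly, then falls back
# to suffix matching so that 'mq' also finds an entry registered as
# 'hgext.mq' (or a path ending in '/mq').  The toy lookup below uses
# ordinary dicts of invented names in place of `external` and sys.modules.

def find_extension(external, modules, name):
    try:
        return modules[external[name]]
    except KeyError:
        for key, modname in external.items():
            if (key.endswith('.' + name) or key.endswith('/' + name)
                    or modname == name):
                return modules[modname]
        raise KeyError(name)

if __name__ == '__main__':
    external = {'hgext.mq': 'hgext_mq', 'extra/notify': 'hgext_notify'}
    modules = {'hgext_mq': '<module mq>', 'hgext_notify': '<module notify>'}
    print(find_extension(external, modules, 'hgext.mq'))  # direct key lookup
    print(find_extension(external, modules, 'mq'))        # suffix match on 'hgext.mq'
    print(find_extension(external, modules, 'notify'))    # suffix match on 'extra/notify'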
3283 def dispatch(args):
3252 def dispatch(args):
3284 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3253 for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
3285 num = getattr(signal, name, None)
3254 num = getattr(signal, name, None)
3286 if num: signal.signal(num, catchterm)
3255 if num: signal.signal(num, catchterm)
3287
3256
3288 try:
3257 try:
3289 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3258 u = ui.ui(traceback='--traceback' in sys.argv[1:])
3290 except util.Abort, inst:
3259 except util.Abort, inst:
3291 sys.stderr.write(_("abort: %s\n") % inst)
3260 sys.stderr.write(_("abort: %s\n") % inst)
3292 return -1
3261 return -1
3293
3262
3294 for ext_name, load_from_name in u.extensions():
3263 for ext_name, load_from_name in u.extensions():
3295 try:
3264 try:
3296 if load_from_name:
3265 if load_from_name:
3297 # the module will be loaded in sys.modules
3266 # the module will be loaded in sys.modules
3298 # choose a unique name so that it doesn't
3267 # choose a unique name so that it doesn't
3299 # conflict with other modules
3268 # conflict with other modules
3300 module_name = "hgext_%s" % ext_name.replace('.', '_')
3269 module_name = "hgext_%s" % ext_name.replace('.', '_')
3301 mod = imp.load_source(module_name, load_from_name)
3270 mod = imp.load_source(module_name, load_from_name)
3302 else:
3271 else:
3303 def importh(name):
3272 def importh(name):
3304 mod = __import__(name)
3273 mod = __import__(name)
3305 components = name.split('.')
3274 components = name.split('.')
3306 for comp in components[1:]:
3275 for comp in components[1:]:
3307 mod = getattr(mod, comp)
3276 mod = getattr(mod, comp)
3308 return mod
3277 return mod
3309 try:
3278 try:
3310 mod = importh("hgext.%s" % ext_name)
3279 mod = importh("hgext.%s" % ext_name)
3311 except ImportError:
3280 except ImportError:
3312 mod = importh(ext_name)
3281 mod = importh(ext_name)
3313 external[ext_name] = mod.__name__
3282 external[ext_name] = mod.__name__
3314 except (util.SignalInterrupt, KeyboardInterrupt):
3283 except (util.SignalInterrupt, KeyboardInterrupt):
3315 raise
3284 raise
3316 except Exception, inst:
3285 except Exception, inst:
3317 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3286 u.warn(_("*** failed to import extension %s: %s\n") % (ext_name, inst))
3318 if u.print_exc():
3287 if u.print_exc():
3319 return 1
3288 return 1
3320
3289
3321 for name in external.itervalues():
3290 for name in external.itervalues():
3322 mod = sys.modules[name]
3291 mod = sys.modules[name]
3323 uisetup = getattr(mod, 'uisetup', None)
3292 uisetup = getattr(mod, 'uisetup', None)
3324 if uisetup:
3293 if uisetup:
3325 uisetup(u)
3294 uisetup(u)
3326 cmdtable = getattr(mod, 'cmdtable', {})
3295 cmdtable = getattr(mod, 'cmdtable', {})
3327 for t in cmdtable:
3296 for t in cmdtable:
3328 if t in table:
3297 if t in table:
3329 u.warn(_("module %s overrides %s\n") % (name, t))
3298 u.warn(_("module %s overrides %s\n") % (name, t))
3330 table.update(cmdtable)
3299 table.update(cmdtable)
3331
3300
3332 try:
3301 try:
3333 cmd, func, args, options, cmdoptions = parse(u, args)
3302 cmd, func, args, options, cmdoptions = parse(u, args)
3334 if options["time"]:
3303 if options["time"]:
3335 def get_times():
3304 def get_times():
3336 t = os.times()
3305 t = os.times()
3337 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3306 if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
3338 t = (t[0], t[1], t[2], t[3], time.clock())
3307 t = (t[0], t[1], t[2], t[3], time.clock())
3339 return t
3308 return t
3340 s = get_times()
3309 s = get_times()
3341 def print_time():
3310 def print_time():
3342 t = get_times()
3311 t = get_times()
3343 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3312 u.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
3344 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3313 (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
3345 atexit.register(print_time)
3314 atexit.register(print_time)
3346
3315
3347 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3316 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3348 not options["noninteractive"], options["traceback"],
3317 not options["noninteractive"], options["traceback"],
3349 options["config"])
3318 options["config"])
3350
3319
3351 # enter the debugger before command execution
3320 # enter the debugger before command execution
3352 if options['debugger']:
3321 if options['debugger']:
3353 pdb.set_trace()
3322 pdb.set_trace()
3354
3323
3355 try:
3324 try:
3356 if options['cwd']:
3325 if options['cwd']:
3357 try:
3326 try:
3358 os.chdir(options['cwd'])
3327 os.chdir(options['cwd'])
3359 except OSError, inst:
3328 except OSError, inst:
3360 raise util.Abort('%s: %s' %
3329 raise util.Abort('%s: %s' %
3361 (options['cwd'], inst.strerror))
3330 (options['cwd'], inst.strerror))
3362
3331
3363 path = u.expandpath(options["repository"]) or ""
3332 path = u.expandpath(options["repository"]) or ""
3364 repo = path and hg.repository(u, path=path) or None
3333 repo = path and hg.repository(u, path=path) or None
3365
3334
3366 if options['help']:
3335 if options['help']:
3367 return help_(u, cmd, options['version'])
3336 return help_(u, cmd, options['version'])
3368 elif options['version']:
3337 elif options['version']:
3369 return show_version(u)
3338 return show_version(u)
3370 elif not cmd:
3339 elif not cmd:
3371 return help_(u, 'shortlist')
3340 return help_(u, 'shortlist')
3372
3341
3373 if cmd not in norepo.split():
3342 if cmd not in norepo.split():
3374 try:
3343 try:
3375 if not repo:
3344 if not repo:
3376 repo = hg.repository(u, path=path)
3345 repo = hg.repository(u, path=path)
3377 u = repo.ui
3346 u = repo.ui
3378 for name in external.itervalues():
3347 for name in external.itervalues():
3379 mod = sys.modules[name]
3348 mod = sys.modules[name]
3380 if hasattr(mod, 'reposetup'):
3349 if hasattr(mod, 'reposetup'):
3381 mod.reposetup(u, repo)
3350 mod.reposetup(u, repo)
3382 hg.repo_setup_hooks.append(mod.reposetup)
3351 hg.repo_setup_hooks.append(mod.reposetup)
3383 except hg.RepoError:
3352 except hg.RepoError:
3384 if cmd not in optionalrepo.split():
3353 if cmd not in optionalrepo.split():
3385 raise
3354 raise
3386 d = lambda: func(u, repo, *args, **cmdoptions)
3355 d = lambda: func(u, repo, *args, **cmdoptions)
3387 else:
3356 else:
3388 d = lambda: func(u, *args, **cmdoptions)
3357 d = lambda: func(u, *args, **cmdoptions)
3389
3358
3390 # reupdate the options, repo/.hg/hgrc may have changed them
3359 # reupdate the options, repo/.hg/hgrc may have changed them
3391 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3360 u.updateopts(options["verbose"], options["debug"], options["quiet"],
3392 not options["noninteractive"], options["traceback"],
3361 not options["noninteractive"], options["traceback"],
3393 options["config"])
3362 options["config"])
3394
3363
3395 try:
3364 try:
3396 if options['profile']:
3365 if options['profile']:
3397 import hotshot, hotshot.stats
3366 import hotshot, hotshot.stats
3398 prof = hotshot.Profile("hg.prof")
3367 prof = hotshot.Profile("hg.prof")
3399 try:
3368 try:
3400 try:
3369 try:
3401 return prof.runcall(d)
3370 return prof.runcall(d)
3402 except:
3371 except:
3403 try:
3372 try:
3404 u.warn(_('exception raised - generating '
3373 u.warn(_('exception raised - generating '
3405 'profile anyway\n'))
3374 'profile anyway\n'))
3406 except:
3375 except:
3407 pass
3376 pass
3408 raise
3377 raise
3409 finally:
3378 finally:
3410 prof.close()
3379 prof.close()
3411 stats = hotshot.stats.load("hg.prof")
3380 stats = hotshot.stats.load("hg.prof")
3412 stats.strip_dirs()
3381 stats.strip_dirs()
3413 stats.sort_stats('time', 'calls')
3382 stats.sort_stats('time', 'calls')
3414 stats.print_stats(40)
3383 stats.print_stats(40)
3415 elif options['lsprof']:
3384 elif options['lsprof']:
3416 try:
3385 try:
3417 from mercurial import lsprof
3386 from mercurial import lsprof
3418 except ImportError:
3387 except ImportError:
3419 raise util.Abort(_(
3388 raise util.Abort(_(
3420 'lsprof not available - install from '
3389 'lsprof not available - install from '
3421 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3390 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
3422 p = lsprof.Profiler()
3391 p = lsprof.Profiler()
3423 p.enable(subcalls=True)
3392 p.enable(subcalls=True)
3424 try:
3393 try:
3425 return d()
3394 return d()
3426 finally:
3395 finally:
3427 p.disable()
3396 p.disable()
3428 stats = lsprof.Stats(p.getstats())
3397 stats = lsprof.Stats(p.getstats())
3429 stats.sort()
3398 stats.sort()
3430 stats.pprint(top=10, file=sys.stderr, climit=5)
3399 stats.pprint(top=10, file=sys.stderr, climit=5)
3431 else:
3400 else:
3432 return d()
3401 return d()
3433 finally:
3402 finally:
3434 u.flush()
3403 u.flush()
3435 except:
3404 except:
3436 # enter the debugger when we hit an exception
3405 # enter the debugger when we hit an exception
3437 if options['debugger']:
3406 if options['debugger']:
3438 pdb.post_mortem(sys.exc_info()[2])
3407 pdb.post_mortem(sys.exc_info()[2])
3439 u.print_exc()
3408 u.print_exc()
3440 raise
3409 raise
3441 except ParseError, inst:
3410 except ParseError, inst:
3442 if inst.args[0]:
3411 if inst.args[0]:
3443 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3412 u.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
3444 help_(u, inst.args[0])
3413 help_(u, inst.args[0])
3445 else:
3414 else:
3446 u.warn(_("hg: %s\n") % inst.args[1])
3415 u.warn(_("hg: %s\n") % inst.args[1])
3447 help_(u, 'shortlist')
3416 help_(u, 'shortlist')
3448 except AmbiguousCommand, inst:
3417 except AmbiguousCommand, inst:
3449 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3418 u.warn(_("hg: command '%s' is ambiguous:\n %s\n") %
3450 (inst.args[0], " ".join(inst.args[1])))
3419 (inst.args[0], " ".join(inst.args[1])))
3451 except UnknownCommand, inst:
3420 except UnknownCommand, inst:
3452 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3421 u.warn(_("hg: unknown command '%s'\n") % inst.args[0])
3453 help_(u, 'shortlist')
3422 help_(u, 'shortlist')
3454 except hg.RepoError, inst:
3423 except hg.RepoError, inst:
3455 u.warn(_("abort: %s!\n") % inst)
3424 u.warn(_("abort: %s!\n") % inst)
3456 except lock.LockHeld, inst:
3425 except lock.LockHeld, inst:
3457 if inst.errno == errno.ETIMEDOUT:
3426 if inst.errno == errno.ETIMEDOUT:
3458 reason = _('timed out waiting for lock held by %s') % inst.locker
3427 reason = _('timed out waiting for lock held by %s') % inst.locker
3459 else:
3428 else:
3460 reason = _('lock held by %s') % inst.locker
3429 reason = _('lock held by %s') % inst.locker
3461 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3430 u.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
3462 except lock.LockUnavailable, inst:
3431 except lock.LockUnavailable, inst:
3463 u.warn(_("abort: could not lock %s: %s\n") %
3432 u.warn(_("abort: could not lock %s: %s\n") %
3464 (inst.desc or inst.filename, inst.strerror))
3433 (inst.desc or inst.filename, inst.strerror))
3465 except revlog.RevlogError, inst:
3434 except revlog.RevlogError, inst:
3466 u.warn(_("abort: "), inst, "!\n")
3435 u.warn(_("abort: "), inst, "!\n")
3467 except util.SignalInterrupt:
3436 except util.SignalInterrupt:
3468 u.warn(_("killed!\n"))
3437 u.warn(_("killed!\n"))
3469 except KeyboardInterrupt:
3438 except KeyboardInterrupt:
3470 try:
3439 try:
3471 u.warn(_("interrupted!\n"))
3440 u.warn(_("interrupted!\n"))
3472 except IOError, inst:
3441 except IOError, inst:
3473 if inst.errno == errno.EPIPE:
3442 if inst.errno == errno.EPIPE:
3474 if u.debugflag:
3443 if u.debugflag:
3475 u.warn(_("\nbroken pipe\n"))
3444 u.warn(_("\nbroken pipe\n"))
3476 else:
3445 else:
3477 raise
3446 raise
3478 except IOError, inst:
3447 except IOError, inst:
3479 if hasattr(inst, "code"):
3448 if hasattr(inst, "code"):
3480 u.warn(_("abort: %s\n") % inst)
3449 u.warn(_("abort: %s\n") % inst)
3481 elif hasattr(inst, "reason"):
3450 elif hasattr(inst, "reason"):
3482 u.warn(_("abort: error: %s\n") % inst.reason[1])
3451 u.warn(_("abort: error: %s\n") % inst.reason[1])
3483 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3452 elif hasattr(inst, "args") and inst[0] == errno.EPIPE:
3484 if u.debugflag:
3453 if u.debugflag:
3485 u.warn(_("broken pipe\n"))
3454 u.warn(_("broken pipe\n"))
3486 elif getattr(inst, "strerror", None):
3455 elif getattr(inst, "strerror", None):
3487 if getattr(inst, "filename", None):
3456 if getattr(inst, "filename", None):
3488 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3457 u.warn(_("abort: %s - %s\n") % (inst.strerror, inst.filename))
3489 else:
3458 else:
3490 u.warn(_("abort: %s\n") % inst.strerror)
3459 u.warn(_("abort: %s\n") % inst.strerror)
3491 else:
3460 else:
3492 raise
3461 raise
3493 except OSError, inst:
3462 except OSError, inst:
3494 if hasattr(inst, "filename"):
3463 if hasattr(inst, "filename"):
3495 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3464 u.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
3496 else:
3465 else:
3497 u.warn(_("abort: %s\n") % inst.strerror)
3466 u.warn(_("abort: %s\n") % inst.strerror)
3498 except util.Abort, inst:
3467 except util.Abort, inst:
3499 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3468 u.warn(_('abort: '), inst.args[0] % inst.args[1:], '\n')
3500 except TypeError, inst:
3469 except TypeError, inst:
3501 # was this an argument error?
3470 # was this an argument error?
3502 tb = traceback.extract_tb(sys.exc_info()[2])
3471 tb = traceback.extract_tb(sys.exc_info()[2])
3503 if len(tb) > 2: # no
3472 if len(tb) > 2: # no
3504 raise
3473 raise
3505 u.debug(inst, "\n")
3474 u.debug(inst, "\n")
3506 u.warn(_("%s: invalid arguments\n") % cmd)
3475 u.warn(_("%s: invalid arguments\n") % cmd)
3507 help_(u, cmd)
3476 help_(u, cmd)
3508 except SystemExit, inst:
3477 except SystemExit, inst:
3509 # Commands shouldn't sys.exit directly, but give a return code.
3478 # Commands shouldn't sys.exit directly, but give a return code.
3510 # Just in case, catch this and pass the exit code to the caller.
3479 # Just in case, catch this and pass the exit code to the caller.
3511 return inst.code
3480 return inst.code
3512 except:
3481 except:
3513 u.warn(_("** unknown exception encountered, details follow\n"))
3482 u.warn(_("** unknown exception encountered, details follow\n"))
3514 u.warn(_("** report bug details to "
3483 u.warn(_("** report bug details to "
3515 "http://www.selenic.com/mercurial/bts\n"))
3484 "http://www.selenic.com/mercurial/bts\n"))
3516 u.warn(_("** or mercurial@selenic.com\n"))
3485 u.warn(_("** or mercurial@selenic.com\n"))
3517 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3486 u.warn(_("** Mercurial Distributed SCM (version %s)\n")
3518 % version.get_version())
3487 % version.get_version())
3519 raise
3488 raise
3520
3489
3521 return -1
3490 return -1
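The hunk above is the tail of the command dispatcher: each expected failure (parse errors, ambiguous or unknown commands, repository and lock errors, aborts, broken pipes) is turned into a short message on stderr plus a -1 return code, and only genuinely unexpected exceptions are re-raised after printing bug-report pointers. A minimal sketch of that catch-and-translate pattern follows; the exception classes and run_command() are illustrative placeholders, not Mercurial's actual classes or command table.

# Minimal sketch of a catch-and-translate command dispatcher.
# AbortError, UnknownCommandError and run_command() are illustrative
# placeholders, not Mercurial's real API.
import sys

class AbortError(Exception):
    '''expected failure: print a short message, no traceback'''

class UnknownCommandError(Exception):
    '''user typo: print a hint instead of a traceback'''

def run_command(argv):
    # stand-in for looking up and running a real command
    if not argv:
        raise UnknownCommandError('(none)')
    if argv[0] == 'fail':
        raise AbortError('simulated failure')
    print('ran %s' % argv[0])
    return 0

def dispatch(argv):
    try:
        return run_command(argv)
    except AbortError as inst:
        sys.stderr.write('abort: %s\n' % inst)
    except UnknownCommandError as inst:
        sys.stderr.write("unknown command '%s'\n" % inst)
    except KeyboardInterrupt:
        sys.stderr.write('interrupted!\n')
    except SystemExit as inst:
        # commands should return a code rather than call sys.exit(),
        # but tolerate it and pass the code through
        return inst.code
    return -1

if __name__ == '__main__':
    sys.exit(dispatch(sys.argv[1:]))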
@@ -1,128 +1,127 b''
1 # filelog.py - file history class for mercurial
1 # filelog.py - file history class for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from revlog import *
8 from revlog import *
9 from demandload import *
9 from demandload import *
10 demandload(globals(), "bdiff os")
10 demandload(globals(), "bdiff os")
11
11
12 class filelog(revlog):
12 class filelog(revlog):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
13 def __init__(self, opener, path, defversion=REVLOG_DEFAULT_VERSION):
14 revlog.__init__(self, opener,
14 revlog.__init__(self, opener,
15 os.path.join("data", self.encodedir(path + ".i")),
15 os.path.join("data", self.encodedir(path + ".i")),
16 os.path.join("data", self.encodedir(path + ".d")),
16 os.path.join("data", self.encodedir(path + ".d")),
17 defversion)
17 defversion)
18
18
19 # This avoids a collision between a file named foo and a dir named
19 # This avoids a collision between a file named foo and a dir named
20 # foo.i or foo.d
20 # foo.i or foo.d
21 def encodedir(self, path):
21 def encodedir(self, path):
22 return (path
22 return (path
23 .replace(".hg/", ".hg.hg/")
23 .replace(".hg/", ".hg.hg/")
24 .replace(".i/", ".i.hg/")
24 .replace(".i/", ".i.hg/")
25 .replace(".d/", ".d.hg/"))
25 .replace(".d/", ".d.hg/"))
26
26
27 def decodedir(self, path):
27 def decodedir(self, path):
28 return (path
28 return (path
29 .replace(".d.hg/", ".d/")
29 .replace(".d.hg/", ".d/")
30 .replace(".i.hg/", ".i/")
30 .replace(".i.hg/", ".i/")
31 .replace(".hg.hg/", ".hg/"))
31 .replace(".hg.hg/", ".hg/"))
32
32
33 def read(self, node):
33 def read(self, node):
34 t = self.revision(node)
34 t = self.revision(node)
35 if not t.startswith('\1\n'):
35 if not t.startswith('\1\n'):
36 return t
36 return t
37 s = t.index('\1\n', 2)
37 s = t.index('\1\n', 2)
38 return t[s+2:]
38 return t[s+2:]
39
39
40 def readmeta(self, node):
40 def readmeta(self, node):
41 t = self.revision(node)
41 t = self.revision(node)
42 if not t.startswith('\1\n'):
42 if not t.startswith('\1\n'):
43 return {}
43 return {}
44 s = t.index('\1\n', 2)
44 s = t.index('\1\n', 2)
45 mt = t[2:s]
45 mt = t[2:s]
46 m = {}
46 m = {}
47 for l in mt.splitlines():
47 for l in mt.splitlines():
48 k, v = l.split(": ", 1)
48 k, v = l.split(": ", 1)
49 m[k] = v
49 m[k] = v
50 return m
50 return m
51
51
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
52 def add(self, text, meta, transaction, link, p1=None, p2=None):
53 if meta or text.startswith('\1\n'):
53 if meta or text.startswith('\1\n'):
54 mt = ""
54 mt = ""
55 if meta:
55 if meta:
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
56 mt = [ "%s: %s\n" % (k, v) for k,v in meta.items() ]
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
57 text = "\1\n%s\1\n%s" % ("".join(mt), text)
58 return self.addrevision(text, transaction, link, p1, p2)
58 return self.addrevision(text, transaction, link, p1, p2)
59
59
60 def renamed(self, node):
60 def renamed(self, node):
61 if self.parents(node)[0] != nullid:
61 if self.parents(node)[0] != nullid:
62 return False
62 return False
63 m = self.readmeta(node)
63 m = self.readmeta(node)
64 if m and m.has_key("copy"):
64 if m and m.has_key("copy"):
65 return (m["copy"], bin(m["copyrev"]))
65 return (m["copy"], bin(m["copyrev"]))
66 return False
66 return False
67
67
68 def size(self, rev):
69 """return the size of a given revision"""
70
71 # for revisions with renames, we have to go the slow way
72 node = self.node(rev)
73 if self.renamed(node):
74 return len(self.read(node))
75
76 return revlog.size(self, rev)
77
68 def cmp(self, node, text):
78 def cmp(self, node, text):
69 """compare text with a given file revision"""
79 """compare text with a given file revision"""
70
80
71 # for renames, we have to go the slow way
81 # for renames, we have to go the slow way
72 if self.renamed(node):
82 if self.renamed(node):
73 t2 = self.read(node)
83 t2 = self.read(node)
74 return t2 == text
84 return t2 != text
75
76 p1, p2 = self.parents(node)
77 h = hash(text, p1, p2)
78
79 return h != node
80
85
81 def makenode(self, node, text):
86 return revlog.cmp(self, node, text)
82 """calculate a file nodeid for text, descended or possibly
83 unchanged from node"""
84
85 if self.cmp(node, text):
86 return hash(text, node, nullid)
87 return node
88
87
89 def annotate(self, node):
88 def annotate(self, node):
90
89
91 def decorate(text, rev):
90 def decorate(text, rev):
92 return ([rev] * len(text.splitlines()), text)
91 return ([rev] * len(text.splitlines()), text)
93
92
94 def pair(parent, child):
93 def pair(parent, child):
95 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
94 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
96 child[0][b1:b2] = parent[0][a1:a2]
95 child[0][b1:b2] = parent[0][a1:a2]
97 return child
96 return child
98
97
99 # find all ancestors
98 # find all ancestors
100 needed = {node:1}
99 needed = {node:1}
101 visit = [node]
100 visit = [node]
102 while visit:
101 while visit:
103 n = visit.pop(0)
102 n = visit.pop(0)
104 for p in self.parents(n):
103 for p in self.parents(n):
105 if p not in needed:
104 if p not in needed:
106 needed[p] = 1
105 needed[p] = 1
107 visit.append(p)
106 visit.append(p)
108 else:
107 else:
109 # count how many times we'll use this
108 # count how many times we'll use this
110 needed[p] += 1
109 needed[p] += 1
111
110
112 # sort by revision which is a topological order
111 # sort by revision which is a topological order
113 visit = [ (self.rev(n), n) for n in needed.keys() ]
112 visit = [ (self.rev(n), n) for n in needed.keys() ]
114 visit.sort()
113 visit.sort()
115 hist = {}
114 hist = {}
116
115
117 for r,n in visit:
116 for r,n in visit:
118 curr = decorate(self.read(n), self.linkrev(n))
117 curr = decorate(self.read(n), self.linkrev(n))
119 for p in self.parents(n):
118 for p in self.parents(n):
120 if p != nullid:
119 if p != nullid:
121 curr = pair(hist[p], curr)
120 curr = pair(hist[p], curr)
122 # trim the history of unneeded revs
121 # trim the history of unneeded revs
123 needed[p] -= 1
122 needed[p] -= 1
124 if not needed[p]:
123 if not needed[p]:
125 del hist[p]
124 del hist[p]
126 hist[n] = curr
125 hist[n] = curr
127
126
128 return zip(hist[n][0], hist[n][1].splitlines(1))
127 return zip(hist[n][0], hist[n][1].splitlines(1))
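Before moving on to the hgweb changes, note the on-disk framing that filelog.read(), readmeta() and add() above agree on: a revision carrying metadata (for example the copy/copyrev keys consulted by renamed()) is stored as \1\n, a block of "key: value" lines, another \1\n, then the file text; plain revisions are stored verbatim, and text that happens to start with \1\n gets an empty header so it still round-trips. A self-contained sketch of that framing, using hypothetical packmeta()/unpackmeta() helpers of my own naming rather than Mercurial's API:

# Sketch of the filelog metadata framing; packmeta()/unpackmeta() are
# illustrative helpers, not part of Mercurial's API.

def packmeta(meta, text):
    if not meta and not text.startswith('\1\n'):
        return text                      # ordinary revision, stored as-is
    lines = ['%s: %s\n' % (k, v) for k, v in sorted(meta.items())]
    return '\1\n%s\1\n%s' % (''.join(lines), text)

def unpackmeta(data):
    if not data.startswith('\1\n'):
        return {}, data
    end = data.index('\1\n', 2)
    meta = {}
    for line in data[2:end].splitlines():
        k, v = line.split(': ', 1)
        meta[k] = v
    return meta, data[end + 2:]

if __name__ == '__main__':
    blob = packmeta({'copy': 'old/name.txt', 'copyrev': '0' * 40}, 'hello\n')
    meta, text = unpackmeta(blob)
    assert text == 'hello\n' and meta['copy'] == 'old/name.txt'
    print(meta)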
@@ -1,981 +1,981 b''
1 # hgweb/hgweb_mod.py - Web interface for a repository.
1 # hgweb/hgweb_mod.py - Web interface for a repository.
2 #
2 #
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
3 # Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
5 #
5 #
6 # This software may be used and distributed according to the terms
6 # This software may be used and distributed according to the terms
7 # of the GNU General Public License, incorporated herein by reference.
7 # of the GNU General Public License, incorporated herein by reference.
8
8
9 import os
9 import os
10 import os.path
10 import os.path
11 import mimetypes
11 import mimetypes
12 from mercurial.demandload import demandload
12 from mercurial.demandload import demandload
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
13 demandload(globals(), "re zlib ConfigParser mimetools cStringIO sys tempfile")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone")
14 demandload(globals(), "mercurial:mdiff,ui,hg,util,archival,streamclone,patch")
15 demandload(globals(), "mercurial:templater")
15 demandload(globals(), "mercurial:templater")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
16 demandload(globals(), "mercurial.hgweb.common:get_mtime,staticfile")
17 from mercurial.node import *
17 from mercurial.node import *
18 from mercurial.i18n import gettext as _
18 from mercurial.i18n import gettext as _
19
19
20 def _up(p):
20 def _up(p):
21 if p[0] != "/":
21 if p[0] != "/":
22 p = "/" + p
22 p = "/" + p
23 if p[-1] == "/":
23 if p[-1] == "/":
24 p = p[:-1]
24 p = p[:-1]
25 up = os.path.dirname(p)
25 up = os.path.dirname(p)
26 if up == "/":
26 if up == "/":
27 return "/"
27 return "/"
28 return up + "/"
28 return up + "/"
29
29
30 class hgweb(object):
30 class hgweb(object):
31 def __init__(self, repo, name=None):
31 def __init__(self, repo, name=None):
32 if type(repo) == type(""):
32 if type(repo) == type(""):
33 self.repo = hg.repository(ui.ui(), repo)
33 self.repo = hg.repository(ui.ui(), repo)
34 else:
34 else:
35 self.repo = repo
35 self.repo = repo
36
36
37 self.mtime = -1
37 self.mtime = -1
38 self.reponame = name
38 self.reponame = name
39 self.archives = 'zip', 'gz', 'bz2'
39 self.archives = 'zip', 'gz', 'bz2'
40 self.stripecount = 1
40 self.stripecount = 1
41 self.templatepath = self.repo.ui.config("web", "templates",
41 self.templatepath = self.repo.ui.config("web", "templates",
42 templater.templatepath())
42 templater.templatepath())
43
43
44 def refresh(self):
44 def refresh(self):
45 mtime = get_mtime(self.repo.root)
45 mtime = get_mtime(self.repo.root)
46 if mtime != self.mtime:
46 if mtime != self.mtime:
47 self.mtime = mtime
47 self.mtime = mtime
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
48 self.repo = hg.repository(self.repo.ui, self.repo.root)
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
49 self.maxchanges = int(self.repo.ui.config("web", "maxchanges", 10))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
50 self.stripecount = int(self.repo.ui.config("web", "stripes", 1))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
51 self.maxshortchanges = int(self.repo.ui.config("web", "maxshortchanges", 60))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
52 self.maxfiles = int(self.repo.ui.config("web", "maxfiles", 10))
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
53 self.allowpull = self.repo.ui.configbool("web", "allowpull", True)
54
54
55 def archivelist(self, nodeid):
55 def archivelist(self, nodeid):
56 allowed = self.repo.ui.configlist("web", "allow_archive")
56 allowed = self.repo.ui.configlist("web", "allow_archive")
57 for i in self.archives:
57 for i in self.archives:
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
58 if i in allowed or self.repo.ui.configbool("web", "allow" + i):
59 yield {"type" : i, "node" : nodeid, "url": ""}
59 yield {"type" : i, "node" : nodeid, "url": ""}
60
60
61 def listfiles(self, files, mf):
61 def listfiles(self, files, mf):
62 for f in files[:self.maxfiles]:
62 for f in files[:self.maxfiles]:
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
63 yield self.t("filenodelink", node=hex(mf[f]), file=f)
64 if len(files) > self.maxfiles:
64 if len(files) > self.maxfiles:
65 yield self.t("fileellipses")
65 yield self.t("fileellipses")
66
66
67 def listfilediffs(self, files, changeset):
67 def listfilediffs(self, files, changeset):
68 for f in files[:self.maxfiles]:
68 for f in files[:self.maxfiles]:
69 yield self.t("filedifflink", node=hex(changeset), file=f)
69 yield self.t("filedifflink", node=hex(changeset), file=f)
70 if len(files) > self.maxfiles:
70 if len(files) > self.maxfiles:
71 yield self.t("fileellipses")
71 yield self.t("fileellipses")
72
72
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
73 def siblings(self, siblings=[], rev=None, hiderev=None, **args):
74 if not rev:
74 if not rev:
75 rev = lambda x: ""
75 rev = lambda x: ""
76 siblings = [s for s in siblings if s != nullid]
76 siblings = [s for s in siblings if s != nullid]
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
77 if len(siblings) == 1 and rev(siblings[0]) == hiderev:
78 return
78 return
79 for s in siblings:
79 for s in siblings:
80 yield dict(node=hex(s), rev=rev(s), **args)
80 yield dict(node=hex(s), rev=rev(s), **args)
81
81
82 def renamelink(self, fl, node):
82 def renamelink(self, fl, node):
83 r = fl.renamed(node)
83 r = fl.renamed(node)
84 if r:
84 if r:
85 return [dict(file=r[0], node=hex(r[1]))]
85 return [dict(file=r[0], node=hex(r[1]))]
86 return []
86 return []
87
87
88 def showtag(self, t1, node=nullid, **args):
88 def showtag(self, t1, node=nullid, **args):
89 for t in self.repo.nodetags(node):
89 for t in self.repo.nodetags(node):
90 yield self.t(t1, tag=t, **args)
90 yield self.t(t1, tag=t, **args)
91
91
92 def diff(self, node1, node2, files):
92 def diff(self, node1, node2, files):
93 def filterfiles(filters, files):
93 def filterfiles(filters, files):
94 l = [x for x in files if x in filters]
94 l = [x for x in files if x in filters]
95
95
96 for t in filters:
96 for t in filters:
97 if t and t[-1] != os.sep:
97 if t and t[-1] != os.sep:
98 t += os.sep
98 t += os.sep
99 l += [x for x in files if x.startswith(t)]
99 l += [x for x in files if x.startswith(t)]
100 return l
100 return l
101
101
102 parity = [0]
102 parity = [0]
103 def diffblock(diff, f, fn):
103 def diffblock(diff, f, fn):
104 yield self.t("diffblock",
104 yield self.t("diffblock",
105 lines=prettyprintlines(diff),
105 lines=prettyprintlines(diff),
106 parity=parity[0],
106 parity=parity[0],
107 file=f,
107 file=f,
108 filenode=hex(fn or nullid))
108 filenode=hex(fn or nullid))
109 parity[0] = 1 - parity[0]
109 parity[0] = 1 - parity[0]
110
110
111 def prettyprintlines(diff):
111 def prettyprintlines(diff):
112 for l in diff.splitlines(1):
112 for l in diff.splitlines(1):
113 if l.startswith('+'):
113 if l.startswith('+'):
114 yield self.t("difflineplus", line=l)
114 yield self.t("difflineplus", line=l)
115 elif l.startswith('-'):
115 elif l.startswith('-'):
116 yield self.t("difflineminus", line=l)
116 yield self.t("difflineminus", line=l)
117 elif l.startswith('@'):
117 elif l.startswith('@'):
118 yield self.t("difflineat", line=l)
118 yield self.t("difflineat", line=l)
119 else:
119 else:
120 yield self.t("diffline", line=l)
120 yield self.t("diffline", line=l)
121
121
122 r = self.repo
122 r = self.repo
123 cl = r.changelog
123 cl = r.changelog
124 mf = r.manifest
124 mf = r.manifest
125 change1 = cl.read(node1)
125 change1 = cl.read(node1)
126 change2 = cl.read(node2)
126 change2 = cl.read(node2)
127 mmap1 = mf.read(change1[0])
127 mmap1 = mf.read(change1[0])
128 mmap2 = mf.read(change2[0])
128 mmap2 = mf.read(change2[0])
129 date1 = util.datestr(change1[2])
129 date1 = util.datestr(change1[2])
130 date2 = util.datestr(change2[2])
130 date2 = util.datestr(change2[2])
131
131
132 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
132 modified, added, removed, deleted, unknown = r.status(node1, node2)[:5]
133 if files:
133 if files:
134 modified, added, removed = map(lambda x: filterfiles(files, x),
134 modified, added, removed = map(lambda x: filterfiles(files, x),
135 (modified, added, removed))
135 (modified, added, removed))
136
136
137 diffopts = self.repo.ui.diffopts()
137 diffopts = patch.diffopts(ui)
138 for f in modified:
138 for f in modified:
139 to = r.file(f).read(mmap1[f])
139 to = r.file(f).read(mmap1[f])
140 tn = r.file(f).read(mmap2[f])
140 tn = r.file(f).read(mmap2[f])
141 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
141 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
142 opts=diffopts), f, tn)
142 opts=diffopts), f, tn)
143 for f in added:
143 for f in added:
144 to = None
144 to = None
145 tn = r.file(f).read(mmap2[f])
145 tn = r.file(f).read(mmap2[f])
146 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
146 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
147 opts=diffopts), f, tn)
147 opts=diffopts), f, tn)
148 for f in removed:
148 for f in removed:
149 to = r.file(f).read(mmap1[f])
149 to = r.file(f).read(mmap1[f])
150 tn = None
150 tn = None
151 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
151 yield diffblock(mdiff.unidiff(to, date1, tn, date2, f,
152 opts=diffopts), f, tn)
152 opts=diffopts), f, tn)
153
153
154 def changelog(self, pos, shortlog=False):
154 def changelog(self, pos, shortlog=False):
155 def changenav(**map):
155 def changenav(**map):
156 def seq(factor, maxchanges=None):
156 def seq(factor, maxchanges=None):
157 if maxchanges:
157 if maxchanges:
158 yield maxchanges
158 yield maxchanges
159 if maxchanges >= 20 and maxchanges <= 40:
159 if maxchanges >= 20 and maxchanges <= 40:
160 yield 50
160 yield 50
161 else:
161 else:
162 yield 1 * factor
162 yield 1 * factor
163 yield 3 * factor
163 yield 3 * factor
164 for f in seq(factor * 10):
164 for f in seq(factor * 10):
165 yield f
165 yield f
166
166
167 l = []
167 l = []
168 last = 0
168 last = 0
169 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
169 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
170 for f in seq(1, maxchanges):
170 for f in seq(1, maxchanges):
171 if f < maxchanges or f <= last:
171 if f < maxchanges or f <= last:
172 continue
172 continue
173 if f > count:
173 if f > count:
174 break
174 break
175 last = f
175 last = f
176 r = "%d" % f
176 r = "%d" % f
177 if pos + f < count:
177 if pos + f < count:
178 l.append(("+" + r, pos + f))
178 l.append(("+" + r, pos + f))
179 if pos - f >= 0:
179 if pos - f >= 0:
180 l.insert(0, ("-" + r, pos - f))
180 l.insert(0, ("-" + r, pos - f))
181
181
182 yield {"rev": 0, "label": "(0)"}
182 yield {"rev": 0, "label": "(0)"}
183
183
184 for label, rev in l:
184 for label, rev in l:
185 yield {"label": label, "rev": rev}
185 yield {"label": label, "rev": rev}
186
186
187 yield {"label": "tip", "rev": "tip"}
187 yield {"label": "tip", "rev": "tip"}
188
188
189 def changelist(**map):
189 def changelist(**map):
190 parity = (start - end) & 1
190 parity = (start - end) & 1
191 cl = self.repo.changelog
191 cl = self.repo.changelog
192 l = [] # build a list in forward order for efficiency
192 l = [] # build a list in forward order for efficiency
193 for i in range(start, end):
193 for i in range(start, end):
194 n = cl.node(i)
194 n = cl.node(i)
195 changes = cl.read(n)
195 changes = cl.read(n)
196 hn = hex(n)
196 hn = hex(n)
197
197
198 l.insert(0, {"parity": parity,
198 l.insert(0, {"parity": parity,
199 "author": changes[1],
199 "author": changes[1],
200 "parent": self.siblings(cl.parents(n), cl.rev,
200 "parent": self.siblings(cl.parents(n), cl.rev,
201 cl.rev(n) - 1),
201 cl.rev(n) - 1),
202 "child": self.siblings(cl.children(n), cl.rev,
202 "child": self.siblings(cl.children(n), cl.rev,
203 cl.rev(n) + 1),
203 cl.rev(n) + 1),
204 "changelogtag": self.showtag("changelogtag",n),
204 "changelogtag": self.showtag("changelogtag",n),
205 "manifest": hex(changes[0]),
205 "manifest": hex(changes[0]),
206 "desc": changes[4],
206 "desc": changes[4],
207 "date": changes[2],
207 "date": changes[2],
208 "files": self.listfilediffs(changes[3], n),
208 "files": self.listfilediffs(changes[3], n),
209 "rev": i,
209 "rev": i,
210 "node": hn})
210 "node": hn})
211 parity = 1 - parity
211 parity = 1 - parity
212
212
213 for e in l:
213 for e in l:
214 yield e
214 yield e
215
215
216 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
216 maxchanges = shortlog and self.maxshortchanges or self.maxchanges
217 cl = self.repo.changelog
217 cl = self.repo.changelog
218 mf = cl.read(cl.tip())[0]
218 mf = cl.read(cl.tip())[0]
219 count = cl.count()
219 count = cl.count()
220 start = max(0, pos - maxchanges + 1)
220 start = max(0, pos - maxchanges + 1)
221 end = min(count, start + maxchanges)
221 end = min(count, start + maxchanges)
222 pos = end - 1
222 pos = end - 1
223
223
224 yield self.t(shortlog and 'shortlog' or 'changelog',
224 yield self.t(shortlog and 'shortlog' or 'changelog',
225 changenav=changenav,
225 changenav=changenav,
226 manifest=hex(mf),
226 manifest=hex(mf),
227 rev=pos, changesets=count, entries=changelist,
227 rev=pos, changesets=count, entries=changelist,
228 archives=self.archivelist("tip"))
228 archives=self.archivelist("tip"))
229
229
230 def search(self, query):
230 def search(self, query):
231
231
232 def changelist(**map):
232 def changelist(**map):
233 cl = self.repo.changelog
233 cl = self.repo.changelog
234 count = 0
234 count = 0
235 qw = query.lower().split()
235 qw = query.lower().split()
236
236
237 def revgen():
237 def revgen():
238 for i in range(cl.count() - 1, 0, -100):
238 for i in range(cl.count() - 1, 0, -100):
239 l = []
239 l = []
240 for j in range(max(0, i - 100), i):
240 for j in range(max(0, i - 100), i):
241 n = cl.node(j)
241 n = cl.node(j)
242 changes = cl.read(n)
242 changes = cl.read(n)
243 l.append((n, j, changes))
243 l.append((n, j, changes))
244 l.reverse()
244 l.reverse()
245 for e in l:
245 for e in l:
246 yield e
246 yield e
247
247
248 for n, i, changes in revgen():
248 for n, i, changes in revgen():
249 miss = 0
249 miss = 0
250 for q in qw:
250 for q in qw:
251 if not (q in changes[1].lower() or
251 if not (q in changes[1].lower() or
252 q in changes[4].lower() or
252 q in changes[4].lower() or
253 q in " ".join(changes[3][:20]).lower()):
253 q in " ".join(changes[3][:20]).lower()):
254 miss = 1
254 miss = 1
255 break
255 break
256 if miss:
256 if miss:
257 continue
257 continue
258
258
259 count += 1
259 count += 1
260 hn = hex(n)
260 hn = hex(n)
261
261
262 yield self.t('searchentry',
262 yield self.t('searchentry',
263 parity=self.stripes(count),
263 parity=self.stripes(count),
264 author=changes[1],
264 author=changes[1],
265 parent=self.siblings(cl.parents(n), cl.rev),
265 parent=self.siblings(cl.parents(n), cl.rev),
266 child=self.siblings(cl.children(n), cl.rev),
266 child=self.siblings(cl.children(n), cl.rev),
267 changelogtag=self.showtag("changelogtag",n),
267 changelogtag=self.showtag("changelogtag",n),
268 manifest=hex(changes[0]),
268 manifest=hex(changes[0]),
269 desc=changes[4],
269 desc=changes[4],
270 date=changes[2],
270 date=changes[2],
271 files=self.listfilediffs(changes[3], n),
271 files=self.listfilediffs(changes[3], n),
272 rev=i,
272 rev=i,
273 node=hn)
273 node=hn)
274
274
275 if count >= self.maxchanges:
275 if count >= self.maxchanges:
276 break
276 break
277
277
278 cl = self.repo.changelog
278 cl = self.repo.changelog
279 mf = cl.read(cl.tip())[0]
279 mf = cl.read(cl.tip())[0]
280
280
281 yield self.t('search',
281 yield self.t('search',
282 query=query,
282 query=query,
283 manifest=hex(mf),
283 manifest=hex(mf),
284 entries=changelist)
284 entries=changelist)
285
285
286 def changeset(self, nodeid):
286 def changeset(self, nodeid):
287 cl = self.repo.changelog
287 cl = self.repo.changelog
288 n = self.repo.lookup(nodeid)
288 n = self.repo.lookup(nodeid)
289 nodeid = hex(n)
289 nodeid = hex(n)
290 changes = cl.read(n)
290 changes = cl.read(n)
291 p1 = cl.parents(n)[0]
291 p1 = cl.parents(n)[0]
292
292
293 files = []
293 files = []
294 mf = self.repo.manifest.read(changes[0])
294 mf = self.repo.manifest.read(changes[0])
295 for f in changes[3]:
295 for f in changes[3]:
296 files.append(self.t("filenodelink",
296 files.append(self.t("filenodelink",
297 filenode=hex(mf.get(f, nullid)), file=f))
297 filenode=hex(mf.get(f, nullid)), file=f))
298
298
299 def diff(**map):
299 def diff(**map):
300 yield self.diff(p1, n, None)
300 yield self.diff(p1, n, None)
301
301
302 yield self.t('changeset',
302 yield self.t('changeset',
303 diff=diff,
303 diff=diff,
304 rev=cl.rev(n),
304 rev=cl.rev(n),
305 node=nodeid,
305 node=nodeid,
306 parent=self.siblings(cl.parents(n), cl.rev),
306 parent=self.siblings(cl.parents(n), cl.rev),
307 child=self.siblings(cl.children(n), cl.rev),
307 child=self.siblings(cl.children(n), cl.rev),
308 changesettag=self.showtag("changesettag",n),
308 changesettag=self.showtag("changesettag",n),
309 manifest=hex(changes[0]),
309 manifest=hex(changes[0]),
310 author=changes[1],
310 author=changes[1],
311 desc=changes[4],
311 desc=changes[4],
312 date=changes[2],
312 date=changes[2],
313 files=files,
313 files=files,
314 archives=self.archivelist(nodeid))
314 archives=self.archivelist(nodeid))
315
315
316 def filelog(self, f, filenode):
316 def filelog(self, f, filenode):
317 cl = self.repo.changelog
317 cl = self.repo.changelog
318 fl = self.repo.file(f)
318 fl = self.repo.file(f)
319 filenode = hex(fl.lookup(filenode))
319 filenode = hex(fl.lookup(filenode))
320 count = fl.count()
320 count = fl.count()
321
321
322 def entries(**map):
322 def entries(**map):
323 l = []
323 l = []
324 parity = (count - 1) & 1
324 parity = (count - 1) & 1
325
325
326 for i in range(count):
326 for i in range(count):
327 n = fl.node(i)
327 n = fl.node(i)
328 lr = fl.linkrev(n)
328 lr = fl.linkrev(n)
329 cn = cl.node(lr)
329 cn = cl.node(lr)
330 cs = cl.read(cl.node(lr))
330 cs = cl.read(cl.node(lr))
331
331
332 l.insert(0, {"parity": parity,
332 l.insert(0, {"parity": parity,
333 "filenode": hex(n),
333 "filenode": hex(n),
334 "filerev": i,
334 "filerev": i,
335 "file": f,
335 "file": f,
336 "node": hex(cn),
336 "node": hex(cn),
337 "author": cs[1],
337 "author": cs[1],
338 "date": cs[2],
338 "date": cs[2],
339 "rename": self.renamelink(fl, n),
339 "rename": self.renamelink(fl, n),
340 "parent": self.siblings(fl.parents(n),
340 "parent": self.siblings(fl.parents(n),
341 fl.rev, file=f),
341 fl.rev, file=f),
342 "child": self.siblings(fl.children(n),
342 "child": self.siblings(fl.children(n),
343 fl.rev, file=f),
343 fl.rev, file=f),
344 "desc": cs[4]})
344 "desc": cs[4]})
345 parity = 1 - parity
345 parity = 1 - parity
346
346
347 for e in l:
347 for e in l:
348 yield e
348 yield e
349
349
350 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
350 yield self.t("filelog", file=f, filenode=filenode, entries=entries)
351
351
352 def filerevision(self, f, node):
352 def filerevision(self, f, node):
353 fl = self.repo.file(f)
353 fl = self.repo.file(f)
354 n = fl.lookup(node)
354 n = fl.lookup(node)
355 node = hex(n)
355 node = hex(n)
356 text = fl.read(n)
356 text = fl.read(n)
357 changerev = fl.linkrev(n)
357 changerev = fl.linkrev(n)
358 cl = self.repo.changelog
358 cl = self.repo.changelog
359 cn = cl.node(changerev)
359 cn = cl.node(changerev)
360 cs = cl.read(cn)
360 cs = cl.read(cn)
361 mfn = cs[0]
361 mfn = cs[0]
362
362
363 mt = mimetypes.guess_type(f)[0]
363 mt = mimetypes.guess_type(f)[0]
364 rawtext = text
364 rawtext = text
365 if util.binary(text):
365 if util.binary(text):
366 mt = mt or 'application/octet-stream'
366 mt = mt or 'application/octet-stream'
367 text = "(binary:%s)" % mt
367 text = "(binary:%s)" % mt
368 mt = mt or 'text/plain'
368 mt = mt or 'text/plain'
369
369
370 def lines():
370 def lines():
371 for l, t in enumerate(text.splitlines(1)):
371 for l, t in enumerate(text.splitlines(1)):
372 yield {"line": t,
372 yield {"line": t,
373 "linenumber": "% 6d" % (l + 1),
373 "linenumber": "% 6d" % (l + 1),
374 "parity": self.stripes(l)}
374 "parity": self.stripes(l)}
375
375
376 yield self.t("filerevision",
376 yield self.t("filerevision",
377 file=f,
377 file=f,
378 filenode=node,
378 filenode=node,
379 path=_up(f),
379 path=_up(f),
380 text=lines(),
380 text=lines(),
381 raw=rawtext,
381 raw=rawtext,
382 mimetype=mt,
382 mimetype=mt,
383 rev=changerev,
383 rev=changerev,
384 node=hex(cn),
384 node=hex(cn),
385 manifest=hex(mfn),
385 manifest=hex(mfn),
386 author=cs[1],
386 author=cs[1],
387 date=cs[2],
387 date=cs[2],
388 parent=self.siblings(fl.parents(n), fl.rev, file=f),
388 parent=self.siblings(fl.parents(n), fl.rev, file=f),
389 child=self.siblings(fl.children(n), fl.rev, file=f),
389 child=self.siblings(fl.children(n), fl.rev, file=f),
390 rename=self.renamelink(fl, n),
390 rename=self.renamelink(fl, n),
391 permissions=self.repo.manifest.read(mfn).execf(f))
391 permissions=self.repo.manifest.read(mfn).execf(f))
392
392
393 def fileannotate(self, f, node):
393 def fileannotate(self, f, node):
394 bcache = {}
394 bcache = {}
395 ncache = {}
395 ncache = {}
396 fl = self.repo.file(f)
396 fl = self.repo.file(f)
397 n = fl.lookup(node)
397 n = fl.lookup(node)
398 node = hex(n)
398 node = hex(n)
399 changerev = fl.linkrev(n)
399 changerev = fl.linkrev(n)
400
400
401 cl = self.repo.changelog
401 cl = self.repo.changelog
402 cn = cl.node(changerev)
402 cn = cl.node(changerev)
403 cs = cl.read(cn)
403 cs = cl.read(cn)
404 mfn = cs[0]
404 mfn = cs[0]
405
405
406 def annotate(**map):
406 def annotate(**map):
407 parity = 0
407 parity = 0
408 last = None
408 last = None
409 for r, l in fl.annotate(n):
409 for r, l in fl.annotate(n):
410 try:
410 try:
411 cnode = ncache[r]
411 cnode = ncache[r]
412 except KeyError:
412 except KeyError:
413 cnode = ncache[r] = self.repo.changelog.node(r)
413 cnode = ncache[r] = self.repo.changelog.node(r)
414
414
415 try:
415 try:
416 name = bcache[r]
416 name = bcache[r]
417 except KeyError:
417 except KeyError:
418 cl = self.repo.changelog.read(cnode)
418 cl = self.repo.changelog.read(cnode)
419 bcache[r] = name = self.repo.ui.shortuser(cl[1])
419 bcache[r] = name = self.repo.ui.shortuser(cl[1])
420
420
421 if last != cnode:
421 if last != cnode:
422 parity = 1 - parity
422 parity = 1 - parity
423 last = cnode
423 last = cnode
424
424
425 yield {"parity": parity,
425 yield {"parity": parity,
426 "node": hex(cnode),
426 "node": hex(cnode),
427 "rev": r,
427 "rev": r,
428 "author": name,
428 "author": name,
429 "file": f,
429 "file": f,
430 "line": l}
430 "line": l}
431
431
432 yield self.t("fileannotate",
432 yield self.t("fileannotate",
433 file=f,
433 file=f,
434 filenode=node,
434 filenode=node,
435 annotate=annotate,
435 annotate=annotate,
436 path=_up(f),
436 path=_up(f),
437 rev=changerev,
437 rev=changerev,
438 node=hex(cn),
438 node=hex(cn),
439 manifest=hex(mfn),
439 manifest=hex(mfn),
440 author=cs[1],
440 author=cs[1],
441 date=cs[2],
441 date=cs[2],
442 rename=self.renamelink(fl, n),
442 rename=self.renamelink(fl, n),
443 parent=self.siblings(fl.parents(n), fl.rev, file=f),
443 parent=self.siblings(fl.parents(n), fl.rev, file=f),
444 child=self.siblings(fl.children(n), fl.rev, file=f),
444 child=self.siblings(fl.children(n), fl.rev, file=f),
445 permissions=self.repo.manifest.read(mfn).execf(f))
445 permissions=self.repo.manifest.read(mfn).execf(f))
446
446
447 def manifest(self, mnode, path):
447 def manifest(self, mnode, path):
448 man = self.repo.manifest
448 man = self.repo.manifest
449 mn = man.lookup(mnode)
449 mn = man.lookup(mnode)
450 mnode = hex(mn)
450 mnode = hex(mn)
451 mf = man.read(mn)
451 mf = man.read(mn)
452 rev = man.rev(mn)
452 rev = man.rev(mn)
453 changerev = man.linkrev(mn)
453 changerev = man.linkrev(mn)
454 node = self.repo.changelog.node(changerev)
454 node = self.repo.changelog.node(changerev)
455
455
456 files = {}
456 files = {}
457
457
458 p = path[1:]
458 p = path[1:]
459 if p and p[-1] != "/":
459 if p and p[-1] != "/":
460 p += "/"
460 p += "/"
461 l = len(p)
461 l = len(p)
462
462
463 for f,n in mf.items():
463 for f,n in mf.items():
464 if f[:l] != p:
464 if f[:l] != p:
465 continue
465 continue
466 remain = f[l:]
466 remain = f[l:]
467 if "/" in remain:
467 if "/" in remain:
468 short = remain[:remain.index("/") + 1] # bleah
468 short = remain[:remain.index("/") + 1] # bleah
469 files[short] = (f, None)
469 files[short] = (f, None)
470 else:
470 else:
471 short = os.path.basename(remain)
471 short = os.path.basename(remain)
472 files[short] = (f, n)
472 files[short] = (f, n)
473
473
474 def filelist(**map):
474 def filelist(**map):
475 parity = 0
475 parity = 0
476 fl = files.keys()
476 fl = files.keys()
477 fl.sort()
477 fl.sort()
478 for f in fl:
478 for f in fl:
479 full, fnode = files[f]
479 full, fnode = files[f]
480 if not fnode:
480 if not fnode:
481 continue
481 continue
482
482
483 yield {"file": full,
483 yield {"file": full,
484 "manifest": mnode,
484 "manifest": mnode,
485 "filenode": hex(fnode),
485 "filenode": hex(fnode),
486 "parity": self.stripes(parity),
486 "parity": self.stripes(parity),
487 "basename": f,
487 "basename": f,
488 "permissions": mf.execf(full)}
488 "permissions": mf.execf(full)}
489 parity += 1
489 parity += 1
490
490
491 def dirlist(**map):
491 def dirlist(**map):
492 parity = 0
492 parity = 0
493 fl = files.keys()
493 fl = files.keys()
494 fl.sort()
494 fl.sort()
495 for f in fl:
495 for f in fl:
496 full, fnode = files[f]
496 full, fnode = files[f]
497 if fnode:
497 if fnode:
498 continue
498 continue
499
499
500 yield {"parity": self.stripes(parity),
500 yield {"parity": self.stripes(parity),
501 "path": os.path.join(path, f),
501 "path": os.path.join(path, f),
502 "manifest": mnode,
502 "manifest": mnode,
503 "basename": f[:-1]}
503 "basename": f[:-1]}
504 parity += 1
504 parity += 1
505
505
506 yield self.t("manifest",
506 yield self.t("manifest",
507 manifest=mnode,
507 manifest=mnode,
508 rev=rev,
508 rev=rev,
509 node=hex(node),
509 node=hex(node),
510 path=path,
510 path=path,
511 up=_up(path),
511 up=_up(path),
512 fentries=filelist,
512 fentries=filelist,
513 dentries=dirlist,
513 dentries=dirlist,
514 archives=self.archivelist(hex(node)))
514 archives=self.archivelist(hex(node)))
515
515
516 def tags(self):
516 def tags(self):
517 cl = self.repo.changelog
517 cl = self.repo.changelog
518 mf = cl.read(cl.tip())[0]
518 mf = cl.read(cl.tip())[0]
519
519
520 i = self.repo.tagslist()
520 i = self.repo.tagslist()
521 i.reverse()
521 i.reverse()
522
522
523 def entries(notip=False, **map):
523 def entries(notip=False, **map):
524 parity = 0
524 parity = 0
525 for k,n in i:
525 for k,n in i:
526 if notip and k == "tip": continue
526 if notip and k == "tip": continue
527 yield {"parity": self.stripes(parity),
527 yield {"parity": self.stripes(parity),
528 "tag": k,
528 "tag": k,
529 "tagmanifest": hex(cl.read(n)[0]),
529 "tagmanifest": hex(cl.read(n)[0]),
530 "date": cl.read(n)[2],
530 "date": cl.read(n)[2],
531 "node": hex(n)}
531 "node": hex(n)}
532 parity += 1
532 parity += 1
533
533
534 yield self.t("tags",
534 yield self.t("tags",
535 manifest=hex(mf),
535 manifest=hex(mf),
536 entries=lambda **x: entries(False, **x),
536 entries=lambda **x: entries(False, **x),
537 entriesnotip=lambda **x: entries(True, **x))
537 entriesnotip=lambda **x: entries(True, **x))
538
538
539 def summary(self):
539 def summary(self):
540 cl = self.repo.changelog
540 cl = self.repo.changelog
541 mf = cl.read(cl.tip())[0]
541 mf = cl.read(cl.tip())[0]
542
542
543 i = self.repo.tagslist()
543 i = self.repo.tagslist()
544 i.reverse()
544 i.reverse()
545
545
546 def tagentries(**map):
546 def tagentries(**map):
547 parity = 0
547 parity = 0
548 count = 0
548 count = 0
549 for k,n in i:
549 for k,n in i:
550 if k == "tip": # skip tip
550 if k == "tip": # skip tip
551 continue;
551 continue;
552
552
553 count += 1
553 count += 1
554 if count > 10: # limit to 10 tags
554 if count > 10: # limit to 10 tags
555 break;
555 break;
556
556
557 c = cl.read(n)
557 c = cl.read(n)
558 m = c[0]
558 m = c[0]
559 t = c[2]
559 t = c[2]
560
560
561 yield self.t("tagentry",
561 yield self.t("tagentry",
562 parity = self.stripes(parity),
562 parity = self.stripes(parity),
563 tag = k,
563 tag = k,
564 node = hex(n),
564 node = hex(n),
565 date = t,
565 date = t,
566 tagmanifest = hex(m))
566 tagmanifest = hex(m))
567 parity += 1
567 parity += 1
568
568
569 def changelist(**map):
569 def changelist(**map):
570 parity = 0
570 parity = 0
571 cl = self.repo.changelog
571 cl = self.repo.changelog
572 l = [] # build a list in forward order for efficiency
572 l = [] # build a list in forward order for efficiency
573 for i in range(start, end):
573 for i in range(start, end):
574 n = cl.node(i)
574 n = cl.node(i)
575 changes = cl.read(n)
575 changes = cl.read(n)
576 hn = hex(n)
576 hn = hex(n)
577 t = changes[2]
577 t = changes[2]
578
578
579 l.insert(0, self.t(
579 l.insert(0, self.t(
580 'shortlogentry',
580 'shortlogentry',
581 parity = parity,
581 parity = parity,
582 author = changes[1],
582 author = changes[1],
583 manifest = hex(changes[0]),
583 manifest = hex(changes[0]),
584 desc = changes[4],
584 desc = changes[4],
585 date = t,
585 date = t,
586 rev = i,
586 rev = i,
587 node = hn))
587 node = hn))
588 parity = 1 - parity
588 parity = 1 - parity
589
589
590 yield l
590 yield l
591
591
592 cl = self.repo.changelog
592 cl = self.repo.changelog
593 mf = cl.read(cl.tip())[0]
593 mf = cl.read(cl.tip())[0]
594 count = cl.count()
594 count = cl.count()
595 start = max(0, count - self.maxchanges)
595 start = max(0, count - self.maxchanges)
596 end = min(count, start + self.maxchanges)
596 end = min(count, start + self.maxchanges)
597
597
598 yield self.t("summary",
598 yield self.t("summary",
599 desc = self.repo.ui.config("web", "description", "unknown"),
599 desc = self.repo.ui.config("web", "description", "unknown"),
600 owner = (self.repo.ui.config("ui", "username") or # preferred
600 owner = (self.repo.ui.config("ui", "username") or # preferred
601 self.repo.ui.config("web", "contact") or # deprecated
601 self.repo.ui.config("web", "contact") or # deprecated
602 self.repo.ui.config("web", "author", "unknown")), # also
602 self.repo.ui.config("web", "author", "unknown")), # also
603 lastchange = (0, 0), # FIXME
603 lastchange = (0, 0), # FIXME
604 manifest = hex(mf),
604 manifest = hex(mf),
605 tags = tagentries,
605 tags = tagentries,
606 shortlog = changelist,
606 shortlog = changelist,
607 archives=self.archivelist("tip"))
607 archives=self.archivelist("tip"))
608
608
609 def filediff(self, file, changeset):
609 def filediff(self, file, changeset):
610 cl = self.repo.changelog
610 cl = self.repo.changelog
611 n = self.repo.lookup(changeset)
611 n = self.repo.lookup(changeset)
612 changeset = hex(n)
612 changeset = hex(n)
613 p1 = cl.parents(n)[0]
613 p1 = cl.parents(n)[0]
614 cs = cl.read(n)
614 cs = cl.read(n)
615 mf = self.repo.manifest.read(cs[0])
615 mf = self.repo.manifest.read(cs[0])
616
616
617 def diff(**map):
617 def diff(**map):
618 yield self.diff(p1, n, [file])
618 yield self.diff(p1, n, [file])
619
619
620 yield self.t("filediff",
620 yield self.t("filediff",
621 file=file,
621 file=file,
622 filenode=hex(mf.get(file, nullid)),
622 filenode=hex(mf.get(file, nullid)),
623 node=changeset,
623 node=changeset,
624 rev=self.repo.changelog.rev(n),
624 rev=self.repo.changelog.rev(n),
625 parent=self.siblings(cl.parents(n), cl.rev),
625 parent=self.siblings(cl.parents(n), cl.rev),
626 child=self.siblings(cl.children(n), cl.rev),
626 child=self.siblings(cl.children(n), cl.rev),
627 diff=diff)
627 diff=diff)
628
628
629 archive_specs = {
629 archive_specs = {
630 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
630 'bz2': ('application/x-tar', 'tbz2', '.tar.bz2', None),
631 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
631 'gz': ('application/x-tar', 'tgz', '.tar.gz', None),
632 'zip': ('application/zip', 'zip', '.zip', None),
632 'zip': ('application/zip', 'zip', '.zip', None),
633 }
633 }
634
634
635 def archive(self, req, cnode, type_):
635 def archive(self, req, cnode, type_):
636 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
636 reponame = re.sub(r"\W+", "-", os.path.basename(self.reponame))
637 name = "%s-%s" % (reponame, short(cnode))
637 name = "%s-%s" % (reponame, short(cnode))
638 mimetype, artype, extension, encoding = self.archive_specs[type_]
638 mimetype, artype, extension, encoding = self.archive_specs[type_]
639 headers = [('Content-type', mimetype),
639 headers = [('Content-type', mimetype),
640 ('Content-disposition', 'attachment; filename=%s%s' %
640 ('Content-disposition', 'attachment; filename=%s%s' %
641 (name, extension))]
641 (name, extension))]
642 if encoding:
642 if encoding:
643 headers.append(('Content-encoding', encoding))
643 headers.append(('Content-encoding', encoding))
644 req.header(headers)
644 req.header(headers)
645 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
645 archival.archive(self.repo, req.out, cnode, artype, prefix=name)
646
646
647 # add tags to things
647 # add tags to things
648 # tags -> list of changesets corresponding to tags
648 # tags -> list of changesets corresponding to tags
649 # find tag, changeset, file
649 # find tag, changeset, file
650
650
651 def cleanpath(self, path):
651 def cleanpath(self, path):
652 p = util.normpath(path)
652 p = util.normpath(path)
653 if p[:2] == "..":
653 if p[:2] == "..":
654 raise Exception("suspicious path")
654 raise Exception("suspicious path")
655 return p
655 return p
656
656
657 def run(self):
657 def run(self):
658 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
658 if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
659 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
659 raise RuntimeError("This function is only intended to be called while running as a CGI script.")
660 import mercurial.hgweb.wsgicgi as wsgicgi
660 import mercurial.hgweb.wsgicgi as wsgicgi
661 from request import wsgiapplication
661 from request import wsgiapplication
662 def make_web_app():
662 def make_web_app():
663 return self
663 return self
664 wsgicgi.launch(wsgiapplication(make_web_app))
664 wsgicgi.launch(wsgiapplication(make_web_app))
665
665
666 def run_wsgi(self, req):
666 def run_wsgi(self, req):
667 def header(**map):
667 def header(**map):
668 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
668 header_file = cStringIO.StringIO(''.join(self.t("header", **map)))
669 msg = mimetools.Message(header_file, 0)
669 msg = mimetools.Message(header_file, 0)
670 req.header(msg.items())
670 req.header(msg.items())
671 yield header_file.read()
671 yield header_file.read()
672
672
673 def rawfileheader(**map):
673 def rawfileheader(**map):
674 req.header([('Content-type', map['mimetype']),
674 req.header([('Content-type', map['mimetype']),
675 ('Content-disposition', 'filename=%s' % map['file']),
675 ('Content-disposition', 'filename=%s' % map['file']),
676 ('Content-length', str(len(map['raw'])))])
676 ('Content-length', str(len(map['raw'])))])
677 yield ''
677 yield ''
678
678
679 def footer(**map):
679 def footer(**map):
680 yield self.t("footer",
680 yield self.t("footer",
681 motd=self.repo.ui.config("web", "motd", ""),
681 motd=self.repo.ui.config("web", "motd", ""),
682 **map)
682 **map)
683
683
684 def expand_form(form):
684 def expand_form(form):
685 shortcuts = {
685 shortcuts = {
686 'cl': [('cmd', ['changelog']), ('rev', None)],
686 'cl': [('cmd', ['changelog']), ('rev', None)],
687 'sl': [('cmd', ['shortlog']), ('rev', None)],
687 'sl': [('cmd', ['shortlog']), ('rev', None)],
688 'cs': [('cmd', ['changeset']), ('node', None)],
688 'cs': [('cmd', ['changeset']), ('node', None)],
689 'f': [('cmd', ['file']), ('filenode', None)],
689 'f': [('cmd', ['file']), ('filenode', None)],
690 'fl': [('cmd', ['filelog']), ('filenode', None)],
690 'fl': [('cmd', ['filelog']), ('filenode', None)],
691 'fd': [('cmd', ['filediff']), ('node', None)],
691 'fd': [('cmd', ['filediff']), ('node', None)],
692 'fa': [('cmd', ['annotate']), ('filenode', None)],
692 'fa': [('cmd', ['annotate']), ('filenode', None)],
693 'mf': [('cmd', ['manifest']), ('manifest', None)],
693 'mf': [('cmd', ['manifest']), ('manifest', None)],
694 'ca': [('cmd', ['archive']), ('node', None)],
694 'ca': [('cmd', ['archive']), ('node', None)],
695 'tags': [('cmd', ['tags'])],
695 'tags': [('cmd', ['tags'])],
696 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
696 'tip': [('cmd', ['changeset']), ('node', ['tip'])],
697 'static': [('cmd', ['static']), ('file', None)]
697 'static': [('cmd', ['static']), ('file', None)]
698 }
698 }
699
699
700 for k in shortcuts.iterkeys():
700 for k in shortcuts.iterkeys():
701 if form.has_key(k):
701 if form.has_key(k):
702 for name, value in shortcuts[k]:
702 for name, value in shortcuts[k]:
703 if value is None:
703 if value is None:
704 value = form[k]
704 value = form[k]
705 form[name] = value
705 form[name] = value
706 del form[k]
706 del form[k]
707
707
708 self.refresh()
708 self.refresh()
709
709
710 expand_form(req.form)
710 expand_form(req.form)
711
711
712 m = os.path.join(self.templatepath, "map")
712 m = os.path.join(self.templatepath, "map")
713 style = self.repo.ui.config("web", "style", "")
713 style = self.repo.ui.config("web", "style", "")
714 if req.form.has_key('style'):
714 if req.form.has_key('style'):
715 style = req.form['style'][0]
715 style = req.form['style'][0]
716 if style:
716 if style:
717 b = os.path.basename("map-" + style)
717 b = os.path.basename("map-" + style)
718 p = os.path.join(self.templatepath, b)
718 p = os.path.join(self.templatepath, b)
719 if os.path.isfile(p):
719 if os.path.isfile(p):
720 m = p
720 m = p
721
721
722 port = req.env["SERVER_PORT"]
722 port = req.env["SERVER_PORT"]
723 port = port != "80" and (":" + port) or ""
723 port = port != "80" and (":" + port) or ""
724 uri = req.env["REQUEST_URI"]
724 uri = req.env["REQUEST_URI"]
725 if "?" in uri:
725 if "?" in uri:
726 uri = uri.split("?")[0]
726 uri = uri.split("?")[0]
727 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
727 url = "http://%s%s%s" % (req.env["SERVER_NAME"], port, uri)
728 if not self.reponame:
728 if not self.reponame:
729 self.reponame = (self.repo.ui.config("web", "name")
729 self.reponame = (self.repo.ui.config("web", "name")
730 or uri.strip('/') or self.repo.root)
730 or uri.strip('/') or self.repo.root)
731
731
732 self.t = templater.templater(m, templater.common_filters,
732 self.t = templater.templater(m, templater.common_filters,
733 defaults={"url": url,
733 defaults={"url": url,
734 "repo": self.reponame,
734 "repo": self.reponame,
735 "header": header,
735 "header": header,
736 "footer": footer,
736 "footer": footer,
737 "rawfileheader": rawfileheader,
737 "rawfileheader": rawfileheader,
738 })
738 })
739
739
740 if not req.form.has_key('cmd'):
740 if not req.form.has_key('cmd'):
741 req.form['cmd'] = [self.t.cache['default'],]
741 req.form['cmd'] = [self.t.cache['default'],]
742
742
743 cmd = req.form['cmd'][0]
743 cmd = req.form['cmd'][0]
744
744
745 method = getattr(self, 'do_' + cmd, None)
745 method = getattr(self, 'do_' + cmd, None)
746 if method:
746 if method:
747 method(req)
747 method(req)
748 else:
748 else:
749 req.write(self.t("error"))
749 req.write(self.t("error"))
750
750
751 def stripes(self, parity):
751 def stripes(self, parity):
752 "make horizontal stripes for easier reading"
752 "make horizontal stripes for easier reading"
753 if self.stripecount:
753 if self.stripecount:
754 return (1 + parity / self.stripecount) & 1
754 return (1 + parity / self.stripecount) & 1
755 else:
755 else:
756 return 0
756 return 0
757
757
758 def do_changelog(self, req):
758 def do_changelog(self, req):
759 hi = self.repo.changelog.count() - 1
759 hi = self.repo.changelog.count() - 1
760 if req.form.has_key('rev'):
760 if req.form.has_key('rev'):
761 hi = req.form['rev'][0]
761 hi = req.form['rev'][0]
762 try:
762 try:
763 hi = self.repo.changelog.rev(self.repo.lookup(hi))
763 hi = self.repo.changelog.rev(self.repo.lookup(hi))
764 except hg.RepoError:
764 except hg.RepoError:
765 req.write(self.search(hi)) # XXX redirect to 404 page?
765 req.write(self.search(hi)) # XXX redirect to 404 page?
766 return
766 return
767
767
768 req.write(self.changelog(hi))
768 req.write(self.changelog(hi))
769
769
770 def do_shortlog(self, req):
770 def do_shortlog(self, req):
771 hi = self.repo.changelog.count() - 1
771 hi = self.repo.changelog.count() - 1
772 if req.form.has_key('rev'):
772 if req.form.has_key('rev'):
773 hi = req.form['rev'][0]
773 hi = req.form['rev'][0]
774 try:
774 try:
775 hi = self.repo.changelog.rev(self.repo.lookup(hi))
775 hi = self.repo.changelog.rev(self.repo.lookup(hi))
776 except hg.RepoError:
776 except hg.RepoError:
777 req.write(self.search(hi)) # XXX redirect to 404 page?
777 req.write(self.search(hi)) # XXX redirect to 404 page?
778 return
778 return
779
779
780 req.write(self.changelog(hi, shortlog = True))
780 req.write(self.changelog(hi, shortlog = True))
781
781
782 def do_changeset(self, req):
782 def do_changeset(self, req):
783 req.write(self.changeset(req.form['node'][0]))
783 req.write(self.changeset(req.form['node'][0]))
784
784
785 def do_manifest(self, req):
785 def do_manifest(self, req):
786 req.write(self.manifest(req.form['manifest'][0],
786 req.write(self.manifest(req.form['manifest'][0],
787 self.cleanpath(req.form['path'][0])))
787 self.cleanpath(req.form['path'][0])))
788
788
789 def do_tags(self, req):
789 def do_tags(self, req):
790 req.write(self.tags())
790 req.write(self.tags())
791
791
792 def do_summary(self, req):
792 def do_summary(self, req):
793 req.write(self.summary())
793 req.write(self.summary())
794
794
795 def do_filediff(self, req):
795 def do_filediff(self, req):
796 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
796 req.write(self.filediff(self.cleanpath(req.form['file'][0]),
797 req.form['node'][0]))
797 req.form['node'][0]))
798
798
799 def do_file(self, req):
799 def do_file(self, req):
800 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
800 req.write(self.filerevision(self.cleanpath(req.form['file'][0]),
801 req.form['filenode'][0]))
801 req.form['filenode'][0]))
802
802
803 def do_annotate(self, req):
803 def do_annotate(self, req):
804 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
804 req.write(self.fileannotate(self.cleanpath(req.form['file'][0]),
805 req.form['filenode'][0]))
805 req.form['filenode'][0]))
806
806
807 def do_filelog(self, req):
807 def do_filelog(self, req):
808 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
808 req.write(self.filelog(self.cleanpath(req.form['file'][0]),
809 req.form['filenode'][0]))
809 req.form['filenode'][0]))
810
810
811 def do_heads(self, req):
811 def do_heads(self, req):
812 resp = " ".join(map(hex, self.repo.heads())) + "\n"
812 resp = " ".join(map(hex, self.repo.heads())) + "\n"
813 req.httphdr("application/mercurial-0.1", length=len(resp))
813 req.httphdr("application/mercurial-0.1", length=len(resp))
814 req.write(resp)
814 req.write(resp)
815
815
816 def do_branches(self, req):
816 def do_branches(self, req):
817 nodes = []
817 nodes = []
818 if req.form.has_key('nodes'):
818 if req.form.has_key('nodes'):
819 nodes = map(bin, req.form['nodes'][0].split(" "))
819 nodes = map(bin, req.form['nodes'][0].split(" "))
820 resp = cStringIO.StringIO()
820 resp = cStringIO.StringIO()
821 for b in self.repo.branches(nodes):
821 for b in self.repo.branches(nodes):
822 resp.write(" ".join(map(hex, b)) + "\n")
822 resp.write(" ".join(map(hex, b)) + "\n")
823 resp = resp.getvalue()
823 resp = resp.getvalue()
824 req.httphdr("application/mercurial-0.1", length=len(resp))
824 req.httphdr("application/mercurial-0.1", length=len(resp))
825 req.write(resp)
825 req.write(resp)
826
826
827 def do_between(self, req):
827 def do_between(self, req):
828 nodes = []
828 nodes = []
829 if req.form.has_key('pairs'):
829 if req.form.has_key('pairs'):
830 pairs = [map(bin, p.split("-"))
830 pairs = [map(bin, p.split("-"))
831 for p in req.form['pairs'][0].split(" ")]
831 for p in req.form['pairs'][0].split(" ")]
832 resp = cStringIO.StringIO()
832 resp = cStringIO.StringIO()
833 for b in self.repo.between(pairs):
833 for b in self.repo.between(pairs):
834 resp.write(" ".join(map(hex, b)) + "\n")
834 resp.write(" ".join(map(hex, b)) + "\n")
835 resp = resp.getvalue()
835 resp = resp.getvalue()
836 req.httphdr("application/mercurial-0.1", length=len(resp))
836 req.httphdr("application/mercurial-0.1", length=len(resp))
837 req.write(resp)
837 req.write(resp)
838
838
839 def do_changegroup(self, req):
839 def do_changegroup(self, req):
840 req.httphdr("application/mercurial-0.1")
840 req.httphdr("application/mercurial-0.1")
841 nodes = []
841 nodes = []
842 if not self.allowpull:
842 if not self.allowpull:
843 return
843 return
844
844
845 if req.form.has_key('roots'):
845 if req.form.has_key('roots'):
846 nodes = map(bin, req.form['roots'][0].split(" "))
846 nodes = map(bin, req.form['roots'][0].split(" "))
847
847
848 z = zlib.compressobj()
848 z = zlib.compressobj()
849 f = self.repo.changegroup(nodes, 'serve')
849 f = self.repo.changegroup(nodes, 'serve')
850 while 1:
850 while 1:
851 chunk = f.read(4096)
851 chunk = f.read(4096)
852 if not chunk:
852 if not chunk:
853 break
853 break
854 req.write(z.compress(chunk))
854 req.write(z.compress(chunk))
855
855
856 req.write(z.flush())
856 req.write(z.flush())
857
857
858 def do_archive(self, req):
858 def do_archive(self, req):
859 changeset = self.repo.lookup(req.form['node'][0])
859 changeset = self.repo.lookup(req.form['node'][0])
860 type_ = req.form['type'][0]
860 type_ = req.form['type'][0]
861 allowed = self.repo.ui.configlist("web", "allow_archive")
861 allowed = self.repo.ui.configlist("web", "allow_archive")
862 if (type_ in self.archives and (type_ in allowed or
862 if (type_ in self.archives and (type_ in allowed or
863 self.repo.ui.configbool("web", "allow" + type_, False))):
863 self.repo.ui.configbool("web", "allow" + type_, False))):
864 self.archive(req, changeset, type_)
864 self.archive(req, changeset, type_)
865 return
865 return
866
866
867 req.write(self.t("error"))
867 req.write(self.t("error"))
868
868
869 def do_static(self, req):
869 def do_static(self, req):
870 fname = req.form['file'][0]
870 fname = req.form['file'][0]
871 static = self.repo.ui.config("web", "static",
871 static = self.repo.ui.config("web", "static",
872 os.path.join(self.templatepath,
872 os.path.join(self.templatepath,
873 "static"))
873 "static"))
874 req.write(staticfile(static, fname, req)
874 req.write(staticfile(static, fname, req)
875 or self.t("error", error="%r not found" % fname))
875 or self.t("error", error="%r not found" % fname))
876
876
877 def do_capabilities(self, req):
877 def do_capabilities(self, req):
878 caps = ['unbundle']
878 caps = ['unbundle']
879 if self.repo.ui.configbool('server', 'uncompressed'):
879 if self.repo.ui.configbool('server', 'uncompressed'):
880 caps.append('stream=%d' % self.repo.revlogversion)
880 caps.append('stream=%d' % self.repo.revlogversion)
881 resp = ' '.join(caps)
881 resp = ' '.join(caps)
882 req.httphdr("application/mercurial-0.1", length=len(resp))
882 req.httphdr("application/mercurial-0.1", length=len(resp))
883 req.write(resp)
883 req.write(resp)
884
884
885 def check_perm(self, req, op, default):
885 def check_perm(self, req, op, default):
886 '''check permission for operation based on user auth.
886 '''check permission for operation based on user auth.
887 return true if op allowed, else false.
887 return true if op allowed, else false.
888 default is policy to use if no config given.'''
888 default is policy to use if no config given.'''
889
889
890 user = req.env.get('REMOTE_USER')
890 user = req.env.get('REMOTE_USER')
891
891
892 deny = self.repo.ui.configlist('web', 'deny_' + op)
892 deny = self.repo.ui.configlist('web', 'deny_' + op)
893 if deny and (not user or deny == ['*'] or user in deny):
893 if deny and (not user or deny == ['*'] or user in deny):
894 return False
894 return False
895
895
896 allow = self.repo.ui.configlist('web', 'allow_' + op)
896 allow = self.repo.ui.configlist('web', 'allow_' + op)
897 return (allow and (allow == ['*'] or user in allow)) or default
897 return (allow and (allow == ['*'] or user in allow)) or default
898
898
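The deny/allow evaluation in check_perm above is self-contained enough to exercise on its own; a minimal standalone sketch of the same policy (check_perm_policy is a hypothetical name, not part of hgweb):

def check_perm_policy(user, deny, allow, default):
    # the deny list wins: an unauthenticated user, a '*' wildcard, or an
    # explicit match refuses the operation outright
    if deny and (not user or deny == ['*'] or user in deny):
        return False
    # the allow list grants access on a wildcard or explicit match,
    # otherwise fall back to the caller-supplied default policy
    if allow and (allow == ['*'] or user in allow):
        return True
    return default

# pushes stay denied for anonymous users even with a wildcard allow list
assert check_perm_policy(None, ['*'], ['*'], True) is False
assert check_perm_policy('alice', [], ['alice'], False) is True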
899 def do_unbundle(self, req):
899 def do_unbundle(self, req):
900 def bail(response, headers={}):
900 def bail(response, headers={}):
901 length = int(req.env['CONTENT_LENGTH'])
901 length = int(req.env['CONTENT_LENGTH'])
902 for s in util.filechunkiter(req, limit=length):
902 for s in util.filechunkiter(req, limit=length):
903 # drain incoming bundle, else client will not see
903 # drain incoming bundle, else client will not see
904 # response when run outside cgi script
904 # response when run outside cgi script
905 pass
905 pass
906 req.httphdr("application/mercurial-0.1", headers=headers)
906 req.httphdr("application/mercurial-0.1", headers=headers)
907 req.write('0\n')
907 req.write('0\n')
908 req.write(response)
908 req.write(response)
909
909
910 # require ssl by default, auth info cannot be sniffed and
910 # require ssl by default, auth info cannot be sniffed and
911 # replayed
911 # replayed
912 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
912 ssl_req = self.repo.ui.configbool('web', 'push_ssl', True)
913 if ssl_req:
913 if ssl_req:
914 if not req.env.get('HTTPS'):
914 if not req.env.get('HTTPS'):
915 bail(_('ssl required\n'))
915 bail(_('ssl required\n'))
916 return
916 return
917 proto = 'https'
917 proto = 'https'
918 else:
918 else:
919 proto = 'http'
919 proto = 'http'
920
920
921 # do not allow push unless explicitly allowed
921 # do not allow push unless explicitly allowed
922 if not self.check_perm(req, 'push', False):
922 if not self.check_perm(req, 'push', False):
923 bail(_('push not authorized\n'),
923 bail(_('push not authorized\n'),
924 headers={'status': '401 Unauthorized'})
924 headers={'status': '401 Unauthorized'})
925 return
925 return
926
926
927 req.httphdr("application/mercurial-0.1")
927 req.httphdr("application/mercurial-0.1")
928
928
929 their_heads = req.form['heads'][0].split(' ')
929 their_heads = req.form['heads'][0].split(' ')
930
930
931 def check_heads():
931 def check_heads():
932 heads = map(hex, self.repo.heads())
932 heads = map(hex, self.repo.heads())
933 return their_heads == [hex('force')] or their_heads == heads
933 return their_heads == [hex('force')] or their_heads == heads
934
934
935 # fail early if possible
935 # fail early if possible
936 if not check_heads():
936 if not check_heads():
937 bail(_('unsynced changes\n'))
937 bail(_('unsynced changes\n'))
938 return
938 return
939
939
940 # do not lock repo until all changegroup data is
940 # do not lock repo until all changegroup data is
941 # streamed. save to temporary file.
941 # streamed. save to temporary file.
942
942
943 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
943 fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
944 fp = os.fdopen(fd, 'wb+')
944 fp = os.fdopen(fd, 'wb+')
945 try:
945 try:
946 length = int(req.env['CONTENT_LENGTH'])
946 length = int(req.env['CONTENT_LENGTH'])
947 for s in util.filechunkiter(req, limit=length):
947 for s in util.filechunkiter(req, limit=length):
948 fp.write(s)
948 fp.write(s)
949
949
950 lock = self.repo.lock()
950 lock = self.repo.lock()
951 try:
951 try:
952 if not check_heads():
952 if not check_heads():
953 req.write('0\n')
953 req.write('0\n')
954 req.write(_('unsynced changes\n'))
954 req.write(_('unsynced changes\n'))
955 return
955 return
956
956
957 fp.seek(0)
957 fp.seek(0)
958
958
959 # send addchangegroup output to client
959 # send addchangegroup output to client
960
960
961 old_stdout = sys.stdout
961 old_stdout = sys.stdout
962 sys.stdout = cStringIO.StringIO()
962 sys.stdout = cStringIO.StringIO()
963
963
964 try:
964 try:
965 url = 'remote:%s:%s' % (proto,
965 url = 'remote:%s:%s' % (proto,
966 req.env.get('REMOTE_HOST', ''))
966 req.env.get('REMOTE_HOST', ''))
967 ret = self.repo.addchangegroup(fp, 'serve', url)
967 ret = self.repo.addchangegroup(fp, 'serve', url)
968 finally:
968 finally:
969 val = sys.stdout.getvalue()
969 val = sys.stdout.getvalue()
970 sys.stdout = old_stdout
970 sys.stdout = old_stdout
971 req.write('%d\n' % ret)
971 req.write('%d\n' % ret)
972 req.write(val)
972 req.write(val)
973 finally:
973 finally:
974 lock.release()
974 lock.release()
975 finally:
975 finally:
976 fp.close()
976 fp.close()
977 os.unlink(tempname)
977 os.unlink(tempname)
978
978
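The comment in do_unbundle above ('do not lock repo until all changegroup data is streamed') describes a pattern worth stating on its own; a generic sketch with the repository-specific pieces passed in as callables (hypothetical helper, not hgweb code):

import os, tempfile

def spool_then_apply(read_chunks, acquire_lock, apply_under_lock):
    # buffer the whole incoming stream in a temporary file first, so the
    # repository lock is held only while applying, never while receiving
    fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
    fp = os.fdopen(fd, 'wb+')
    try:
        for chunk in read_chunks():
            fp.write(chunk)
        lock = acquire_lock()
        try:
            fp.seek(0)
            return apply_under_lock(fp)
        finally:
            lock.release()
    finally:
        fp.close()
        os.unlink(tempname)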
979 def do_stream_out(self, req):
979 def do_stream_out(self, req):
980 req.httphdr("application/mercurial-0.1")
980 req.httphdr("application/mercurial-0.1")
981 streamclone.stream_out(self.repo, req)
981 streamclone.stream_out(self.repo, req)
@@ -1,334 +1,339 @@
1 # merge.py - directory-level update/merge handling for Mercurial
1 # merge.py - directory-level update/merge handling for Mercurial
2 #
2 #
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from node import *
8 from node import *
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from demandload import *
10 from demandload import *
11 demandload(globals(), "util os tempfile")
11 demandload(globals(), "util os tempfile")
12
12
13 def fmerge(f, local, other, ancestor):
14 """merge executable flags"""
15 a, b, c = ancestor.execf(f), local.execf(f), other.execf(f)
16 return ((a^b) | (a^c)) ^ a
17
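A quick way to see that the bit expression in fmerge above means 'whichever side changed the exec flag relative to the ancestor wins' is to enumerate all eight cases (plain sketch, independent of the manifest objects):

def flagmerge(a, b, c):
    # a = ancestor exec flag, b = local flag, c = other flag;
    # exactly the expression used by fmerge() above
    return ((a ^ b) | (a ^ c)) ^ a

for a in (False, True):
    for b in (False, True):
        for c in (False, True):
            if b != a:
                expected = b      # local changed the flag: local wins
            else:
                expected = c      # otherwise take other (== a if unchanged)
            assert flagmerge(a, b, c) == expected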
13 def merge3(repo, fn, my, other, p1, p2):
18 def merge3(repo, fn, my, other, p1, p2):
14 """perform a 3-way merge in the working directory"""
19 """perform a 3-way merge in the working directory"""
15
20
16 def temp(prefix, node):
21 def temp(prefix, node):
17 pre = "%s~%s." % (os.path.basename(fn), prefix)
22 pre = "%s~%s." % (os.path.basename(fn), prefix)
18 (fd, name) = tempfile.mkstemp(prefix=pre)
23 (fd, name) = tempfile.mkstemp(prefix=pre)
19 f = os.fdopen(fd, "wb")
24 f = os.fdopen(fd, "wb")
20 repo.wwrite(fn, fl.read(node), f)
25 repo.wwrite(fn, fl.read(node), f)
21 f.close()
26 f.close()
22 return name
27 return name
23
28
24 fl = repo.file(fn)
29 fl = repo.file(fn)
25 base = fl.ancestor(my, other)
30 base = fl.ancestor(my, other)
26 a = repo.wjoin(fn)
31 a = repo.wjoin(fn)
27 b = temp("base", base)
32 b = temp("base", base)
28 c = temp("other", other)
33 c = temp("other", other)
29
34
30 repo.ui.note(_("resolving %s\n") % fn)
35 repo.ui.note(_("resolving %s\n") % fn)
31 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
36 repo.ui.debug(_("file %s: my %s other %s ancestor %s\n") %
32 (fn, short(my), short(other), short(base)))
37 (fn, short(my), short(other), short(base)))
33
38
34 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
39 cmd = (os.environ.get("HGMERGE") or repo.ui.config("ui", "merge")
35 or "hgmerge")
40 or "hgmerge")
36 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
41 r = util.system('%s "%s" "%s" "%s"' % (cmd, a, b, c), cwd=repo.root,
37 environ={'HG_FILE': fn,
42 environ={'HG_FILE': fn,
38 'HG_MY_NODE': p1,
43 'HG_MY_NODE': p1,
39 'HG_OTHER_NODE': p2,
44 'HG_OTHER_NODE': p2,
40 'HG_FILE_MY_NODE': hex(my),
45 'HG_FILE_MY_NODE': hex(my),
41 'HG_FILE_OTHER_NODE': hex(other),
46 'HG_FILE_OTHER_NODE': hex(other),
42 'HG_FILE_BASE_NODE': hex(base)})
47 'HG_FILE_BASE_NODE': hex(base)})
43 if r:
48 if r:
44 repo.ui.warn(_("merging %s failed!\n") % fn)
49 repo.ui.warn(_("merging %s failed!\n") % fn)
45
50
46 os.unlink(b)
51 os.unlink(b)
47 os.unlink(c)
52 os.unlink(c)
48 return r
53 return r
49
54
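merge3 above hands the actual 3-way merge to an external command, passing the working file, the base and the other version as arguments and exporting node information through HG_* environment variables. A toy HGMERGE replacement illustrating that contract (hedged sketch, assuming a Unix merge(1) on PATH):

#!/usr/bin/env python
# toy HGMERGE script: merge3 above runs it as  <cmd> <local> <base> <other>
# and expects the merged result in <local>, exit status 0 on success
import os, sys

def main():
    local, base, other = sys.argv[1:4]
    # merge3 exports HG_FILE, HG_MY_NODE, ... for smarter tools
    sys.stderr.write("merging %s\n" % os.environ.get('HG_FILE', local))
    return os.spawnlp(os.P_WAIT, 'merge', 'merge', local, base, other)

if __name__ == '__main__':
    sys.exit(main())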
50 def update(repo, node, branchmerge=False, force=False, partial=None,
55 def update(repo, node, branchmerge=False, force=False, partial=None,
51 wlock=None, show_stats=True, remind=True):
56 wlock=None, show_stats=True, remind=True):
52
57
53 overwrite = force and not branchmerge
58 overwrite = force and not branchmerge
54 forcemerge = force and branchmerge
59 forcemerge = force and branchmerge
55
60
56 if not wlock:
61 if not wlock:
57 wlock = repo.wlock()
62 wlock = repo.wlock()
58
63
59 ### check phase
64 ### check phase
60
65
61 pl = repo.dirstate.parents()
66 pl = repo.dirstate.parents()
62 if not overwrite and pl[1] != nullid:
67 if not overwrite and pl[1] != nullid:
63 raise util.Abort(_("outstanding uncommitted merges"))
68 raise util.Abort(_("outstanding uncommitted merges"))
64
69
65 p1, p2 = pl[0], node
70 p1, p2 = pl[0], node
66 pa = repo.changelog.ancestor(p1, p2)
71 pa = repo.changelog.ancestor(p1, p2)
67
72
68 # is there a linear path from p1 to p2?
73 # is there a linear path from p1 to p2?
69 linear_path = (pa == p1 or pa == p2)
74 linear_path = (pa == p1 or pa == p2)
70 if branchmerge and linear_path:
75 if branchmerge and linear_path:
71 raise util.Abort(_("there is nothing to merge, just use "
76 raise util.Abort(_("there is nothing to merge, just use "
72 "'hg update' or look at 'hg heads'"))
77 "'hg update' or look at 'hg heads'"))
73
78
74 if not overwrite and not linear_path and not branchmerge:
79 if not overwrite and not linear_path and not branchmerge:
75 raise util.Abort(_("update spans branches, use 'hg merge' "
80 raise util.Abort(_("update spans branches, use 'hg merge' "
76 "or 'hg update -C' to lose changes"))
81 "or 'hg update -C' to lose changes"))
77
82
78 modified, added, removed, deleted, unknown = repo.status()[:5]
83 modified, added, removed, deleted, unknown = repo.status()[:5]
79 if branchmerge and not forcemerge:
84 if branchmerge and not forcemerge:
80 if modified or added or removed:
85 if modified or added or removed:
81 raise util.Abort(_("outstanding uncommitted changes"))
86 raise util.Abort(_("outstanding uncommitted changes"))
82
87
83 m1n = repo.changelog.read(p1)[0]
88 m1n = repo.changelog.read(p1)[0]
84 m2n = repo.changelog.read(p2)[0]
89 m2n = repo.changelog.read(p2)[0]
85 man = repo.manifest.ancestor(m1n, m2n)
90 man = repo.manifest.ancestor(m1n, m2n)
86 m1 = repo.manifest.read(m1n)
91 m1 = repo.manifest.read(m1n)
87 m2 = repo.manifest.read(m2n).copy()
92 m2 = repo.manifest.read(m2n).copy()
88 ma = repo.manifest.read(man)
93 ma = repo.manifest.read(man)
89
94
90 if not force:
95 if not force:
91 for f in unknown:
96 for f in unknown:
92 if f in m2:
97 if f in m2:
93 t1 = repo.wread(f)
98 if repo.file(f).cmp(m2[f], repo.wread(f)):
94 t2 = repo.file(f).read(m2[f])
95 if cmp(t1, t2) != 0:
96 raise util.Abort(_("'%s' already exists in the working"
99 raise util.Abort(_("'%s' already exists in the working"
97 " dir and differs from remote") % f)
100 " dir and differs from remote") % f)
98
101
99 # resolve the manifest to determine which files
102 # resolve the manifest to determine which files
100 # we care about merging
103 # we care about merging
101 repo.ui.note(_("resolving manifests\n"))
104 repo.ui.note(_("resolving manifests\n"))
102 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
105 repo.ui.debug(_(" overwrite %s branchmerge %s partial %s linear %s\n") %
103 (overwrite, branchmerge, partial and True or False, linear_path))
106 (overwrite, branchmerge, bool(partial), linear_path))
104 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
107 repo.ui.debug(_(" ancestor %s local %s remote %s\n") %
105 (short(man), short(m1n), short(m2n)))
108 (short(man), short(m1n), short(m2n)))
106
109
107 merge = {}
110 merge = {}
108 get = {}
111 get = {}
109 remove = []
112 remove = []
113 forget = []
110
114
111 # construct a working dir manifest
115 # construct a working dir manifest
112 mw = m1.copy()
116 mw = m1.copy()
113 umap = dict.fromkeys(unknown)
117 umap = dict.fromkeys(unknown)
114
118
115 for f in added + modified + unknown:
119 for f in added + modified + unknown:
116 mw[f] = ""
120 mw[f] = ""
121 # is the wfile new and matches m2?
122 if (f not in m1 and f in m2 and
123 not repo.file(f).cmp(m2[f], repo.wread(f))):
124 mw[f] = m2[f]
125
117 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
126 mw.set(f, util.is_exec(repo.wjoin(f), mw.execf(f)))
118
127
119 for f in deleted + removed:
128 for f in deleted + removed:
120 if f in mw:
129 if f in mw:
121 del mw[f]
130 del mw[f]
122
131
123 # If we're jumping between revisions (as opposed to merging),
132 # If we're jumping between revisions (as opposed to merging),
124 # and if neither the working directory nor the target rev has
133 # and if neither the working directory nor the target rev has
125 # the file, then we need to remove it from the dirstate, to
134 # the file, then we need to remove it from the dirstate, to
126 # prevent the dirstate from listing the file when it is no
135 # prevent the dirstate from listing the file when it is no
127 # longer in the manifest.
136 # longer in the manifest.
128 if not partial and linear_path and f not in m2:
137 if linear_path and f not in m2:
129 repo.dirstate.forget((f,))
138 forget.append(f)
130
139
131 # Compare manifests
140 # Compare manifests
132 for f, n in mw.iteritems():
141 for f, n in mw.iteritems():
133 if partial and not partial(f):
142 if partial and not partial(f):
134 continue
143 continue
135 if f in m2:
144 if f in m2:
136 s = 0
145 s = 0
137
146
138 # is the wfile new since m1, and match m2?
139 if f not in m1:
140 t1 = repo.wread(f)
141 t2 = repo.file(f).read(m2[f])
142 if cmp(t1, t2) == 0:
143 n = m2[f]
144 del t1, t2
145
146 # are files different?
147 # are files different?
147 if n != m2[f]:
148 if n != m2[f]:
148 a = ma.get(f, nullid)
149 a = ma.get(f, nullid)
149 # are both different from the ancestor?
150 # are both different from the ancestor?
150 if n != a and m2[f] != a:
151 if n != a and m2[f] != a:
151 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 repo.ui.debug(_(" %s versions differ, resolve\n") % f)
152 # merge executable bits
153 merge[f] = (fmerge(f, mw, m2, ma), m1.get(f, nullid), m2[f])
153 # "if we changed or they changed, change in merge"
154 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
155 mode = ((a^b) | (a^c)) ^ a
156 merge[f] = (mode, m1.get(f, nullid), m2[f])
157 s = 1
154 s = 1
158 # are we clobbering?
155 # are we clobbering?
159 # is remote's version newer?
156 # is remote's version newer?
160 # or are we going back in time?
157 # or are we going back in time?
161 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
158 elif overwrite or m2[f] != a or (p2 == pa and mw[f] == m1[f]):
162 repo.ui.debug(_(" remote %s is newer, get\n") % f)
159 repo.ui.debug(_(" remote %s is newer, get\n") % f)
163 get[f] = (m2.execf(f), m2[f])
160 get[f] = (m2.execf(f), m2[f])
164 s = 1
161 s = 1
165 elif f in umap or f in added:
162 elif f in umap or f in added:
166 # this unknown file is the same as the checkout
163 # this unknown file is the same as the checkout
167 # we need to reset the dirstate if the file was added
164 # we need to reset the dirstate if the file was added
168 get[f] = (m2.execf(f), m2[f])
165 get[f] = (m2.execf(f), m2[f])
169
166
170 if not s and mw.execf(f) != m2.execf(f):
167 if not s and mw.execf(f) != m2.execf(f):
171 if overwrite:
168 if overwrite:
172 repo.ui.debug(_(" updating permissions for %s\n") % f)
169 repo.ui.debug(_(" updating permissions for %s\n") % f)
173 util.set_exec(repo.wjoin(f), m2.execf(f))
170 util.set_exec(repo.wjoin(f), m2.execf(f))
174 else:
171 else:
175 a, b, c = ma.execf(f), mw.execf(f), m2.execf(f)
172 if fmerge(f, mw, m2, ma) != mw.execf(f):
176 mode = ((a^b) | (a^c)) ^ a
177 if mode != b:
178 repo.ui.debug(_(" updating permissions for %s\n")
173 repo.ui.debug(_(" updating permissions for %s\n")
179 % f)
174 % f)
180 util.set_exec(repo.wjoin(f), mode)
175 util.set_exec(repo.wjoin(f), mode)
181 del m2[f]
176 del m2[f]
182 elif f in ma:
177 elif f in ma:
183 if n != ma[f]:
178 if n != ma[f]:
184 r = _("d")
179 r = _("d")
185 if not overwrite and (linear_path or branchmerge):
180 if not overwrite and (linear_path or branchmerge):
186 r = repo.ui.prompt(
181 r = repo.ui.prompt(
187 (_(" local changed %s which remote deleted\n") % f) +
182 (_(" local changed %s which remote deleted\n") % f) +
188 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
183 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
189 if r == _("d"):
184 if r == _("d"):
190 remove.append(f)
185 remove.append(f)
191 else:
186 else:
192 repo.ui.debug(_("other deleted %s\n") % f)
187 repo.ui.debug(_("other deleted %s\n") % f)
193 remove.append(f) # other deleted it
188 remove.append(f) # other deleted it
194 else:
189 else:
195 # file is created on branch or in working directory
190 # file is created on branch or in working directory
196 if overwrite and f not in umap:
191 if overwrite and f not in umap:
197 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
192 repo.ui.debug(_("remote deleted %s, clobbering\n") % f)
198 remove.append(f)
193 remove.append(f)
199 elif n == m1.get(f, nullid): # same as parent
194 elif n == m1.get(f, nullid): # same as parent
200 if p2 == pa: # going backwards?
195 if p2 == pa: # going backwards?
201 repo.ui.debug(_("remote deleted %s\n") % f)
196 repo.ui.debug(_("remote deleted %s\n") % f)
202 remove.append(f)
197 remove.append(f)
203 else:
198 else:
204 repo.ui.debug(_("local modified %s, keeping\n") % f)
199 repo.ui.debug(_("local modified %s, keeping\n") % f)
205 else:
200 else:
206 repo.ui.debug(_("working dir created %s, keeping\n") % f)
201 repo.ui.debug(_("working dir created %s, keeping\n") % f)
207
202
208 for f, n in m2.iteritems():
203 for f, n in m2.iteritems():
209 if partial and not partial(f):
204 if partial and not partial(f):
210 continue
205 continue
211 if f[0] == "/":
206 if f[0] == "/":
212 continue
207 continue
213 if f in ma and n != ma[f]:
208 if f in ma and n != ma[f]:
214 r = _("k")
209 r = _("k")
215 if not overwrite and (linear_path or branchmerge):
210 if not overwrite and (linear_path or branchmerge):
216 r = repo.ui.prompt(
211 r = repo.ui.prompt(
217 (_("remote changed %s which local deleted\n") % f) +
212 (_("remote changed %s which local deleted\n") % f) +
218 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
213 _("(k)eep or (d)elete?"), _("[kd]"), _("k"))
219 if r == _("k"):
214 if r == _("k"):
220 get[f] = (m2.execf(f), n)
215 get[f] = (m2.execf(f), n)
221 elif f not in ma:
216 elif f not in ma:
222 repo.ui.debug(_("remote created %s\n") % f)
217 repo.ui.debug(_("remote created %s\n") % f)
223 get[f] = (m2.execf(f), n)
218 get[f] = (m2.execf(f), n)
224 else:
219 else:
225 if overwrite or p2 == pa: # going backwards?
220 if overwrite or p2 == pa: # going backwards?
226 repo.ui.debug(_("local deleted %s, recreating\n") % f)
221 repo.ui.debug(_("local deleted %s, recreating\n") % f)
227 get[f] = (m2.execf(f), n)
222 get[f] = (m2.execf(f), n)
228 else:
223 else:
229 repo.ui.debug(_("local deleted %s\n") % f)
224 repo.ui.debug(_("local deleted %s\n") % f)
230
225
231 del mw, m1, m2, ma
226 del mw, m1, m2, ma
232
227
228 ### apply phase
229
233 if overwrite:
230 if overwrite:
234 for f in merge:
231 for f in merge:
235 get[f] = merge[f][:2]
232 get[f] = merge[f][:2]
236 merge = {}
233 merge = {}
237
234
238 if linear_path or overwrite:
235 if linear_path or overwrite:
239 # we don't need to do any magic, just jump to the new rev
236 # we don't need to do any magic, just jump to the new rev
240 p1, p2 = p2, nullid
237 p1, p2 = p2, nullid
241
238
242 xp1 = hex(p1)
239 xp1 = hex(p1)
243 xp2 = hex(p2)
240 xp2 = hex(p2)
244 if p2 == nullid: xxp2 = ''
241 if p2 == nullid: xxp2 = ''
245 else: xxp2 = xp2
242 else: xxp2 = xp2
246
243
247 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
244 repo.hook('preupdate', throw=True, parent1=xp1, parent2=xxp2)
248
245
249 # get the files we don't need to change
246 # get the files we don't need to change
250 files = get.keys()
247 files = get.keys()
251 files.sort()
248 files.sort()
252 for f in files:
249 for f in files:
253 flag, node = get[f]
250 flag, node = get[f]
254 if f[0] == "/":
251 if f[0] == "/":
255 continue
252 continue
256 repo.ui.note(_("getting %s\n") % f)
253 repo.ui.note(_("getting %s\n") % f)
257 t = repo.file(f).read(node)
254 t = repo.file(f).read(node)
258 repo.wwrite(f, t)
255 repo.wwrite(f, t)
259 util.set_exec(repo.wjoin(f), flag)
256 util.set_exec(repo.wjoin(f), flag)
260 if not partial:
261 if branchmerge:
262 repo.dirstate.update([f], 'n', st_mtime=-1)
263 else:
264 repo.dirstate.update([f], 'n')
265
257
266 # merge the tricky bits
258 # merge the tricky bits
267 unresolved = []
259 unresolved = []
268 files = merge.keys()
260 files = merge.keys()
269 files.sort()
261 files.sort()
270 for f in files:
262 for f in files:
271 repo.ui.status(_("merging %s\n") % f)
263 repo.ui.status(_("merging %s\n") % f)
272 flag, my, other = merge[f]
264 flag, my, other = merge[f]
273 ret = merge3(repo, f, my, other, xp1, xp2)
265 ret = merge3(repo, f, my, other, xp1, xp2)
274 if ret:
266 if ret:
275 unresolved.append(f)
267 unresolved.append(f)
276 util.set_exec(repo.wjoin(f), flag)
268 util.set_exec(repo.wjoin(f), flag)
277 if not partial:
278 if branchmerge:
279 # We've done a branch merge, mark this file as merged
280 # so that we properly record the merger later
281 repo.dirstate.update([f], 'm')
282 else:
283 # We've update-merged a locally modified file, so
284 # we set the dirstate to emulate a normal checkout
285 # of that file some time in the past. Thus our
286 # merge will appear as a normal local file
287 # modification.
288 f_len = len(repo.file(f).read(other))
289 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
290
269
291 remove.sort()
270 remove.sort()
292 for f in remove:
271 for f in remove:
293 repo.ui.note(_("removing %s\n") % f)
272 repo.ui.note(_("removing %s\n") % f)
294 util.audit_path(f)
273 util.audit_path(f)
295 try:
274 try:
296 util.unlink(repo.wjoin(f))
275 util.unlink(repo.wjoin(f))
297 except OSError, inst:
276 except OSError, inst:
298 if inst.errno != errno.ENOENT:
277 if inst.errno != errno.ENOENT:
299 repo.ui.warn(_("update failed to remove %s: %s!\n") %
278 repo.ui.warn(_("update failed to remove %s: %s!\n") %
300 (f, inst.strerror))
279 (f, inst.strerror))
280
281 # update dirstate
301 if not partial:
282 if not partial:
283 repo.dirstate.setparents(p1, p2)
284 repo.dirstate.forget(forget)
302 if branchmerge:
285 if branchmerge:
303 repo.dirstate.update(remove, 'r')
286 repo.dirstate.update(remove, 'r')
304 else:
287 else:
305 repo.dirstate.forget(remove)
288 repo.dirstate.forget(remove)
306
289
307 if not partial:
290 files = get.keys()
308 repo.dirstate.setparents(p1, p2)
291 files.sort()
292 for f in files:
293 if branchmerge:
294 repo.dirstate.update([f], 'n', st_mtime=-1)
295 else:
296 repo.dirstate.update([f], 'n')
297
298 files = merge.keys()
299 files.sort()
300 for f in files:
301 if branchmerge:
302 # We've done a branch merge, mark this file as merged
303 # so that we properly record the merger later
304 repo.dirstate.update([f], 'm')
305 else:
306 # We've update-merged a locally modified file, so
307 # we set the dirstate to emulate a normal checkout
308 # of that file some time in the past. Thus our
309 # merge will appear as a normal local file
310 # modification.
311 fl = repo.file(f)
312 f_len = fl.size(fl.rev(other))
313 repo.dirstate.update([f], 'n', st_size=f_len, st_mtime=-1)
309
314
310 if show_stats:
315 if show_stats:
311 stats = ((len(get), _("updated")),
316 stats = ((len(get), _("updated")),
312 (len(merge) - len(unresolved), _("merged")),
317 (len(merge) - len(unresolved), _("merged")),
313 (len(remove), _("removed")),
318 (len(remove), _("removed")),
314 (len(unresolved), _("unresolved")))
319 (len(unresolved), _("unresolved")))
315 note = ", ".join([_("%d files %s") % s for s in stats])
320 note = ", ".join([_("%d files %s") % s for s in stats])
316 repo.ui.status("%s\n" % note)
321 repo.ui.status("%s\n" % note)
317 if not partial:
322 if not partial:
318 if branchmerge:
323 if branchmerge:
319 if unresolved:
324 if unresolved:
320 repo.ui.status(_("There are unresolved merges,"
325 repo.ui.status(_("There are unresolved merges,"
321 " you can redo the full merge using:\n"
326 " you can redo the full merge using:\n"
322 " hg update -C %s\n"
327 " hg update -C %s\n"
323 " hg merge %s\n"
328 " hg merge %s\n"
324 % (repo.changelog.rev(p1),
329 % (repo.changelog.rev(p1),
325 repo.changelog.rev(p2))))
330 repo.changelog.rev(p2))))
326 elif remind:
331 elif remind:
327 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
332 repo.ui.status(_("(branch merge, don't forget to commit)\n"))
328 elif unresolved:
333 elif unresolved:
329 repo.ui.status(_("There are unresolved merges with"
334 repo.ui.status(_("There are unresolved merges with"
330 " locally modified files.\n"))
335 " locally modified files.\n"))
331
336
332 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
337 repo.hook('update', parent1=xp1, parent2=xxp2, error=len(unresolved))
333 return len(unresolved)
338 return len(unresolved)
334
339
@@ -1,435 +1,377 @@
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from demandload import demandload
8 from demandload import demandload
9 from i18n import gettext as _
9 from i18n import gettext as _
10 from node import *
10 from node import *
11 demandload(globals(), "cmdutil mdiff util")
11 demandload(globals(), "cmdutil mdiff util")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
12 demandload(globals(), "cStringIO email.Parser os re shutil sys tempfile")
13
13
14 def extract(ui, fileobj):
14 def extract(ui, fileobj):
15 '''extract patch from data read from fileobj.
15 '''extract patch from data read from fileobj.
16
16
17 patch can be normal patch or contained in email message.
17 patch can be normal patch or contained in email message.
18
18
19 return tuple (filename, message, user, date). any item in returned
19 return tuple (filename, message, user, date). any item in returned
20 tuple can be None. if filename is None, fileobj did not contain
20 tuple can be None. if filename is None, fileobj did not contain
21 patch. caller must unlink filename when done.'''
21 patch. caller must unlink filename when done.'''
22
22
23 # attempt to detect the start of a patch
23 # attempt to detect the start of a patch
24 # (this heuristic is borrowed from quilt)
24 # (this heuristic is borrowed from quilt)
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
25 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
26 'retrieving revision [0-9]+(\.[0-9]+)*$|' +
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
27 '(---|\*\*\*)[ \t])', re.MULTILINE)
28
28
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
29 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
30 tmpfp = os.fdopen(fd, 'w')
30 tmpfp = os.fdopen(fd, 'w')
31 try:
31 try:
32 hgpatch = False
32 hgpatch = False
33
33
34 msg = email.Parser.Parser().parse(fileobj)
34 msg = email.Parser.Parser().parse(fileobj)
35
35
36 message = msg['Subject']
36 message = msg['Subject']
37 user = msg['From']
37 user = msg['From']
38 # should try to parse msg['Date']
38 # should try to parse msg['Date']
39 date = None
39 date = None
40
40
41 if message:
41 if message:
42 message = message.replace('\n\t', ' ')
42 message = message.replace('\n\t', ' ')
43 ui.debug('Subject: %s\n' % message)
43 ui.debug('Subject: %s\n' % message)
44 if user:
44 if user:
45 ui.debug('From: %s\n' % user)
45 ui.debug('From: %s\n' % user)
46 diffs_seen = 0
46 diffs_seen = 0
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
47 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
48
48
49 for part in msg.walk():
49 for part in msg.walk():
50 content_type = part.get_content_type()
50 content_type = part.get_content_type()
51 ui.debug('Content-Type: %s\n' % content_type)
51 ui.debug('Content-Type: %s\n' % content_type)
52 if content_type not in ok_types:
52 if content_type not in ok_types:
53 continue
53 continue
54 payload = part.get_payload(decode=True)
54 payload = part.get_payload(decode=True)
55 m = diffre.search(payload)
55 m = diffre.search(payload)
56 if m:
56 if m:
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
57 ui.debug(_('found patch at byte %d\n') % m.start(0))
58 diffs_seen += 1
58 diffs_seen += 1
59 cfp = cStringIO.StringIO()
59 cfp = cStringIO.StringIO()
60 if message:
60 if message:
61 cfp.write(message)
61 cfp.write(message)
62 cfp.write('\n')
62 cfp.write('\n')
63 for line in payload[:m.start(0)].splitlines():
63 for line in payload[:m.start(0)].splitlines():
64 if line.startswith('# HG changeset patch'):
64 if line.startswith('# HG changeset patch'):
65 ui.debug(_('patch generated by hg export\n'))
65 ui.debug(_('patch generated by hg export\n'))
66 hgpatch = True
66 hgpatch = True
67 # drop earlier commit message content
67 # drop earlier commit message content
68 cfp.seek(0)
68 cfp.seek(0)
69 cfp.truncate()
69 cfp.truncate()
70 elif hgpatch:
70 elif hgpatch:
71 if line.startswith('# User '):
71 if line.startswith('# User '):
72 user = line[7:]
72 user = line[7:]
73 ui.debug('From: %s\n' % user)
73 ui.debug('From: %s\n' % user)
74 elif line.startswith("# Date "):
74 elif line.startswith("# Date "):
75 date = line[7:]
75 date = line[7:]
76 if not line.startswith('# '):
76 if not line.startswith('# '):
77 cfp.write(line)
77 cfp.write(line)
78 cfp.write('\n')
78 cfp.write('\n')
79 message = cfp.getvalue()
79 message = cfp.getvalue()
80 if tmpfp:
80 if tmpfp:
81 tmpfp.write(payload)
81 tmpfp.write(payload)
82 if not payload.endswith('\n'):
82 if not payload.endswith('\n'):
83 tmpfp.write('\n')
83 tmpfp.write('\n')
84 elif not diffs_seen and message and content_type == 'text/plain':
84 elif not diffs_seen and message and content_type == 'text/plain':
85 message += '\n' + payload
85 message += '\n' + payload
86 except:
86 except:
87 tmpfp.close()
87 tmpfp.close()
88 os.unlink(tmpname)
88 os.unlink(tmpname)
89 raise
89 raise
90
90
91 tmpfp.close()
91 tmpfp.close()
92 if not diffs_seen:
92 if not diffs_seen:
93 os.unlink(tmpname)
93 os.unlink(tmpname)
94 return None, message, user, date
94 return None, message, user, date
95 return tmpname, message, user, date
95 return tmpname, message, user, date
96
96
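A typical caller of extract() above looks roughly like the following, honouring the docstring's contract that the temporary file must be unlinked by the caller (hedged sketch of a driver, not code from this changeset):

import os

def apply_mail_patch(ui, repo, fileobj, strip=1):
    # pull the patch out of a mail message, apply it in the working dir,
    # and always clean up the temporary file extract() handed back
    tmpname, message, user, date = extract(ui, fileobj)
    if not tmpname:
        raise util.Abort(_('no patch found'))
    try:
        files = patch(strip, tmpname, ui, cwd=repo.root)
    finally:
        os.unlink(tmpname)
    return files, message, user, date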
97 def readgitpatch(patchname):
97 def readgitpatch(patchname):
98 """extract git-style metadata about patches from <patchname>"""
98 """extract git-style metadata about patches from <patchname>"""
99 class gitpatch:
99 class gitpatch:
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
100 "op is one of ADD, DELETE, RENAME, MODIFY or COPY"
101 def __init__(self, path):
101 def __init__(self, path):
102 self.path = path
102 self.path = path
103 self.oldpath = None
103 self.oldpath = None
104 self.mode = None
104 self.mode = None
105 self.op = 'MODIFY'
105 self.op = 'MODIFY'
106 self.copymod = False
106 self.copymod = False
107 self.lineno = 0
107 self.lineno = 0
108
108
109 # Filter patch for git information
109 # Filter patch for git information
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
110 gitre = re.compile('diff --git a/(.*) b/(.*)')
111 pf = file(patchname)
111 pf = file(patchname)
112 gp = None
112 gp = None
113 gitpatches = []
113 gitpatches = []
114 # Can have a git patch with only metadata, causing patch to complain
114 # Can have a git patch with only metadata, causing patch to complain
115 dopatch = False
115 dopatch = False
116
116
117 lineno = 0
117 lineno = 0
118 for line in pf:
118 for line in pf:
119 lineno += 1
119 lineno += 1
120 if line.startswith('diff --git'):
120 if line.startswith('diff --git'):
121 m = gitre.match(line)
121 m = gitre.match(line)
122 if m:
122 if m:
123 if gp:
123 if gp:
124 gitpatches.append(gp)
124 gitpatches.append(gp)
125 src, dst = m.group(1,2)
125 src, dst = m.group(1,2)
126 gp = gitpatch(dst)
126 gp = gitpatch(dst)
127 gp.lineno = lineno
127 gp.lineno = lineno
128 elif gp:
128 elif gp:
129 if line.startswith('--- '):
129 if line.startswith('--- '):
130 if gp.op in ('COPY', 'RENAME'):
130 if gp.op in ('COPY', 'RENAME'):
131 gp.copymod = True
131 gp.copymod = True
132 dopatch = 'filter'
132 dopatch = 'filter'
133 gitpatches.append(gp)
133 gitpatches.append(gp)
134 gp = None
134 gp = None
135 if not dopatch:
135 if not dopatch:
136 dopatch = True
136 dopatch = True
137 continue
137 continue
138 if line.startswith('rename from '):
138 if line.startswith('rename from '):
139 gp.op = 'RENAME'
139 gp.op = 'RENAME'
140 gp.oldpath = line[12:].rstrip()
140 gp.oldpath = line[12:].rstrip()
141 elif line.startswith('rename to '):
141 elif line.startswith('rename to '):
142 gp.path = line[10:].rstrip()
142 gp.path = line[10:].rstrip()
143 elif line.startswith('copy from '):
143 elif line.startswith('copy from '):
144 gp.op = 'COPY'
144 gp.op = 'COPY'
145 gp.oldpath = line[10:].rstrip()
145 gp.oldpath = line[10:].rstrip()
146 elif line.startswith('copy to '):
146 elif line.startswith('copy to '):
147 gp.path = line[8:].rstrip()
147 gp.path = line[8:].rstrip()
148 elif line.startswith('deleted file'):
148 elif line.startswith('deleted file'):
149 gp.op = 'DELETE'
149 gp.op = 'DELETE'
150 elif line.startswith('new file mode '):
150 elif line.startswith('new file mode '):
151 gp.op = 'ADD'
151 gp.op = 'ADD'
152 gp.mode = int(line.rstrip()[-3:], 8)
152 gp.mode = int(line.rstrip()[-3:], 8)
153 elif line.startswith('new mode '):
153 elif line.startswith('new mode '):
154 gp.mode = int(line.rstrip()[-3:], 8)
154 gp.mode = int(line.rstrip()[-3:], 8)
155 if gp:
155 if gp:
156 gitpatches.append(gp)
156 gitpatches.append(gp)
157
157
158 if not gitpatches:
158 if not gitpatches:
159 dopatch = True
159 dopatch = True
160
160
161 return (dopatch, gitpatches)
161 return (dopatch, gitpatches)
162
162
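For reference, the kind of extended header block that readgitpatch() above scans for looks like the sample below; the tiny driver (illustrative only) writes it to a temporary file and checks the parsed metadata:

import os, tempfile

sample = '''diff --git a/oldname.py b/newname.py
rename from oldname.py
rename to newname.py
--- a/oldname.py
+++ b/newname.py
@@ -1,1 +1,1 @@
-print 1
+print 2
'''

fd, name = tempfile.mkstemp(prefix='sample-git-patch-')
fp = os.fdopen(fd, 'w')
fp.write(sample)
fp.close()
dopatch, gitpatches = readgitpatch(name)
os.unlink(name)
assert dopatch == 'filter'        # copy/rename hunks need preprocessing
assert gitpatches[0].op == 'RENAME'
assert (gitpatches[0].oldpath, gitpatches[0].path) == ('oldname.py', 'newname.py')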
163 def dogitpatch(patchname, gitpatches):
163 def dogitpatch(patchname, gitpatches):
164 """Preprocess git patch so that vanilla patch can handle it"""
164 """Preprocess git patch so that vanilla patch can handle it"""
165 pf = file(patchname)
165 pf = file(patchname)
166 pfline = 1
166 pfline = 1
167
167
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
168 fd, patchname = tempfile.mkstemp(prefix='hg-patch-')
169 tmpfp = os.fdopen(fd, 'w')
169 tmpfp = os.fdopen(fd, 'w')
170
170
171 try:
171 try:
172 for i in range(len(gitpatches)):
172 for i in range(len(gitpatches)):
173 p = gitpatches[i]
173 p = gitpatches[i]
174 if not p.copymod:
174 if not p.copymod:
175 continue
175 continue
176
176
177 if os.path.exists(p.path):
177 if os.path.exists(p.path):
178 raise util.Abort(_("cannot create %s: destination already exists") %
178 raise util.Abort(_("cannot create %s: destination already exists") %
179 p.path)
179 p.path)
180
180
181 (src, dst) = [os.path.join(os.getcwd(), n)
181 (src, dst) = [os.path.join(os.getcwd(), n)
182 for n in (p.oldpath, p.path)]
182 for n in (p.oldpath, p.path)]
183
183
184 targetdir = os.path.dirname(dst)
184 targetdir = os.path.dirname(dst)
185 if not os.path.isdir(targetdir):
185 if not os.path.isdir(targetdir):
186 os.makedirs(targetdir)
186 os.makedirs(targetdir)
187 try:
187 try:
188 shutil.copyfile(src, dst)
188 shutil.copyfile(src, dst)
189 shutil.copymode(src, dst)
189 shutil.copymode(src, dst)
190 except shutil.Error, inst:
190 except shutil.Error, inst:
191 raise util.Abort(str(inst))
191 raise util.Abort(str(inst))
192
192
193 # rewrite patch hunk
193 # rewrite patch hunk
194 while pfline < p.lineno:
194 while pfline < p.lineno:
195 tmpfp.write(pf.readline())
195 tmpfp.write(pf.readline())
196 pfline += 1
196 pfline += 1
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
197 tmpfp.write('diff --git a/%s b/%s\n' % (p.path, p.path))
198 line = pf.readline()
198 line = pf.readline()
199 pfline += 1
199 pfline += 1
200 while not line.startswith('--- a/'):
200 while not line.startswith('--- a/'):
201 tmpfp.write(line)
201 tmpfp.write(line)
202 line = pf.readline()
202 line = pf.readline()
203 pfline += 1
203 pfline += 1
204 tmpfp.write('--- a/%s\n' % p.path)
204 tmpfp.write('--- a/%s\n' % p.path)
205
205
206 line = pf.readline()
206 line = pf.readline()
207 while line:
207 while line:
208 tmpfp.write(line)
208 tmpfp.write(line)
209 line = pf.readline()
209 line = pf.readline()
210 except:
210 except:
211 tmpfp.close()
211 tmpfp.close()
212 os.unlink(patchname)
212 os.unlink(patchname)
213 raise
213 raise
214
214
215 tmpfp.close()
215 tmpfp.close()
216 return patchname
216 return patchname
217
217
218 def patch(patchname, ui, strip=1, cwd=None):
218 def patch(strip, patchname, ui, cwd=None):
219 """apply the patch <patchname> to the working directory.
219 """apply the patch <patchname> to the working directory.
220 a list of patched files is returned"""
220 a list of patched files is returned"""
221
221
222 (dopatch, gitpatches) = readgitpatch(patchname)
222 (dopatch, gitpatches) = readgitpatch(patchname)
223
223
224 files = {}
224 files = {}
225 fuzz = False
226 if dopatch:
225 if dopatch:
227 if dopatch == 'filter':
226 if dopatch == 'filter':
228 patchname = dogitpatch(patchname, gitpatches)
227 patchname = dogitpatch(patchname, gitpatches)
229 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
228 patcher = util.find_in_path('gpatch', os.environ.get('PATH', ''), 'patch')
230 args = []
229 args = []
231 if cwd:
230 if cwd:
232 args.append('-d %s' % util.shellquote(cwd))
231 args.append('-d %s' % util.shellquote(cwd))
233 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
232 fp = os.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
234 util.shellquote(patchname)))
233 util.shellquote(patchname)))
235
234
236 if dopatch == 'filter':
235 if dopatch == 'filter':
237 False and os.unlink(patchname)
236 False and os.unlink(patchname)
238
237
239 for line in fp:
238 for line in fp:
240 line = line.rstrip()
239 line = line.rstrip()
241 ui.note(line + '\n')
240 ui.status("%s\n" % line)
242 if line.startswith('patching file '):
241 if line.startswith('patching file '):
243 pf = util.parse_patch_output(line)
242 pf = util.parse_patch_output(line)
244 printed_file = False
245 files.setdefault(pf, (None, None))
243 files.setdefault(pf, (None, None))
246 elif line.find('with fuzz') >= 0:
247 fuzz = True
248 if not printed_file:
249 ui.warn(pf + '\n')
250 printed_file = True
251 ui.warn(line + '\n')
252 elif line.find('saving rejects to file') >= 0:
253 ui.warn(line + '\n')
254 elif line.find('FAILED') >= 0:
255 if not printed_file:
256 ui.warn(pf + '\n')
257 printed_file = True
258 ui.warn(line + '\n')
259
260 code = fp.close()
244 code = fp.close()
261 if code:
245 if code:
262 raise util.Abort(_("patch command failed: %s") %
246 raise util.Abort(_("patch command failed: %s") %
263 util.explain_exit(code)[0])
247 util.explain_exit(code)[0])
264
248
265 for gp in gitpatches:
249 for gp in gitpatches:
266 files[gp.path] = (gp.op, gp)
250 files[gp.path] = (gp.op, gp)
267
251
268 return (files, fuzz)
252 return files
253
254 def diffopts(ui, opts={}):
255 return mdiff.diffopts(
256 text=opts.get('text'),
257 showfunc=(opts.get('show_function') or
258 ui.configbool('diff', 'showfunc', None)),
259 ignorews=(opts.get('ignore_all_space') or
260 ui.configbool('diff', 'ignorews', None)),
261 ignorewsamount=(opts.get('ignore_space_change') or
262 ui.configbool('diff', 'ignorewsamount', None)),
263 ignoreblanklines=(opts.get('ignore_blank_lines') or
264 ui.configbool('diff', 'ignoreblanklines', None)))
269
265
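The new diffopts() helper above is meant to be the single place where command-line flags and the [diff] section of hgrc get folded together; a caller would use it roughly like this (hypothetical wrapper, assuming ui and repo objects are already at hand):

def print_working_diff(ui, repo, cmdline_opts):
    # CLI flags win; unset flags fall back to the [diff] config section
    opts = diffopts(ui, cmdline_opts)
    # with no nodes given, diff() compares the dirstate parent against
    # the working directory and writes straight to the ui
    diff(repo, fp=ui, opts=opts)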
270 def diff(repo, node1=None, node2=None, files=None, match=util.always,
266 def diff(repo, node1=None, node2=None, files=None, match=util.always,
271 fp=None, changes=None, opts=None):
267 fp=None, changes=None, opts=None):
272 '''print diff of changes to files between two nodes, or node and
268 '''print diff of changes to files between two nodes, or node and
273 working directory.
269 working directory.
274
270
275 if node1 is None, use first dirstate parent instead.
271 if node1 is None, use first dirstate parent instead.
276 if node2 is None, compare node1 with working directory.'''
272 if node2 is None, compare node1 with working directory.'''
277
273
278 if opts is None:
274 if opts is None:
279 opts = mdiff.defaultopts
275 opts = mdiff.defaultopts
280 if fp is None:
276 if fp is None:
281 fp = repo.ui
277 fp = repo.ui
282
278
283 if not node1:
279 if not node1:
284 node1 = repo.dirstate.parents()[0]
280 node1 = repo.dirstate.parents()[0]
285 # reading the data for node1 early allows it to play nicely
281 # reading the data for node1 early allows it to play nicely
286 # with repo.status and the revlog cache.
282 # with repo.status and the revlog cache.
287 change = repo.changelog.read(node1)
283 change = repo.changelog.read(node1)
288 mmap = repo.manifest.read(change[0])
284 mmap = repo.manifest.read(change[0])
289 date1 = util.datestr(change[2])
285 date1 = util.datestr(change[2])
290
286
291 if not changes:
287 if not changes:
292 changes = repo.status(node1, node2, files, match=match)[:5]
288 changes = repo.status(node1, node2, files, match=match)[:5]
293 modified, added, removed, deleted, unknown = changes
289 modified, added, removed, deleted, unknown = changes
294 if files:
290 if files:
295 def filterfiles(filters):
291 def filterfiles(filters):
296 l = [x for x in filters if x in files]
292 l = [x for x in filters if x in files]
297
293
298 for t in files:
294 for t in files:
299 if not t.endswith("/"):
295 if not t.endswith("/"):
300 t += "/"
296 t += "/"
301 l += [x for x in filters if x.startswith(t)]
297 l += [x for x in filters if x.startswith(t)]
302 return l
298 return l
303
299
304 modified, added, removed = map(filterfiles, (modified, added, removed))
300 modified, added, removed = map(filterfiles, (modified, added, removed))
305
301
306 if not modified and not added and not removed:
302 if not modified and not added and not removed:
307 return
303 return
308
304
309 if node2:
305 if node2:
310 change = repo.changelog.read(node2)
306 change = repo.changelog.read(node2)
311 mmap2 = repo.manifest.read(change[0])
307 mmap2 = repo.manifest.read(change[0])
312 _date2 = util.datestr(change[2])
308 _date2 = util.datestr(change[2])
313 def date2(f):
309 def date2(f):
314 return _date2
310 return _date2
315 def read(f):
311 def read(f):
316 return repo.file(f).read(mmap2[f])
312 return repo.file(f).read(mmap2[f])
317 def renamed(f):
318 src = repo.file(f).renamed(mmap2[f])
319 return src and src[0] or None
320 else:
313 else:
321 tz = util.makedate()[1]
314 tz = util.makedate()[1]
322 _date2 = util.datestr()
315 _date2 = util.datestr()
323 def date2(f):
316 def date2(f):
324 try:
317 try:
325 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
318 return util.datestr((os.lstat(repo.wjoin(f)).st_mtime, tz))
326 except OSError, err:
319 except OSError, err:
327 if err.errno != errno.ENOENT: raise
320 if err.errno != errno.ENOENT: raise
328 return _date2
321 return _date2
329 def read(f):
322 def read(f):
330 return repo.wread(f)
323 return repo.wread(f)
331 def renamed(f):
332 return repo.dirstate.copies.get(f)
333
324
334 if repo.ui.quiet:
325 if repo.ui.quiet:
335 r = None
326 r = None
336 else:
327 else:
337 hexfunc = repo.ui.verbose and hex or short
328 hexfunc = repo.ui.verbose and hex or short
338 r = [hexfunc(node) for node in [node1, node2] if node]
329 r = [hexfunc(node) for node in [node1, node2] if node]
339
330
340 if opts.git:
341 copied = {}
342 for f in added:
343 src = renamed(f)
344 if src:
345 copied[f] = src
346 srcs = [x[1] for x in copied.items()]
347
348 all = modified + added + removed
331 all = modified + added + removed
349 all.sort()
332 all.sort()
350 for f in all:
333 for f in all:
351 to = None
334 to = None
352 tn = None
335 tn = None
353 dodiff = True
354 if f in mmap:
336 if f in mmap:
355 to = repo.file(f).read(mmap[f])
337 to = repo.file(f).read(mmap[f])
356 if f not in removed:
338 if f not in removed:
357 tn = read(f)
339 tn = read(f)
358 if opts.git:
340 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
359 def gitmode(x):
360 return x and '100755' or '100644'
361 def addmodehdr(header, omode, nmode):
362 if omode != nmode:
363 header.append('old mode %s\n' % omode)
364 header.append('new mode %s\n' % nmode)
365
366 a, b = f, f
367 header = []
368 if f in added:
369 if node2:
370 mode = gitmode(mmap2.execf(f))
371 else:
372 mode = gitmode(util.is_exec(repo.wjoin(f), None))
373 if f in copied:
374 a = copied[f]
375 omode = gitmode(mmap.execf(a))
376 addmodehdr(header, omode, mode)
377 op = a in removed and 'rename' or 'copy'
378 header.append('%s from %s\n' % (op, a))
379 header.append('%s to %s\n' % (op, f))
380 to = repo.file(a).read(mmap[a])
381 else:
382 header.append('new file mode %s\n' % mode)
383 elif f in removed:
384 if f in srcs:
385 dodiff = False
386 else:
387 mode = gitmode(mmap.execf(f))
388 header.append('deleted file mode %s\n' % mode)
389 else:
390 omode = gitmode(mmap.execf(f))
391 nmode = gitmode(util.is_exec(repo.wjoin(f), mmap.execf(f)))
392 addmodehdr(header, omode, nmode)
393 r = None
394 if dodiff:
395 header.insert(0, 'diff --git a/%s b/%s\n' % (a, b))
396 fp.write(''.join(header))
397 if dodiff:
398 fp.write(mdiff.unidiff(to, date1, tn, date2(f), f, r, opts=opts))
399
341
400 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
342 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
401 opts=None):
343 opts=None):
402 '''export changesets as hg patches.'''
344 '''export changesets as hg patches.'''
403
345
404 total = len(revs)
346 total = len(revs)
405 revwidth = max(map(len, revs))
347 revwidth = max(map(len, revs))
406
348
407 def single(node, seqno, fp):
349 def single(node, seqno, fp):
408 parents = [p for p in repo.changelog.parents(node) if p != nullid]
350 parents = [p for p in repo.changelog.parents(node) if p != nullid]
409 if switch_parent:
351 if switch_parent:
410 parents.reverse()
352 parents.reverse()
411 prev = (parents and parents[0]) or nullid
353 prev = (parents and parents[0]) or nullid
412 change = repo.changelog.read(node)
354 change = repo.changelog.read(node)
413
355
414 if not fp:
356 if not fp:
415 fp = cmdutil.make_file(repo, template, node, total=total,
357 fp = cmdutil.make_file(repo, template, node, total=total,
416 seqno=seqno, revwidth=revwidth)
358 seqno=seqno, revwidth=revwidth)
417 if fp not in (sys.stdout, repo.ui):
359 if fp not in (sys.stdout, repo.ui):
418 repo.ui.note("%s\n" % fp.name)
360 repo.ui.note("%s\n" % fp.name)
419
361
420 fp.write("# HG changeset patch\n")
362 fp.write("# HG changeset patch\n")
421 fp.write("# User %s\n" % change[1])
363 fp.write("# User %s\n" % change[1])
422 fp.write("# Date %d %d\n" % change[2])
364 fp.write("# Date %d %d\n" % change[2])
423 fp.write("# Node ID %s\n" % hex(node))
365 fp.write("# Node ID %s\n" % hex(node))
424 fp.write("# Parent %s\n" % hex(prev))
366 fp.write("# Parent %s\n" % hex(prev))
425 if len(parents) > 1:
367 if len(parents) > 1:
426 fp.write("# Parent %s\n" % hex(parents[1]))
368 fp.write("# Parent %s\n" % hex(parents[1]))
427 fp.write(change[4].rstrip())
369 fp.write(change[4].rstrip())
428 fp.write("\n\n")
370 fp.write("\n\n")
429
371
430 diff(repo, prev, node, fp=fp, opts=opts)
372 diff(repo, prev, node, fp=fp, opts=opts)
431 if fp not in (sys.stdout, repo.ui):
373 if fp not in (sys.stdout, repo.ui):
432 fp.close()
374 fp.close()
433
375
434 for seqno, cset in enumerate(revs):
376 for seqno, cset in enumerate(revs):
435 single(cset, seqno, fp)
377 single(cset, seqno, fp)
@@ -1,1285 +1,1298 @@
1 """
1 """
2 revlog.py - storage back-end for mercurial
2 revlog.py - storage back-end for mercurial
3
3
4 This provides efficient delta storage with O(1) retrieve and append
4 This provides efficient delta storage with O(1) retrieve and append
5 and O(changes) merge between branches
5 and O(changes) merge between branches
6
6
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
7 Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
8
8
9 This software may be used and distributed according to the terms
9 This software may be used and distributed according to the terms
10 of the GNU General Public License, incorporated herein by reference.
10 of the GNU General Public License, incorporated herein by reference.
11 """
11 """
12
12
13 from node import *
13 from node import *
14 from i18n import gettext as _
14 from i18n import gettext as _
15 from demandload import demandload
15 from demandload import demandload
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
16 demandload(globals(), "binascii changegroup errno heapq mdiff os")
17 demandload(globals(), "sha struct util zlib")
17 demandload(globals(), "sha struct util zlib")
18
18
19 # revlog version strings
19 # revlog version strings
20 REVLOGV0 = 0
20 REVLOGV0 = 0
21 REVLOGNG = 1
21 REVLOGNG = 1
22
22
23 # revlog flags
23 # revlog flags
24 REVLOGNGINLINEDATA = (1 << 16)
24 REVLOGNGINLINEDATA = (1 << 16)
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
25 REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA
26
26
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
27 REVLOG_DEFAULT_FORMAT = REVLOGNG
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
28 REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS
29
29
30 def flagstr(flag):
30 def flagstr(flag):
31 if flag == "inline":
31 if flag == "inline":
32 return REVLOGNGINLINEDATA
32 return REVLOGNGINLINEDATA
33 raise RevlogError(_("unknown revlog flag %s" % flag))
33 raise RevlogError(_("unknown revlog flag %s" % flag))
34
34
35 def hash(text, p1, p2):
35 def hash(text, p1, p2):
36 """generate a hash from the given text and its parent hashes
36 """generate a hash from the given text and its parent hashes
37
37
38 This hash combines both the current file contents and its history
38 This hash combines both the current file contents and its history
39 in a manner that makes it easy to distinguish nodes with the same
39 in a manner that makes it easy to distinguish nodes with the same
40 content in the revision graph.
40 content in the revision graph.
41 """
41 """
42 l = [p1, p2]
42 l = [p1, p2]
43 l.sort()
43 l.sort()
44 s = sha.new(l[0])
44 s = sha.new(l[0])
45 s.update(l[1])
45 s.update(l[1])
46 s.update(text)
46 s.update(text)
47 return s.digest()
47 return s.digest()
48
48
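The nodeid computed by hash() above is easy to reproduce outside the revlog; a minimal sketch with hashlib (the function name and test values are illustrative only):

import hashlib

def nodeid(text, p1, p2):
    # same scheme as hash() above: sort the parent ids so the result does
    # not depend on parent order, then digest parents followed by text
    l = [p1, p2]
    l.sort()
    s = hashlib.sha1(l[0])
    s.update(l[1])
    s.update(text)
    return s.digest()

nullid = b"\0" * 20
assert len(nodeid(b"hello\n", nullid, nullid)) == 20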
49 def compress(text):
49 def compress(text):
50 """ generate a possibly-compressed representation of text """
50 """ generate a possibly-compressed representation of text """
51 if not text: return ("", text)
51 if not text: return ("", text)
52 if len(text) < 44:
52 if len(text) < 44:
53 if text[0] == '\0': return ("", text)
53 if text[0] == '\0': return ("", text)
54 return ('u', text)
54 return ('u', text)
55 bin = zlib.compress(text)
55 bin = zlib.compress(text)
56 if len(bin) > len(text):
56 if len(bin) > len(text):
57 if text[0] == '\0': return ("", text)
57 if text[0] == '\0': return ("", text)
58 return ('u', text)
58 return ('u', text)
59 return ("", bin)
59 return ("", bin)
60
60
61 def decompress(bin):
61 def decompress(bin):
62 """ decompress the given input """
62 """ decompress the given input """
63 if not bin: return bin
63 if not bin: return bin
64 t = bin[0]
64 t = bin[0]
65 if t == '\0': return bin
65 if t == '\0': return bin
66 if t == 'x': return zlib.decompress(bin)
66 if t == 'x': return zlib.decompress(bin)
67 if t == 'u': return bin[1:]
67 if t == 'u': return bin[1:]
68 raise RevlogError(_("unknown compression type %r") % t)
68 raise RevlogError(_("unknown compression type %r") % t)
69
69
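The chunk encoding implemented by compress()/decompress() above has three on-disk forms: raw zlib output (first byte 'x'), a 'u' prefix for data stored uncompressed, and text starting with a NUL byte passed through untouched. A small round-trip sketch of the same rules (illustrative, not revlog code):

import zlib

def store(text):
    # mirror of the compress() rules above, returned as one string
    if not text:
        return text
    if len(text) < 44:
        return text if text[:1] == b"\0" else b"u" + text
    bin = zlib.compress(text)
    if len(bin) > len(text):
        return text if text[:1] == b"\0" else b"u" + text
    return bin

def load(stored):
    # mirror of decompress() above
    if not stored or stored[:1] == b"\0":
        return stored
    if stored[:1] == b"x":
        return zlib.decompress(stored)
    if stored[:1] == b"u":
        return stored[1:]
    raise ValueError("unknown compression type %r" % stored[:1])

for blob in (b"", b"short", b"\0binary header", b"a" * 1000):
    assert load(store(blob)) == blob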
70 indexformatv0 = ">4l20s20s20s"
70 indexformatv0 = ">4l20s20s20s"
71 v0shaoffset = 56
71 v0shaoffset = 56
72 # index ng:
72 # index ng:
73 # 6 bytes offset
73 # 6 bytes offset
74 # 2 bytes flags
74 # 2 bytes flags
75 # 4 bytes compressed length
75 # 4 bytes compressed length
76 # 4 bytes uncompressed length
76 # 4 bytes uncompressed length
77 # 4 bytes: base rev
77 # 4 bytes: base rev
78 # 4 bytes link rev
78 # 4 bytes link rev
79 # 4 bytes parent 1 rev
79 # 4 bytes parent 1 rev
80 # 4 bytes parent 2 rev
80 # 4 bytes parent 2 rev
81 # 32 bytes: nodeid
81 # 32 bytes: nodeid
82 indexformatng = ">Qiiiiii20s12x"
82 indexformatng = ">Qiiiiii20s12x"
83 ngshaoffset = 32
83 ngshaoffset = 32
84 versionformat = ">i"
84 versionformat = ">i"
85
85
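The RevlogNG entry layout spelled out in the comment above maps byte for byte onto indexformatng; a quick struct sanity check with fabricated values (packing offset and flags into the leading 8-byte field is shown here as an assumption about the usual revlog convention):

import struct

indexformatng = ">Qiiiiii20s12x"   # same format string as above

# pack one fabricated RevlogNG index entry and confirm the 64-byte layout
offset, flags = 4096, 0
entry = struct.pack(indexformatng,
                    (offset << 16) | flags,  # 6 bytes offset + 2 bytes flags
                    120,                     # compressed length
                    300,                     # uncompressed length
                    7,                       # base rev of the delta chain
                    7,                       # link rev
                    6, -1,                   # parent 1 rev, parent 2 rev (-1 = null)
                    b"\0" * 20)              # 20-byte nodeid + 12 bytes padding
assert struct.calcsize(indexformatng) == 64
assert len(entry) == 64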
86 class lazyparser(object):
86 class lazyparser(object):
87 """
87 """
88 this class avoids the need to parse the entirety of large indices
88 this class avoids the need to parse the entirety of large indices
89 """
89 """
90
90
91 # lazyparser is not safe to use on windows if win32 extensions not
91 # lazyparser is not safe to use on windows if win32 extensions not
92 # available. it keeps a file handle open, which makes it not possible
92 # available. it keeps a file handle open, which makes it not possible
93 # to break hardlinks on local cloned repos.
93 # to break hardlinks on local cloned repos.
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
94 safe_to_use = os.name != 'nt' or (not util.is_win_9x() and
95 hasattr(util, 'win32api'))
95 hasattr(util, 'win32api'))
96
96
97 def __init__(self, dataf, size, indexformat, shaoffset):
97 def __init__(self, dataf, size, indexformat, shaoffset):
98 self.dataf = dataf
98 self.dataf = dataf
99 self.format = indexformat
99 self.format = indexformat
100 self.s = struct.calcsize(indexformat)
100 self.s = struct.calcsize(indexformat)
101 self.indexformat = indexformat
101 self.indexformat = indexformat
102 self.datasize = size
102 self.datasize = size
103 self.l = size/self.s
103 self.l = size/self.s
104 self.index = [None] * self.l
104 self.index = [None] * self.l
105 self.map = {nullid: -1}
105 self.map = {nullid: -1}
106 self.allmap = 0
106 self.allmap = 0
107 self.all = 0
107 self.all = 0
108 self.mapfind_count = 0
108 self.mapfind_count = 0
109 self.shaoffset = shaoffset
109 self.shaoffset = shaoffset
110
110
111 def loadmap(self):
111 def loadmap(self):
112 """
112 """
113 during a commit, we need to make sure the rev being added is
113 during a commit, we need to make sure the rev being added is
114 not a duplicate. This requires loading the entire index,
114 not a duplicate. This requires loading the entire index,
115 which is fairly slow. loadmap can load up just the node map,
115 which is fairly slow. loadmap can load up just the node map,
116 which takes much less time.
116 which takes much less time.
117 """
117 """
118 if self.allmap: return
118 if self.allmap: return
119 start = 0
119 start = 0
120 end = self.datasize
120 end = self.datasize
121 self.allmap = 1
121 self.allmap = 1
122 cur = 0
122 cur = 0
123 count = 0
123 count = 0
124 blocksize = self.s * 256
124 blocksize = self.s * 256
125 self.dataf.seek(0)
125 self.dataf.seek(0)
126 while cur < end:
126 while cur < end:
127 data = self.dataf.read(blocksize)
127 data = self.dataf.read(blocksize)
128 off = 0
128 off = 0
129 for x in xrange(256):
129 for x in xrange(256):
130 n = data[off + self.shaoffset:off + self.shaoffset + 20]
130 n = data[off + self.shaoffset:off + self.shaoffset + 20]
131 self.map[n] = count
131 self.map[n] = count
132 count += 1
132 count += 1
133 if count >= self.l:
133 if count >= self.l:
134 break
134 break
135 off += self.s
135 off += self.s
136 cur += blocksize
136 cur += blocksize
137
137
138 def loadblock(self, blockstart, blocksize, data=None):
138 def loadblock(self, blockstart, blocksize, data=None):
139 if self.all: return
139 if self.all: return
140 if data is None:
140 if data is None:
141 self.dataf.seek(blockstart)
141 self.dataf.seek(blockstart)
142 data = self.dataf.read(blocksize)
142 data = self.dataf.read(blocksize)
143 lend = len(data) / self.s
143 lend = len(data) / self.s
144 i = blockstart / self.s
144 i = blockstart / self.s
145 off = 0
145 off = 0
146 for x in xrange(lend):
146 for x in xrange(lend):
147 if self.index[i + x] == None:
147 if self.index[i + x] == None:
148 b = data[off : off + self.s]
148 b = data[off : off + self.s]
149 self.index[i + x] = b
149 self.index[i + x] = b
150 n = b[self.shaoffset:self.shaoffset + 20]
150 n = b[self.shaoffset:self.shaoffset + 20]
151 self.map[n] = i + x
151 self.map[n] = i + x
152 off += self.s
152 off += self.s
153
153
154 def findnode(self, node):
154 def findnode(self, node):
155 """search backwards through the index file for a specific node"""
155 """search backwards through the index file for a specific node"""
156 if self.allmap: return None
156 if self.allmap: return None
157
157
158 # hg log will cause many many searches for the manifest
158 # hg log will cause many many searches for the manifest
159 # nodes. After we get called a few times, just load the whole
159 # nodes. After we get called a few times, just load the whole
160 # thing.
160 # thing.
161 if self.mapfind_count > 8:
161 if self.mapfind_count > 8:
162 self.loadmap()
162 self.loadmap()
163 if node in self.map:
163 if node in self.map:
164 return node
164 return node
165 return None
165 return None
166 self.mapfind_count += 1
166 self.mapfind_count += 1
167 last = self.l - 1
167 last = self.l - 1
168 while self.index[last] != None:
168 while self.index[last] != None:
169 if last == 0:
169 if last == 0:
170 self.all = 1
170 self.all = 1
171 self.allmap = 1
171 self.allmap = 1
172 return None
172 return None
173 last -= 1
173 last -= 1
174 end = (last + 1) * self.s
174 end = (last + 1) * self.s
175 blocksize = self.s * 256
175 blocksize = self.s * 256
176 while end >= 0:
176 while end >= 0:
177 start = max(end - blocksize, 0)
177 start = max(end - blocksize, 0)
178 self.dataf.seek(start)
178 self.dataf.seek(start)
179 data = self.dataf.read(end - start)
179 data = self.dataf.read(end - start)
180 findend = end - start
180 findend = end - start
181 while True:
181 while True:
182 # we're searching backwards, so we have to make sure
182 # we're searching backwards, so we have to make sure
183 # we don't find a changeset where this node is a parent
183 # we don't find a changeset where this node is a parent
184 off = data.rfind(node, 0, findend)
184 off = data.rfind(node, 0, findend)
185 findend = off
185 findend = off
186 if off >= 0:
186 if off >= 0:
187 i = off / self.s
187 i = off / self.s
188 off = i * self.s
188 off = i * self.s
189 n = data[off + self.shaoffset:off + self.shaoffset + 20]
189 n = data[off + self.shaoffset:off + self.shaoffset + 20]
190 if n == node:
190 if n == node:
191 self.map[n] = i + start / self.s
191 self.map[n] = i + start / self.s
192 return node
192 return node
193 else:
193 else:
194 break
194 break
195 end -= blocksize
195 end -= blocksize
196 return None
196 return None
197
197
198 def loadindex(self, i=None, end=None):
198 def loadindex(self, i=None, end=None):
199 if self.all: return
199 if self.all: return
200 all = False
200 all = False
201 if i == None:
201 if i == None:
202 blockstart = 0
202 blockstart = 0
203 blocksize = (512 / self.s) * self.s
203 blocksize = (512 / self.s) * self.s
204 end = self.datasize
204 end = self.datasize
205 all = True
205 all = True
206 else:
206 else:
207 if end:
207 if end:
208 blockstart = i * self.s
208 blockstart = i * self.s
209 end = end * self.s
209 end = end * self.s
210 blocksize = end - blockstart
210 blocksize = end - blockstart
211 else:
211 else:
212 blockstart = (i & ~(32)) * self.s
212 blockstart = (i & ~(32)) * self.s
213 blocksize = self.s * 64
213 blocksize = self.s * 64
214 end = blockstart + blocksize
214 end = blockstart + blocksize
215 while blockstart < end:
215 while blockstart < end:
216 self.loadblock(blockstart, blocksize)
216 self.loadblock(blockstart, blocksize)
217 blockstart += blocksize
217 blockstart += blocksize
218 if all: self.all = True
218 if all: self.all = True
219
219
220 class lazyindex(object):
220 class lazyindex(object):
221 """a lazy version of the index array"""
221 """a lazy version of the index array"""
222 def __init__(self, parser):
222 def __init__(self, parser):
223 self.p = parser
223 self.p = parser
224 def __len__(self):
224 def __len__(self):
225 return len(self.p.index)
225 return len(self.p.index)
226 def load(self, pos):
226 def load(self, pos):
227 if pos < 0:
227 if pos < 0:
228 pos += len(self.p.index)
228 pos += len(self.p.index)
229 self.p.loadindex(pos)
229 self.p.loadindex(pos)
230 return self.p.index[pos]
230 return self.p.index[pos]
231 def __getitem__(self, pos):
231 def __getitem__(self, pos):
232 ret = self.p.index[pos] or self.load(pos)
232 ret = self.p.index[pos] or self.load(pos)
233 if isinstance(ret, str):
233 if isinstance(ret, str):
234 ret = struct.unpack(self.p.indexformat, ret)
234 ret = struct.unpack(self.p.indexformat, ret)
235 return ret
235 return ret
236 def __setitem__(self, pos, item):
236 def __setitem__(self, pos, item):
237 self.p.index[pos] = item
237 self.p.index[pos] = item
238 def __delitem__(self, pos):
238 def __delitem__(self, pos):
239 del self.p.index[pos]
239 del self.p.index[pos]
240 def append(self, e):
240 def append(self, e):
241 self.p.index.append(e)
241 self.p.index.append(e)
242
242
243 class lazymap(object):
243 class lazymap(object):
244 """a lazy version of the node map"""
244 """a lazy version of the node map"""
245 def __init__(self, parser):
245 def __init__(self, parser):
246 self.p = parser
246 self.p = parser
247 def load(self, key):
247 def load(self, key):
248 n = self.p.findnode(key)
248 n = self.p.findnode(key)
249 if n == None:
249 if n == None:
250 raise KeyError(key)
250 raise KeyError(key)
251 def __contains__(self, key):
251 def __contains__(self, key):
252 if key in self.p.map:
252 if key in self.p.map:
253 return True
253 return True
254 self.p.loadmap()
254 self.p.loadmap()
255 return key in self.p.map
255 return key in self.p.map
256 def __iter__(self):
256 def __iter__(self):
257 yield nullid
257 yield nullid
258 for i in xrange(self.p.l):
258 for i in xrange(self.p.l):
259 ret = self.p.index[i]
259 ret = self.p.index[i]
260 if not ret:
260 if not ret:
261 self.p.loadindex(i)
261 self.p.loadindex(i)
262 ret = self.p.index[i]
262 ret = self.p.index[i]
263 if isinstance(ret, str):
263 if isinstance(ret, str):
264 ret = struct.unpack(self.p.indexformat, ret)
264 ret = struct.unpack(self.p.indexformat, ret)
265 yield ret[-1]
265 yield ret[-1]
266 def __getitem__(self, key):
266 def __getitem__(self, key):
267 try:
267 try:
268 return self.p.map[key]
268 return self.p.map[key]
269 except KeyError:
269 except KeyError:
270 try:
270 try:
271 self.load(key)
271 self.load(key)
272 return self.p.map[key]
272 return self.p.map[key]
273 except KeyError:
273 except KeyError:
274 raise KeyError("node " + hex(key))
274 raise KeyError("node " + hex(key))
275 def __setitem__(self, key, val):
275 def __setitem__(self, key, val):
276 self.p.map[key] = val
276 self.p.map[key] = val
277 def __delitem__(self, key):
277 def __delitem__(self, key):
278 del self.p.map[key]
278 del self.p.map[key]
279
279
280 class RevlogError(Exception): pass
280 class RevlogError(Exception): pass
281
281
282 class revlog(object):
282 class revlog(object):
283 """
283 """
284 the underlying revision storage object
284 the underlying revision storage object
285
285
286 A revlog consists of two parts, an index and the revision data.
286 A revlog consists of two parts, an index and the revision data.
287
287
288 The index is a file with a fixed record size containing
288 The index is a file with a fixed record size containing
289 information on each revision, including its nodeid (hash), the
289 information on each revision, including its nodeid (hash), the
290 nodeids of its parents, the position and offset of its data within
290 nodeids of its parents, the position and offset of its data within
291 the data file, and the revision it's based on. Finally, each entry
291 the data file, and the revision it's based on. Finally, each entry
292 contains a linkrev entry that can serve as a pointer to external
292 contains a linkrev entry that can serve as a pointer to external
293 data.
293 data.
294
294
295 The revision data itself is a linear collection of data chunks.
295 The revision data itself is a linear collection of data chunks.
296 Each chunk represents a revision and is usually represented as a
296 Each chunk represents a revision and is usually represented as a
297 delta against the previous chunk. To bound lookup time, runs of
297 delta against the previous chunk. To bound lookup time, runs of
298 deltas are limited to about 2 times the length of the original
298 deltas are limited to about 2 times the length of the original
299 version data. This makes retrieval of a version proportional to
299 version data. This makes retrieval of a version proportional to
300 its size, or O(1) relative to the number of revisions.
300 its size, or O(1) relative to the number of revisions.
301
301
302 Both pieces of the revlog are written to in an append-only
302 Both pieces of the revlog are written to in an append-only
303 fashion, which means we never need to rewrite a file to insert or
303 fashion, which means we never need to rewrite a file to insert or
304 remove data, and can use some simple techniques to avoid the need
304 remove data, and can use some simple techniques to avoid the need
305 for locking while reading.
305 for locking while reading.
306 """
306 """
307 def __init__(self, opener, indexfile, datafile,
307 def __init__(self, opener, indexfile, datafile,
308 defversion=REVLOG_DEFAULT_VERSION):
308 defversion=REVLOG_DEFAULT_VERSION):
309 """
309 """
310 create a revlog object
310 create a revlog object
311
311
312 opener is a function that abstracts the file opening operation
312 opener is a function that abstracts the file opening operation
313 and can be used to implement COW semantics or the like.
313 and can be used to implement COW semantics or the like.
314 """
314 """
315 self.indexfile = indexfile
315 self.indexfile = indexfile
316 self.datafile = datafile
316 self.datafile = datafile
317 self.opener = opener
317 self.opener = opener
318
318
319 self.indexstat = None
319 self.indexstat = None
320 self.cache = None
320 self.cache = None
321 self.chunkcache = None
321 self.chunkcache = None
322 self.defversion = defversion
322 self.defversion = defversion
323 self.load()
323 self.load()
324
324
325 def load(self):
325 def load(self):
326 v = self.defversion
326 v = self.defversion
327 try:
327 try:
328 f = self.opener(self.indexfile)
328 f = self.opener(self.indexfile)
329 i = f.read(4)
329 i = f.read(4)
330 f.seek(0)
330 f.seek(0)
331 except IOError, inst:
331 except IOError, inst:
332 if inst.errno != errno.ENOENT:
332 if inst.errno != errno.ENOENT:
333 raise
333 raise
334 i = ""
334 i = ""
335 else:
335 else:
336 try:
336 try:
337 st = util.fstat(f)
337 st = util.fstat(f)
338 except AttributeError, inst:
338 except AttributeError, inst:
339 st = None
339 st = None
340 else:
340 else:
341 oldst = self.indexstat
341 oldst = self.indexstat
342 if (oldst and st.st_dev == oldst.st_dev
342 if (oldst and st.st_dev == oldst.st_dev
343 and st.st_ino == oldst.st_ino
343 and st.st_ino == oldst.st_ino
344 and st.st_mtime == oldst.st_mtime
344 and st.st_mtime == oldst.st_mtime
345 and st.st_ctime == oldst.st_ctime):
345 and st.st_ctime == oldst.st_ctime):
346 return
346 return
347 self.indexstat = st
347 self.indexstat = st
348 if len(i) > 0:
348 if len(i) > 0:
349 v = struct.unpack(versionformat, i)[0]
349 v = struct.unpack(versionformat, i)[0]
350 flags = v & ~0xFFFF
350 flags = v & ~0xFFFF
351 fmt = v & 0xFFFF
351 fmt = v & 0xFFFF
352 if fmt == REVLOGV0:
352 if fmt == REVLOGV0:
353 if flags:
353 if flags:
354 raise RevlogError(_("index %s invalid flags %x for format v0" %
354 raise RevlogError(_("index %s invalid flags %x for format v0" %
355 (self.indexfile, flags)))
355 (self.indexfile, flags)))
356 elif fmt == REVLOGNG:
356 elif fmt == REVLOGNG:
357 if flags & ~REVLOGNGINLINEDATA:
357 if flags & ~REVLOGNGINLINEDATA:
358 raise RevlogError(_("index %s invalid flags %x for revlogng" %
358 raise RevlogError(_("index %s invalid flags %x for revlogng" %
359 (self.indexfile, flags)))
359 (self.indexfile, flags)))
360 else:
360 else:
361 raise RevlogError(_("index %s invalid format %d" %
361 raise RevlogError(_("index %s invalid format %d" %
362 (self.indexfile, fmt)))
362 (self.indexfile, fmt)))
363 self.version = v
363 self.version = v
364 if v == REVLOGV0:
364 if v == REVLOGV0:
365 self.indexformat = indexformatv0
365 self.indexformat = indexformatv0
366 shaoffset = v0shaoffset
366 shaoffset = v0shaoffset
367 else:
367 else:
368 self.indexformat = indexformatng
368 self.indexformat = indexformatng
369 shaoffset = ngshaoffset
369 shaoffset = ngshaoffset
370
370
371 if i:
371 if i:
372 if (lazyparser.safe_to_use and not self.inlinedata() and
372 if (lazyparser.safe_to_use and not self.inlinedata() and
373 st and st.st_size > 10000):
373 st and st.st_size > 10000):
374 # big index, let's parse it on demand
374 # big index, let's parse it on demand
375 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
375 parser = lazyparser(f, st.st_size, self.indexformat, shaoffset)
376 self.index = lazyindex(parser)
376 self.index = lazyindex(parser)
377 self.nodemap = lazymap(parser)
377 self.nodemap = lazymap(parser)
378 else:
378 else:
379 self.parseindex(f, st)
379 self.parseindex(f, st)
380 if self.version != REVLOGV0:
380 if self.version != REVLOGV0:
381 e = list(self.index[0])
381 e = list(self.index[0])
382 type = self.ngtype(e[0])
382 type = self.ngtype(e[0])
383 e[0] = self.offset_type(0, type)
383 e[0] = self.offset_type(0, type)
384 self.index[0] = e
384 self.index[0] = e
385 else:
385 else:
386 self.nodemap = { nullid: -1}
386 self.nodemap = { nullid: -1}
387 self.index = []
387 self.index = []
388
388
389
389
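A small self-contained check (Python 2) of the version word load() decodes above: the low 16 bits carry the format number and the high bits carry feature flags. The constant values below mirror what REVLOGNG and REVLOGNGINLINEDATA are assumed to be near the top of this module.

import struct

REVLOGNG = 1                    # assumed values of the module constants
REVLOGNGINLINEDATA = 1 << 16
word = struct.pack(">i", REVLOGNG | REVLOGNGINLINEDATA)
v = struct.unpack(">i", word)[0]
assert v & 0xFFFF == REVLOGNG             # fmt
assert v & ~0xFFFF == REVLOGNGINLINEDATA  # flags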
390 def parseindex(self, fp, st):
390 def parseindex(self, fp, st):
391 s = struct.calcsize(self.indexformat)
391 s = struct.calcsize(self.indexformat)
392 self.index = []
392 self.index = []
393 self.nodemap = {nullid: -1}
393 self.nodemap = {nullid: -1}
394 inline = self.inlinedata()
394 inline = self.inlinedata()
395 n = 0
395 n = 0
396 leftover = None
396 leftover = None
397 while True:
397 while True:
398 if st:
398 if st:
399 data = fp.read(65536)
399 data = fp.read(65536)
400 else:
400 else:
401 # hack for httprangereader, it doesn't do partial reads well
401 # hack for httprangereader, it doesn't do partial reads well
402 data = fp.read()
402 data = fp.read()
403 if not data:
403 if not data:
404 break
404 break
405 if n == 0 and self.inlinedata():
405 if n == 0 and self.inlinedata():
406 # cache the first chunk
406 # cache the first chunk
407 self.chunkcache = (0, data)
407 self.chunkcache = (0, data)
408 if leftover:
408 if leftover:
409 data = leftover + data
409 data = leftover + data
410 leftover = None
410 leftover = None
411 off = 0
411 off = 0
412 l = len(data)
412 l = len(data)
413 while off < l:
413 while off < l:
414 if l - off < s:
414 if l - off < s:
415 leftover = data[off:]
415 leftover = data[off:]
416 break
416 break
417 cur = data[off:off + s]
417 cur = data[off:off + s]
418 off += s
418 off += s
419 e = struct.unpack(self.indexformat, cur)
419 e = struct.unpack(self.indexformat, cur)
420 self.index.append(e)
420 self.index.append(e)
421 self.nodemap[e[-1]] = n
421 self.nodemap[e[-1]] = n
422 n += 1
422 n += 1
423 if inline:
423 if inline:
424 off += e[1]
424 off += e[1]
425 if off > l:
425 if off > l:
426 # some things don't seek well, just read it
426 # some things don't seek well, just read it
427 fp.read(off - l)
427 fp.read(off - l)
428 if not st:
428 if not st:
429 break
429 break
430
430
431
431
432 def ngoffset(self, q):
432 def ngoffset(self, q):
433 if q & 0xFFFF:
433 if q & 0xFFFF:
434 raise RevlogError(_('%s: incompatible revision flag %x') %
434 raise RevlogError(_('%s: incompatible revision flag %x') %
435 (self.indexfile, q))
435 (self.indexfile, q))
436 return long(q >> 16)
436 return long(q >> 16)
437
437
438 def ngtype(self, q):
438 def ngtype(self, q):
439 return int(q & 0xFFFF)
439 return int(q & 0xFFFF)
440
440
441 def offset_type(self, offset, type):
441 def offset_type(self, offset, type):
442 return long(long(offset) << 16 | type)
442 return long(long(offset) << 16 | type)
443
443
444 def loadindex(self, start, end):
444 def loadindex(self, start, end):
445 """load a block of indexes all at once from the lazy parser"""
445 """load a block of indexes all at once from the lazy parser"""
446 if isinstance(self.index, lazyindex):
446 if isinstance(self.index, lazyindex):
447 self.index.p.loadindex(start, end)
447 self.index.p.loadindex(start, end)
448
448
449 def loadindexmap(self):
449 def loadindexmap(self):
450 """loads both the map and the index from the lazy parser"""
450 """loads both the map and the index from the lazy parser"""
451 if isinstance(self.index, lazyindex):
451 if isinstance(self.index, lazyindex):
452 p = self.index.p
452 p = self.index.p
453 p.loadindex()
453 p.loadindex()
454 self.nodemap = p.map
454 self.nodemap = p.map
455
455
456 def loadmap(self):
456 def loadmap(self):
457 """loads the map from the lazy parser"""
457 """loads the map from the lazy parser"""
458 if isinstance(self.nodemap, lazymap):
458 if isinstance(self.nodemap, lazymap):
459 self.nodemap.p.loadmap()
459 self.nodemap.p.loadmap()
460 self.nodemap = self.nodemap.p.map
460 self.nodemap = self.nodemap.p.map
461
461
462 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
462 def inlinedata(self): return self.version & REVLOGNGINLINEDATA
463 def tip(self): return self.node(len(self.index) - 1)
463 def tip(self): return self.node(len(self.index) - 1)
464 def count(self): return len(self.index)
464 def count(self): return len(self.index)
465 def node(self, rev):
465 def node(self, rev):
466 return (rev < 0) and nullid or self.index[rev][-1]
466 return (rev < 0) and nullid or self.index[rev][-1]
467 def rev(self, node):
467 def rev(self, node):
468 try:
468 try:
469 return self.nodemap[node]
469 return self.nodemap[node]
470 except KeyError:
470 except KeyError:
471 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
471 raise RevlogError(_('%s: no node %s') % (self.indexfile, hex(node)))
472 def linkrev(self, node):
472 def linkrev(self, node):
473 return (node == nullid) and -1 or self.index[self.rev(node)][-4]
473 return (node == nullid) and -1 or self.index[self.rev(node)][-4]
474 def parents(self, node):
474 def parents(self, node):
475 if node == nullid: return (nullid, nullid)
475 if node == nullid: return (nullid, nullid)
476 r = self.rev(node)
476 r = self.rev(node)
477 d = self.index[r][-3:-1]
477 d = self.index[r][-3:-1]
478 if self.version == REVLOGV0:
478 if self.version == REVLOGV0:
479 return d
479 return d
480 return [ self.node(x) for x in d ]
480 return [ self.node(x) for x in d ]
481 def parentrevs(self, rev):
481 def parentrevs(self, rev):
482 if rev == -1:
482 if rev == -1:
483 return (-1, -1)
483 return (-1, -1)
484 d = self.index[rev][-3:-1]
484 d = self.index[rev][-3:-1]
485 if self.version == REVLOGV0:
485 if self.version == REVLOGV0:
486 return [ self.rev(x) for x in d ]
486 return [ self.rev(x) for x in d ]
487 return d
487 return d
488 def start(self, rev):
488 def start(self, rev):
489 if rev < 0:
489 if rev < 0:
490 return -1
490 return -1
491 if self.version != REVLOGV0:
491 if self.version != REVLOGV0:
492 return self.ngoffset(self.index[rev][0])
492 return self.ngoffset(self.index[rev][0])
493 return self.index[rev][0]
493 return self.index[rev][0]
494
494
495 def end(self, rev): return self.start(rev) + self.length(rev)
495 def end(self, rev): return self.start(rev) + self.length(rev)
496
496
497 def size(self, rev):
497 def size(self, rev):
498 """return the length of the uncompressed text for a given revision"""
498 """return the length of the uncompressed text for a given revision"""
499 l = -1
499 l = -1
500 if self.version != REVLOGV0:
500 if self.version != REVLOGV0:
501 l = self.index[rev][2]
501 l = self.index[rev][2]
502 if l >= 0:
502 if l >= 0:
503 return l
503 return l
504
504
505 t = self.revision(self.node(rev))
505 t = self.revision(self.node(rev))
506 return len(t)
506 return len(t)
507
507
508 # alternate implementation. The advantage to this code is it
508 # alternate implementation. The advantage to this code is it
509 # will be faster for a single revision. But, the results are not
509 # will be faster for a single revision. But, the results are not
510 # cached, so finding the size of every revision will be slower.
510 # cached, so finding the size of every revision will be slower.
511 """
511 """
512 if self.cache and self.cache[1] == rev:
512 if self.cache and self.cache[1] == rev:
513 return len(self.cache[2])
513 return len(self.cache[2])
514
514
515 base = self.base(rev)
515 base = self.base(rev)
516 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
516 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
517 base = self.cache[1]
517 base = self.cache[1]
518 text = self.cache[2]
518 text = self.cache[2]
519 else:
519 else:
520 text = self.revision(self.node(base))
520 text = self.revision(self.node(base))
521
521
522 l = len(text)
522 l = len(text)
523 for x in xrange(base + 1, rev + 1):
523 for x in xrange(base + 1, rev + 1):
524 l = mdiff.patchedsize(l, self.chunk(x))
524 l = mdiff.patchedsize(l, self.chunk(x))
525 return l
525 return l
526 """
526 """
527
527
528 def length(self, rev):
528 def length(self, rev):
529 if rev < 0:
529 if rev < 0:
530 return 0
530 return 0
531 else:
531 else:
532 return self.index[rev][1]
532 return self.index[rev][1]
533 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
533 def base(self, rev): return (rev < 0) and rev or self.index[rev][-5]
534
534
535 def reachable(self, rev, stop=None):
535 def reachable(self, rev, stop=None):
536 reachable = {}
536 reachable = {}
537 visit = [rev]
537 visit = [rev]
538 reachable[rev] = 1
538 reachable[rev] = 1
539 if stop:
539 if stop:
540 stopn = self.rev(stop)
540 stopn = self.rev(stop)
541 else:
541 else:
542 stopn = 0
542 stopn = 0
543 while visit:
543 while visit:
544 n = visit.pop(0)
544 n = visit.pop(0)
545 if n == stop:
545 if n == stop:
546 continue
546 continue
547 if n == nullid:
547 if n == nullid:
548 continue
548 continue
549 for p in self.parents(n):
549 for p in self.parents(n):
550 if self.rev(p) < stopn:
550 if self.rev(p) < stopn:
551 continue
551 continue
552 if p not in reachable:
552 if p not in reachable:
553 reachable[p] = 1
553 reachable[p] = 1
554 visit.append(p)
554 visit.append(p)
555 return reachable
555 return reachable
556
556
557 def nodesbetween(self, roots=None, heads=None):
557 def nodesbetween(self, roots=None, heads=None):
558 """Return a tuple containing three elements. Elements 1 and 2 contain
558 """Return a tuple containing three elements. Elements 1 and 2 contain
559 a final list of bases and heads after all the unreachable ones have been
559 a final list of bases and heads after all the unreachable ones have been
560 pruned. Element 0 contains a topologically sorted list of all
560 pruned. Element 0 contains a topologically sorted list of all
561
561
562 nodes that satisfy these constraints:
562 nodes that satisfy these constraints:
563 1. All nodes must be descended from a node in roots (the nodes on
563 1. All nodes must be descended from a node in roots (the nodes on
564 roots are considered descended from themselves).
564 roots are considered descended from themselves).
565 2. All nodes must also be ancestors of a node in heads (the nodes in
565 2. All nodes must also be ancestors of a node in heads (the nodes in
566 heads are considered to be their own ancestors).
566 heads are considered to be their own ancestors).
567
567
568 If roots is unspecified, nullid is assumed as the only root.
568 If roots is unspecified, nullid is assumed as the only root.
569 If heads is unspecified, it is taken to be the output of the
569 If heads is unspecified, it is taken to be the output of the
570 heads method (i.e. a list of all nodes in the repository that
570 heads method (i.e. a list of all nodes in the repository that
571 have no children)."""
571 have no children)."""
572 nonodes = ([], [], [])
572 nonodes = ([], [], [])
573 if roots is not None:
573 if roots is not None:
574 roots = list(roots)
574 roots = list(roots)
575 if not roots:
575 if not roots:
576 return nonodes
576 return nonodes
577 lowestrev = min([self.rev(n) for n in roots])
577 lowestrev = min([self.rev(n) for n in roots])
578 else:
578 else:
579 roots = [nullid] # Everybody's a descendent of nullid
579 roots = [nullid] # Everybody's a descendent of nullid
580 lowestrev = -1
580 lowestrev = -1
581 if (lowestrev == -1) and (heads is None):
581 if (lowestrev == -1) and (heads is None):
582 # We want _all_ the nodes!
582 # We want _all_ the nodes!
583 return ([self.node(r) for r in xrange(0, self.count())],
583 return ([self.node(r) for r in xrange(0, self.count())],
584 [nullid], list(self.heads()))
584 [nullid], list(self.heads()))
585 if heads is None:
585 if heads is None:
586 # All nodes are ancestors, so the latest ancestor is the last
586 # All nodes are ancestors, so the latest ancestor is the last
587 # node.
587 # node.
588 highestrev = self.count() - 1
588 highestrev = self.count() - 1
589 # Set ancestors to None to signal that every node is an ancestor.
589 # Set ancestors to None to signal that every node is an ancestor.
590 ancestors = None
590 ancestors = None
591 # Set heads to an empty dictionary for later discovery of heads
591 # Set heads to an empty dictionary for later discovery of heads
592 heads = {}
592 heads = {}
593 else:
593 else:
594 heads = list(heads)
594 heads = list(heads)
595 if not heads:
595 if not heads:
596 return nonodes
596 return nonodes
597 ancestors = {}
597 ancestors = {}
598 # Start at the top and keep marking parents until we're done.
598 # Start at the top and keep marking parents until we're done.
599 nodestotag = heads[:]
599 nodestotag = heads[:]
600 # Turn heads into a dictionary so we can remove 'fake' heads.
600 # Turn heads into a dictionary so we can remove 'fake' heads.
601 # Also, later we will be using it to filter out the heads we can't
601 # Also, later we will be using it to filter out the heads we can't
602 # find from roots.
602 # find from roots.
603 heads = dict.fromkeys(heads, 0)
603 heads = dict.fromkeys(heads, 0)
604 # Remember where the top was so we can use it as a limit later.
604 # Remember where the top was so we can use it as a limit later.
605 highestrev = max([self.rev(n) for n in nodestotag])
605 highestrev = max([self.rev(n) for n in nodestotag])
606 while nodestotag:
606 while nodestotag:
607 # grab a node to tag
607 # grab a node to tag
608 n = nodestotag.pop()
608 n = nodestotag.pop()
609 # Never tag nullid
609 # Never tag nullid
610 if n == nullid:
610 if n == nullid:
611 continue
611 continue
612 # A node's revision number represents its place in a
612 # A node's revision number represents its place in a
613 # topologically sorted list of nodes.
613 # topologically sorted list of nodes.
614 r = self.rev(n)
614 r = self.rev(n)
615 if r >= lowestrev:
615 if r >= lowestrev:
616 if n not in ancestors:
616 if n not in ancestors:
617 # If we are possibly a descendent of one of the roots
617 # If we are possibly a descendent of one of the roots
618 # and we haven't already been marked as an ancestor
618 # and we haven't already been marked as an ancestor
619 ancestors[n] = 1 # Mark as ancestor
619 ancestors[n] = 1 # Mark as ancestor
620 # Add non-nullid parents to list of nodes to tag.
620 # Add non-nullid parents to list of nodes to tag.
621 nodestotag.extend([p for p in self.parents(n) if
621 nodestotag.extend([p for p in self.parents(n) if
622 p != nullid])
622 p != nullid])
623 elif n in heads: # We've seen it before, is it a fake head?
623 elif n in heads: # We've seen it before, is it a fake head?
624 # So it is, real heads should not be the ancestors of
624 # So it is, real heads should not be the ancestors of
625 # any other heads.
625 # any other heads.
626 heads.pop(n)
626 heads.pop(n)
627 if not ancestors:
627 if not ancestors:
628 return nonodes
628 return nonodes
629 # Now that we have our set of ancestors, we want to remove any
629 # Now that we have our set of ancestors, we want to remove any
630 # roots that are not ancestors.
630 # roots that are not ancestors.
631
631
632 # If one of the roots was nullid, everything is included anyway.
632 # If one of the roots was nullid, everything is included anyway.
633 if lowestrev > -1:
633 if lowestrev > -1:
634 # But, since we weren't, let's recompute the lowest rev to not
634 # But, since we weren't, let's recompute the lowest rev to not
635 # include roots that aren't ancestors.
635 # include roots that aren't ancestors.
636
636
637 # Filter out roots that aren't ancestors of heads
637 # Filter out roots that aren't ancestors of heads
638 roots = [n for n in roots if n in ancestors]
638 roots = [n for n in roots if n in ancestors]
639 # Recompute the lowest revision
639 # Recompute the lowest revision
640 if roots:
640 if roots:
641 lowestrev = min([self.rev(n) for n in roots])
641 lowestrev = min([self.rev(n) for n in roots])
642 else:
642 else:
643 # No more roots? Return empty list
643 # No more roots? Return empty list
644 return nonodes
644 return nonodes
645 else:
645 else:
646 # We are descending from nullid, and don't need to care about
646 # We are descending from nullid, and don't need to care about
647 # any other roots.
647 # any other roots.
648 lowestrev = -1
648 lowestrev = -1
649 roots = [nullid]
649 roots = [nullid]
650 # Transform our roots list into a 'set' (i.e. a dictionary where the
650 # Transform our roots list into a 'set' (i.e. a dictionary where the
651 # values don't matter).
651 # values don't matter).
652 descendents = dict.fromkeys(roots, 1)
652 descendents = dict.fromkeys(roots, 1)
653 # Also, keep the original roots so we can filter out roots that aren't
653 # Also, keep the original roots so we can filter out roots that aren't
654 # 'real' roots (i.e. are descended from other roots).
654 # 'real' roots (i.e. are descended from other roots).
655 roots = descendents.copy()
655 roots = descendents.copy()
656 # Our topologically sorted list of output nodes.
656 # Our topologically sorted list of output nodes.
657 orderedout = []
657 orderedout = []
658 # Don't start at nullid since we don't want nullid in our output list,
658 # Don't start at nullid since we don't want nullid in our output list,
659 # and if nullid shows up in descendents, empty parents will look like
659 # and if nullid shows up in descendents, empty parents will look like
660 # they're descendents.
660 # they're descendents.
661 for r in xrange(max(lowestrev, 0), highestrev + 1):
661 for r in xrange(max(lowestrev, 0), highestrev + 1):
662 n = self.node(r)
662 n = self.node(r)
663 isdescendent = False
663 isdescendent = False
664 if lowestrev == -1: # Everybody is a descendent of nullid
664 if lowestrev == -1: # Everybody is a descendent of nullid
665 isdescendent = True
665 isdescendent = True
666 elif n in descendents:
666 elif n in descendents:
667 # n is already a descendent
667 # n is already a descendent
668 isdescendent = True
668 isdescendent = True
669 # This check only needs to be done here because all the roots
669 # This check only needs to be done here because all the roots
670 # will start being marked as descendents before the loop.
670 # will start being marked as descendents before the loop.
671 if n in roots:
671 if n in roots:
672 # If n was a root, check if it's a 'real' root.
672 # If n was a root, check if it's a 'real' root.
673 p = tuple(self.parents(n))
673 p = tuple(self.parents(n))
674 # If any of its parents are descendents, it's not a root.
674 # If any of its parents are descendents, it's not a root.
675 if (p[0] in descendents) or (p[1] in descendents):
675 if (p[0] in descendents) or (p[1] in descendents):
676 roots.pop(n)
676 roots.pop(n)
677 else:
677 else:
678 p = tuple(self.parents(n))
678 p = tuple(self.parents(n))
679 # A node is a descendent if either of its parents are
679 # A node is a descendent if either of its parents are
680 # descendents. (We seeded the descendents list with the roots
680 # descendents. (We seeded the descendents list with the roots
681 # up there, remember?)
681 # up there, remember?)
682 if (p[0] in descendents) or (p[1] in descendents):
682 if (p[0] in descendents) or (p[1] in descendents):
683 descendents[n] = 1
683 descendents[n] = 1
684 isdescendent = True
684 isdescendent = True
685 if isdescendent and ((ancestors is None) or (n in ancestors)):
685 if isdescendent and ((ancestors is None) or (n in ancestors)):
686 # Only include nodes that are both descendents and ancestors.
686 # Only include nodes that are both descendents and ancestors.
687 orderedout.append(n)
687 orderedout.append(n)
688 if (ancestors is not None) and (n in heads):
688 if (ancestors is not None) and (n in heads):
689 # We're trying to figure out which heads are reachable
689 # We're trying to figure out which heads are reachable
690 # from roots.
690 # from roots.
691 # Mark this head as having been reached
691 # Mark this head as having been reached
692 heads[n] = 1
692 heads[n] = 1
693 elif ancestors is None:
693 elif ancestors is None:
694 # Otherwise, we're trying to discover the heads.
694 # Otherwise, we're trying to discover the heads.
695 # Assume this is a head because if it isn't, the next step
695 # Assume this is a head because if it isn't, the next step
696 # will eventually remove it.
696 # will eventually remove it.
697 heads[n] = 1
697 heads[n] = 1
698 # But, obviously its parents aren't.
698 # But, obviously its parents aren't.
699 for p in self.parents(n):
699 for p in self.parents(n):
700 heads.pop(p, None)
700 heads.pop(p, None)
701 heads = [n for n in heads.iterkeys() if heads[n] != 0]
701 heads = [n for n in heads.iterkeys() if heads[n] != 0]
702 roots = roots.keys()
702 roots = roots.keys()
703 assert orderedout
703 assert orderedout
704 assert roots
704 assert roots
705 assert heads
705 assert heads
706 return (orderedout, roots, heads)
706 return (orderedout, roots, heads)
707
707
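A toy, self-contained restatement (Python 2) of the two constraints in nodesbetween()'s docstring, on a four-node DAG described only by a parents mapping; none of this touches revlog state.

parents = {0: (), 1: (0,), 2: (1,), 3: (1,)}   # 2 and 3 have no children

def ancestor_set(n):
    seen, stack = {n: 1}, [n]
    while stack:
        for p in parents[stack.pop()]:
            if p not in seen:
                seen[p] = 1
                stack.append(p)
    return seen

roots, heads = [1], [2]
head_anc = {}
for h in heads:
    head_anc.update(ancestor_set(h))
# keep nodes that descend from a root and are ancestors of a head
between = [n for n in sorted(parents)
           if any(r in ancestor_set(n) for r in roots) and n in head_anc]
assert between == [1, 2]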
708 def heads(self, start=None):
708 def heads(self, start=None):
709 """return the list of all nodes that have no children
709 """return the list of all nodes that have no children
710
710
711 if start is specified, only heads that are descendants of
711 if start is specified, only heads that are descendants of
712 start will be returned
712 start will be returned
713
713
714 """
714 """
715 if start is None:
715 if start is None:
716 start = nullid
716 start = nullid
717 startrev = self.rev(start)
717 startrev = self.rev(start)
718 reachable = {startrev: 1}
718 reachable = {startrev: 1}
719 heads = {startrev: 1}
719 heads = {startrev: 1}
720
720
721 parentrevs = self.parentrevs
721 parentrevs = self.parentrevs
722 for r in xrange(startrev + 1, self.count()):
722 for r in xrange(startrev + 1, self.count()):
723 for p in parentrevs(r):
723 for p in parentrevs(r):
724 if p in reachable:
724 if p in reachable:
725 reachable[r] = 1
725 reachable[r] = 1
726 heads[r] = 1
726 heads[r] = 1
727 if p in heads:
727 if p in heads:
728 del heads[p]
728 del heads[p]
729 return [self.node(r) for r in heads]
729 return [self.node(r) for r in heads]
730
730
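The same parent-dropping idea heads() uses above, shown self-contained on a toy revision graph (Python 2): walk the revisions in topological order and evict anything that turns out to be somebody's parent.

parentrevs = {0: (-1, -1), 1: (0, -1), 2: (1, -1), 3: (1, -1)}
heads = {}
for r in sorted(parentrevs):
    heads[r] = 1
    for p in parentrevs[r]:
        heads.pop(p, None)
assert sorted(heads) == [2, 3]    # revisions 2 and 3 have no children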
731 def children(self, node):
731 def children(self, node):
732 """find the children of a given node"""
732 """find the children of a given node"""
733 c = []
733 c = []
734 p = self.rev(node)
734 p = self.rev(node)
735 for r in range(p + 1, self.count()):
735 for r in range(p + 1, self.count()):
736 n = self.node(r)
736 n = self.node(r)
737 for pn in self.parents(n):
737 for pn in self.parents(n):
738 if pn == node:
738 if pn == node:
739 c.append(n)
739 c.append(n)
740 continue
740 continue
741 elif pn == nullid:
741 elif pn == nullid:
742 continue
742 continue
743 return c
743 return c
744
744
745 def lookup(self, id):
745 def lookup(self, id):
746 """locate a node based on revision number or subset of hex nodeid"""
746 """locate a node based on revision number or subset of hex nodeid"""
747 if type(id) == type(0):
747 if type(id) == type(0):
748 return self.node(id)
748 return self.node(id)
749 try:
749 try:
750 rev = int(id)
750 rev = int(id)
751 if str(rev) != id: raise ValueError
751 if str(rev) != id: raise ValueError
752 if rev < 0: rev = self.count() + rev
752 if rev < 0: rev = self.count() + rev
753 if rev < 0 or rev >= self.count(): raise ValueError
753 if rev < 0 or rev >= self.count(): raise ValueError
754 return self.node(rev)
754 return self.node(rev)
755 except (ValueError, OverflowError):
755 except (ValueError, OverflowError):
756 c = []
756 c = []
757 for n in self.nodemap:
757 for n in self.nodemap:
758 if hex(n).startswith(id):
758 if hex(n).startswith(id):
759 c.append(n)
759 c.append(n)
760 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
760 if len(c) > 1: raise RevlogError(_("Ambiguous identifier"))
761 if len(c) == 1: return c[0]
761 if len(c) == 1: return c[0]
762
762
763 # might need fixing if we change hash lengths
763 # might need fixing if we change hash lengths
764 if len(id) == 20 and id in self.nodemap:
764 if len(id) == 20 and id in self.nodemap:
765 return id
765 return id
766
766
767 raise RevlogError(_("No match found"))
767 raise RevlogError(_("No match found"))
768
768
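A self-contained toy (Python 2) of the fallback prefix match lookup() performs above: collect every node whose hex form starts with the fragment and accept the match only if it is unique.

from binascii import hexlify

nodes = ['\xab\xcd' + '\x00' * 18, '\xab\xce' + '\x00' * 18]
matches = [n for n in nodes if hexlify(n).startswith('abcd')]
assert len(matches) == 1 and matches[0] == nodes[0]
assert len([n for n in nodes if hexlify(n).startswith('ab')]) == 2  # ambiguous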
769 def cmp(self, node, text):
770 """compare text with a given file revision"""
771 p1, p2 = self.parents(node)
772 return hash(text, p1, p2) != node
773
774 def makenode(self, node, text):
775 """calculate a file nodeid for text, descended or possibly
776 unchanged from node"""
777
778 if self.cmp(node, text):
779 return hash(text, node, nullid)
780 return node
781
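The cmp() and makenode() additions above lean on this module's hash() helper, which is defined earlier in the file and outside this hunk. A hedged sketch of that convention, assuming it is sha1 over the sorted parent ids followed by the text:

import sha     # hashlib.sha1 on later Pythons

def nodehash(text, p1, p2):
    # assumed to mirror revlog.hash(): parents are sorted so the result
    # does not depend on their order
    l = [p1, p2]
    l.sort()
    return sha.new(l[0] + l[1] + text).digest()

nullid = "\0" * 20
n1 = nodehash("v1", nullid, nullid)
assert nodehash("v1", nullid, nullid) == n1    # unchanged text, same nodeid
assert nodehash("v2", n1, nullid) != n1        # changed text, new nodeid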
769 def diff(self, a, b):
782 def diff(self, a, b):
770 """return a delta between two revisions"""
783 """return a delta between two revisions"""
771 return mdiff.textdiff(a, b)
784 return mdiff.textdiff(a, b)
772
785
773 def patches(self, t, pl):
786 def patches(self, t, pl):
774 """apply a list of patches to a string"""
787 """apply a list of patches to a string"""
775 return mdiff.patches(t, pl)
788 return mdiff.patches(t, pl)
776
789
777 def chunk(self, rev, df=None, cachelen=4096):
790 def chunk(self, rev, df=None, cachelen=4096):
778 start, length = self.start(rev), self.length(rev)
791 start, length = self.start(rev), self.length(rev)
779 inline = self.inlinedata()
792 inline = self.inlinedata()
780 if inline:
793 if inline:
781 start += (rev + 1) * struct.calcsize(self.indexformat)
794 start += (rev + 1) * struct.calcsize(self.indexformat)
782 end = start + length
795 end = start + length
783 def loadcache(df):
796 def loadcache(df):
784 cache_length = max(cachelen, length) # 4k
797 cache_length = max(cachelen, length) # 4k
785 if not df:
798 if not df:
786 if inline:
799 if inline:
787 df = self.opener(self.indexfile)
800 df = self.opener(self.indexfile)
788 else:
801 else:
789 df = self.opener(self.datafile)
802 df = self.opener(self.datafile)
790 df.seek(start)
803 df.seek(start)
791 self.chunkcache = (start, df.read(cache_length))
804 self.chunkcache = (start, df.read(cache_length))
792
805
793 if not self.chunkcache:
806 if not self.chunkcache:
794 loadcache(df)
807 loadcache(df)
795
808
796 cache_start = self.chunkcache[0]
809 cache_start = self.chunkcache[0]
797 cache_end = cache_start + len(self.chunkcache[1])
810 cache_end = cache_start + len(self.chunkcache[1])
798 if start >= cache_start and end <= cache_end:
811 if start >= cache_start and end <= cache_end:
799 # it is cached
812 # it is cached
800 offset = start - cache_start
813 offset = start - cache_start
801 else:
814 else:
802 loadcache(df)
815 loadcache(df)
803 offset = 0
816 offset = 0
804
817
805 #def checkchunk():
818 #def checkchunk():
806 # df = self.opener(self.datafile)
819 # df = self.opener(self.datafile)
807 # df.seek(start)
820 # df.seek(start)
808 # return df.read(length)
821 # return df.read(length)
809 #assert s == checkchunk()
822 #assert s == checkchunk()
810 return decompress(self.chunkcache[1][offset:offset + length])
823 return decompress(self.chunkcache[1][offset:offset + length])
811
824
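The inline-data adjustment in chunk() above is plain arithmetic: in an inline revlog the data for revision r sits behind index records 0 through r as well as all earlier revision data, so the offset recorded in the index is shifted by (r + 1) record sizes. A quick self-contained check (Python 2) using the RevlogNG record size:

import struct

recsize = struct.calcsize(">Qiiiiii20s12x")   # 64 bytes per index record
rev, data_start = 2, 100                      # made-up index values
file_offset = data_start + (rev + 1) * recsize
assert file_offset == 100 + 3 * 64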
812 def delta(self, node):
825 def delta(self, node):
813 """return or calculate a delta between a node and its predecessor"""
826 """return or calculate a delta between a node and its predecessor"""
814 r = self.rev(node)
827 r = self.rev(node)
815 return self.revdiff(r - 1, r)
828 return self.revdiff(r - 1, r)
816
829
817 def revdiff(self, rev1, rev2):
830 def revdiff(self, rev1, rev2):
818 """return or calculate a delta between two revisions"""
831 """return or calculate a delta between two revisions"""
819 b1 = self.base(rev1)
832 b1 = self.base(rev1)
820 b2 = self.base(rev2)
833 b2 = self.base(rev2)
821 if b1 == b2 and rev1 + 1 == rev2:
834 if b1 == b2 and rev1 + 1 == rev2:
822 return self.chunk(rev2)
835 return self.chunk(rev2)
823 else:
836 else:
824 return self.diff(self.revision(self.node(rev1)),
837 return self.diff(self.revision(self.node(rev1)),
825 self.revision(self.node(rev2)))
838 self.revision(self.node(rev2)))
826
839
827 def revision(self, node):
840 def revision(self, node):
828 """return an uncompressed revision of a given"""
841 """return an uncompressed revision of a given"""
829 if node == nullid: return ""
842 if node == nullid: return ""
830 if self.cache and self.cache[0] == node: return self.cache[2]
843 if self.cache and self.cache[0] == node: return self.cache[2]
831
844
832 # look up what we need to read
845 # look up what we need to read
833 text = None
846 text = None
834 rev = self.rev(node)
847 rev = self.rev(node)
835 base = self.base(rev)
848 base = self.base(rev)
836
849
837 if self.inlinedata():
850 if self.inlinedata():
838 # we probably have the whole chunk cached
851 # we probably have the whole chunk cached
839 df = None
852 df = None
840 else:
853 else:
841 df = self.opener(self.datafile)
854 df = self.opener(self.datafile)
842
855
843 # do we have useful data cached?
856 # do we have useful data cached?
844 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
857 if self.cache and self.cache[1] >= base and self.cache[1] < rev:
845 base = self.cache[1]
858 base = self.cache[1]
846 text = self.cache[2]
859 text = self.cache[2]
847 self.loadindex(base, rev + 1)
860 self.loadindex(base, rev + 1)
848 else:
861 else:
849 self.loadindex(base, rev + 1)
862 self.loadindex(base, rev + 1)
850 text = self.chunk(base, df=df)
863 text = self.chunk(base, df=df)
851
864
852 bins = []
865 bins = []
853 for r in xrange(base + 1, rev + 1):
866 for r in xrange(base + 1, rev + 1):
854 bins.append(self.chunk(r, df=df))
867 bins.append(self.chunk(r, df=df))
855
868
856 text = self.patches(text, bins)
869 text = self.patches(text, bins)
857
870
858 p1, p2 = self.parents(node)
871 p1, p2 = self.parents(node)
859 if node != hash(text, p1, p2):
872 if node != hash(text, p1, p2):
860 raise RevlogError(_("integrity check failed on %s:%d")
873 raise RevlogError(_("integrity check failed on %s:%d")
861 % (self.datafile, rev))
874 % (self.datafile, rev))
862
875
863 self.cache = (node, rev, text)
876 self.cache = (node, rev, text)
864 return text
877 return text
865
878
866 def checkinlinesize(self, tr, fp=None):
879 def checkinlinesize(self, tr, fp=None):
867 if not self.inlinedata():
880 if not self.inlinedata():
868 return
881 return
869 if not fp:
882 if not fp:
870 fp = self.opener(self.indexfile, 'r')
883 fp = self.opener(self.indexfile, 'r')
871 fp.seek(0, 2)
884 fp.seek(0, 2)
872 size = fp.tell()
885 size = fp.tell()
873 if size < 131072:
886 if size < 131072:
874 return
887 return
875 trinfo = tr.find(self.indexfile)
888 trinfo = tr.find(self.indexfile)
876 if trinfo == None:
889 if trinfo == None:
877 raise RevlogError(_("%s not found in the transaction" %
890 raise RevlogError(_("%s not found in the transaction" %
878 self.indexfile))
891 self.indexfile))
879
892
880 trindex = trinfo[2]
893 trindex = trinfo[2]
881 dataoff = self.start(trindex)
894 dataoff = self.start(trindex)
882
895
883 tr.add(self.datafile, dataoff)
896 tr.add(self.datafile, dataoff)
884 df = self.opener(self.datafile, 'w')
897 df = self.opener(self.datafile, 'w')
885 calc = struct.calcsize(self.indexformat)
898 calc = struct.calcsize(self.indexformat)
886 for r in xrange(self.count()):
899 for r in xrange(self.count()):
887 start = self.start(r) + (r + 1) * calc
900 start = self.start(r) + (r + 1) * calc
888 length = self.length(r)
901 length = self.length(r)
889 fp.seek(start)
902 fp.seek(start)
890 d = fp.read(length)
903 d = fp.read(length)
891 df.write(d)
904 df.write(d)
892 fp.close()
905 fp.close()
893 df.close()
906 df.close()
894 fp = self.opener(self.indexfile, 'w', atomictemp=True)
907 fp = self.opener(self.indexfile, 'w', atomictemp=True)
895 self.version &= ~(REVLOGNGINLINEDATA)
908 self.version &= ~(REVLOGNGINLINEDATA)
896 if self.count():
909 if self.count():
897 x = self.index[0]
910 x = self.index[0]
898 e = struct.pack(self.indexformat, *x)[4:]
911 e = struct.pack(self.indexformat, *x)[4:]
899 l = struct.pack(versionformat, self.version)
912 l = struct.pack(versionformat, self.version)
900 fp.write(l)
913 fp.write(l)
901 fp.write(e)
914 fp.write(e)
902
915
903 for i in xrange(1, self.count()):
916 for i in xrange(1, self.count()):
904 x = self.index[i]
917 x = self.index[i]
905 e = struct.pack(self.indexformat, *x)
918 e = struct.pack(self.indexformat, *x)
906 fp.write(e)
919 fp.write(e)
907
920
908 # if we don't call rename, the temp file will never replace the
921 # if we don't call rename, the temp file will never replace the
909 # real index
922 # real index
910 fp.rename()
923 fp.rename()
911
924
912 tr.replace(self.indexfile, trindex * calc)
925 tr.replace(self.indexfile, trindex * calc)
913 self.chunkcache = None
926 self.chunkcache = None
914
927
915 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
928 def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
916 """add a revision to the log
929 """add a revision to the log
917
930
918 text - the revision data to add
931 text - the revision data to add
919 transaction - the transaction object used for rollback
932 transaction - the transaction object used for rollback
920 link - the linkrev data to add
933 link - the linkrev data to add
921 p1, p2 - the parent nodeids of the revision
934 p1, p2 - the parent nodeids of the revision
922 d - an optional precomputed delta
935 d - an optional precomputed delta
923 """
936 """
924 if text is None: text = ""
937 if text is None: text = ""
925 if p1 is None: p1 = self.tip()
938 if p1 is None: p1 = self.tip()
926 if p2 is None: p2 = nullid
939 if p2 is None: p2 = nullid
927
940
928 node = hash(text, p1, p2)
941 node = hash(text, p1, p2)
929
942
930 if node in self.nodemap:
943 if node in self.nodemap:
931 return node
944 return node
932
945
933 n = self.count()
946 n = self.count()
934 t = n - 1
947 t = n - 1
935
948
936 if n:
949 if n:
937 base = self.base(t)
950 base = self.base(t)
938 start = self.start(base)
951 start = self.start(base)
939 end = self.end(t)
952 end = self.end(t)
940 if not d:
953 if not d:
941 prev = self.revision(self.tip())
954 prev = self.revision(self.tip())
942 d = self.diff(prev, str(text))
955 d = self.diff(prev, str(text))
943 data = compress(d)
956 data = compress(d)
944 l = len(data[1]) + len(data[0])
957 l = len(data[1]) + len(data[0])
945 dist = end - start + l
958 dist = end - start + l
946
959
947 # full versions are inserted when the needed deltas
960 # full versions are inserted when the needed deltas
948 # become comparable to the uncompressed text
961 # become comparable to the uncompressed text
949 if not n or dist > len(text) * 2:
962 if not n or dist > len(text) * 2:
950 data = compress(text)
963 data = compress(text)
951 l = len(data[1]) + len(data[0])
964 l = len(data[1]) + len(data[0])
952 base = n
965 base = n
953 else:
966 else:
954 base = self.base(t)
967 base = self.base(t)
955
968
956 offset = 0
969 offset = 0
957 if t >= 0:
970 if t >= 0:
958 offset = self.end(t)
971 offset = self.end(t)
959
972
960 if self.version == REVLOGV0:
973 if self.version == REVLOGV0:
961 e = (offset, l, base, link, p1, p2, node)
974 e = (offset, l, base, link, p1, p2, node)
962 else:
975 else:
963 e = (self.offset_type(offset, 0), l, len(text),
976 e = (self.offset_type(offset, 0), l, len(text),
964 base, link, self.rev(p1), self.rev(p2), node)
977 base, link, self.rev(p1), self.rev(p2), node)
965
978
966 self.index.append(e)
979 self.index.append(e)
967 self.nodemap[node] = n
980 self.nodemap[node] = n
968 entry = struct.pack(self.indexformat, *e)
981 entry = struct.pack(self.indexformat, *e)
969
982
970 if not self.inlinedata():
983 if not self.inlinedata():
971 transaction.add(self.datafile, offset)
984 transaction.add(self.datafile, offset)
972 transaction.add(self.indexfile, n * len(entry))
985 transaction.add(self.indexfile, n * len(entry))
973 f = self.opener(self.datafile, "a")
986 f = self.opener(self.datafile, "a")
974 if data[0]:
987 if data[0]:
975 f.write(data[0])
988 f.write(data[0])
976 f.write(data[1])
989 f.write(data[1])
977 f.close()
990 f.close()
978 f = self.opener(self.indexfile, "a")
991 f = self.opener(self.indexfile, "a")
979 else:
992 else:
980 f = self.opener(self.indexfile, "a+")
993 f = self.opener(self.indexfile, "a+")
981 f.seek(0, 2)
994 f.seek(0, 2)
982 transaction.add(self.indexfile, f.tell(), self.count() - 1)
995 transaction.add(self.indexfile, f.tell(), self.count() - 1)
983
996
984 if len(self.index) == 1 and self.version != REVLOGV0:
997 if len(self.index) == 1 and self.version != REVLOGV0:
985 l = struct.pack(versionformat, self.version)
998 l = struct.pack(versionformat, self.version)
986 f.write(l)
999 f.write(l)
987 entry = entry[4:]
1000 entry = entry[4:]
988
1001
989 f.write(entry)
1002 f.write(entry)
990
1003
991 if self.inlinedata():
1004 if self.inlinedata():
992 f.write(data[0])
1005 f.write(data[0])
993 f.write(data[1])
1006 f.write(data[1])
994 self.checkinlinesize(transaction, f)
1007 self.checkinlinesize(transaction, f)
995
1008
996 self.cache = (node, n, text)
1009 self.cache = (node, n, text)
997 return node
1010 return node
998
1011
999 def ancestor(self, a, b):
1012 def ancestor(self, a, b):
1000 """calculate the least common ancestor of nodes a and b"""
1013 """calculate the least common ancestor of nodes a and b"""
1001
1014
1002 # start with some short cuts for the linear cases
1015 # start with some short cuts for the linear cases
1003 if a == b:
1016 if a == b:
1004 return a
1017 return a
1005 ra = self.rev(a)
1018 ra = self.rev(a)
1006 rb = self.rev(b)
1019 rb = self.rev(b)
1007 if ra < rb:
1020 if ra < rb:
1008 last = b
1021 last = b
1009 first = a
1022 first = a
1010 else:
1023 else:
1011 last = a
1024 last = a
1012 first = b
1025 first = b
1013
1026
1014 # reachable won't include stop in the list, so we have to use a parent
1027 # reachable won't include stop in the list, so we have to use a parent
1015 reachable = self.reachable(last, stop=self.parents(first)[0])
1028 reachable = self.reachable(last, stop=self.parents(first)[0])
1016 if first in reachable:
1029 if first in reachable:
1017 return first
1030 return first
1018
1031
1019 # calculate the distance of every node from root
1032 # calculate the distance of every node from root
1020 dist = {nullid: 0}
1033 dist = {nullid: 0}
1021 for i in xrange(self.count()):
1034 for i in xrange(self.count()):
1022 n = self.node(i)
1035 n = self.node(i)
1023 p1, p2 = self.parents(n)
1036 p1, p2 = self.parents(n)
1024 dist[n] = max(dist[p1], dist[p2]) + 1
1037 dist[n] = max(dist[p1], dist[p2]) + 1
1025
1038
1026 # traverse ancestors in order of decreasing distance from root
1039 # traverse ancestors in order of decreasing distance from root
1027 def ancestors(node):
1040 def ancestors(node):
1028 # we store negative distances because heap returns smallest member
1041 # we store negative distances because heap returns smallest member
1029 h = [(-dist[node], node)]
1042 h = [(-dist[node], node)]
1030 seen = {}
1043 seen = {}
1031 while h:
1044 while h:
1032 d, n = heapq.heappop(h)
1045 d, n = heapq.heappop(h)
1033 if n not in seen:
1046 if n not in seen:
1034 seen[n] = 1
1047 seen[n] = 1
1035 yield (-d, n)
1048 yield (-d, n)
1036 for p in self.parents(n):
1049 for p in self.parents(n):
1037 heapq.heappush(h, (-dist[p], p))
1050 heapq.heappush(h, (-dist[p], p))
1038
1051
1039 def generations(node):
1052 def generations(node):
1040 sg, s = None, {}
1053 sg, s = None, {}
1041 for g,n in ancestors(node):
1054 for g,n in ancestors(node):
1042 if g != sg:
1055 if g != sg:
1043 if sg:
1056 if sg:
1044 yield sg, s
1057 yield sg, s
1045 sg, s = g, {n:1}
1058 sg, s = g, {n:1}
1046 else:
1059 else:
1047 s[n] = 1
1060 s[n] = 1
1048 yield sg, s
1061 yield sg, s
1049
1062
1050 x = generations(a)
1063 x = generations(a)
1051 y = generations(b)
1064 y = generations(b)
1052 gx = x.next()
1065 gx = x.next()
1053 gy = y.next()
1066 gy = y.next()
1054
1067
1055 # increment each ancestor list until it is closer to root than
1068 # increment each ancestor list until it is closer to root than
1056 # the other, or they match
1069 # the other, or they match
1057 while 1:
1070 while 1:
1058 #print "ancestor gen %s %s" % (gx[0], gy[0])
1071 #print "ancestor gen %s %s" % (gx[0], gy[0])
1059 if gx[0] == gy[0]:
1072 if gx[0] == gy[0]:
1060 # find the intersection
1073 # find the intersection
1061 i = [ n for n in gx[1] if n in gy[1] ]
1074 i = [ n for n in gx[1] if n in gy[1] ]
1062 if i:
1075 if i:
1063 return i[0]
1076 return i[0]
1064 else:
1077 else:
1065 #print "next"
1078 #print "next"
1066 gy = y.next()
1079 gy = y.next()
1067 gx = x.next()
1080 gx = x.next()
1068 elif gx[0] < gy[0]:
1081 elif gx[0] < gy[0]:
1069 #print "next y"
1082 #print "next y"
1070 gy = y.next()
1083 gy = y.next()
1071 else:
1084 else:
1072 #print "next x"
1085 #print "next x"
1073 gx = x.next()
1086 gx = x.next()
1074
1087
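The ancestors() generator above walks the graph in order of decreasing distance from the root by pushing negated distances onto a heap; the same pattern, self-contained on a toy graph (Python 2):

import heapq

parents = {0: (), 1: (0,), 2: (0,), 3: (1, 2)}
dist = {}
for n in sorted(parents):                     # toy stand-in for the dist loop
    dist[n] = max([dist[p] for p in parents[n]] + [-1]) + 1

def toy_ancestors(node):
    h, seen = [(-dist[node], node)], {}
    while h:
        d, n = heapq.heappop(h)
        if n not in seen:
            seen[n] = 1
            yield -d, n
            for p in parents[n]:
                heapq.heappush(h, (-dist[p], p))

assert list(toy_ancestors(3)) == [(2, 3), (1, 1), (1, 2), (0, 0)]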
1075 def group(self, nodelist, lookup, infocollect=None):
1088 def group(self, nodelist, lookup, infocollect=None):
1076 """calculate a delta group
1089 """calculate a delta group
1077
1090
1078 Given a list of changeset revs, return a set of deltas and
1091 Given a list of changeset revs, return a set of deltas and
1079 metadata corresponding to nodes. the first delta is
1079 metadata corresponding to nodes. The first delta is
1092 metadata corresponding to nodes. The first delta is
1080 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1093 parent(nodes[0]) -> nodes[0]; the receiver is guaranteed to
1094 have this parent as it has all history before these
1082 changesets. parent is parent[0]
1095 changesets. parent is parent[0]
1083 """
1096 """
1084 revs = [self.rev(n) for n in nodelist]
1097 revs = [self.rev(n) for n in nodelist]
1085
1098
1086 # if we don't have any revisions touched by these changesets, bail
1099 # if we don't have any revisions touched by these changesets, bail
1087 if not revs:
1100 if not revs:
1088 yield changegroup.closechunk()
1101 yield changegroup.closechunk()
1089 return
1102 return
1090
1103
1091 # add the parent of the first rev
1104 # add the parent of the first rev
1092 p = self.parents(self.node(revs[0]))[0]
1105 p = self.parents(self.node(revs[0]))[0]
1093 revs.insert(0, self.rev(p))
1106 revs.insert(0, self.rev(p))
1094
1107
1095 # build deltas
1108 # build deltas
1096 for d in xrange(0, len(revs) - 1):
1109 for d in xrange(0, len(revs) - 1):
1097 a, b = revs[d], revs[d + 1]
1110 a, b = revs[d], revs[d + 1]
1098 nb = self.node(b)
1111 nb = self.node(b)
1099
1112
1100 if infocollect is not None:
1113 if infocollect is not None:
1101 infocollect(nb)
1114 infocollect(nb)
1102
1115
1103 d = self.revdiff(a, b)
1116 d = self.revdiff(a, b)
1104 p = self.parents(nb)
1117 p = self.parents(nb)
1105 meta = nb + p[0] + p[1] + lookup(nb)
1118 meta = nb + p[0] + p[1] + lookup(nb)
1106 yield changegroup.genchunk("%s%s" % (meta, d))
1119 yield changegroup.genchunk("%s%s" % (meta, d))
1107
1120
1108 yield changegroup.closechunk()
1121 yield changegroup.closechunk()
1109
1122
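
group() leaves the wire framing to changegroup.genchunk() and changegroup.closechunk(). As far as can be inferred from their use here, each chunk is a 4-byte big-endian length that counts its own header, followed by the payload, with a zero-length chunk terminating the stream. The sketch below restates that framing; it is not copied from changegroup.py.

    import io
    import struct

    def genchunk(data):
        # length prefix counts the 4 header bytes as well as the payload
        return struct.pack(">l", len(data) + 4) + data

    def closechunk():
        # an "empty" chunk marks the end of the group
        return struct.pack(">l", 0)

    def chunks(stream):
        # inverse operation: read chunks back until the terminator
        while True:
            length = struct.unpack(">l", stream.read(4))[0]
            if length <= 4:
                return
            yield stream.read(length - 4)

    buf = io.BytesIO(genchunk(b"first") + genchunk(b"second") + closechunk())
    assert list(chunks(buf)) == [b"first", b"second"]
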
1110 def addgroup(self, revs, linkmapper, transaction, unique=0):
1123 def addgroup(self, revs, linkmapper, transaction, unique=0):
1111 """
1124 """
1112 add a delta group
1125 add a delta group
1113
1126
1114 given a set of deltas, add them to the revision log. the
1127 given a set of deltas, add them to the revision log. the
1115 first delta is against its parent, which should be in our
1128 first delta is against its parent, which should be in our
1116 log, the rest are against the previous delta.
1129 log, the rest are against the previous delta.
1117 """
1130 """
1118
1131
1119 #track the base of the current delta log
1132 #track the base of the current delta log
1120 r = self.count()
1133 r = self.count()
1121 t = r - 1
1134 t = r - 1
1122 node = None
1135 node = None
1123
1136
1124 base = prev = -1
1137 base = prev = -1
1125 start = end = textlen = 0
1138 start = end = textlen = 0
1126 if r:
1139 if r:
1127 end = self.end(t)
1140 end = self.end(t)
1128
1141
1129 ifh = self.opener(self.indexfile, "a+")
1142 ifh = self.opener(self.indexfile, "a+")
1130 ifh.seek(0, 2)
1143 ifh.seek(0, 2)
1131 transaction.add(self.indexfile, ifh.tell(), self.count())
1144 transaction.add(self.indexfile, ifh.tell(), self.count())
1132 if self.inlinedata():
1145 if self.inlinedata():
1133 dfh = None
1146 dfh = None
1134 else:
1147 else:
1135 transaction.add(self.datafile, end)
1148 transaction.add(self.datafile, end)
1136 dfh = self.opener(self.datafile, "a")
1149 dfh = self.opener(self.datafile, "a")
1137
1150
1138 # loop through our set of deltas
1151 # loop through our set of deltas
1139 chain = None
1152 chain = None
1140 for chunk in revs:
1153 for chunk in revs:
1141 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1154 node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
1142 link = linkmapper(cs)
1155 link = linkmapper(cs)
1143 if node in self.nodemap:
1156 if node in self.nodemap:
1144 # this can happen if two branches make the same change
1157 # this can happen if two branches make the same change
1145 # if unique:
1158 # if unique:
1146 # raise RevlogError(_("already have %s") % hex(node[:4]))
1159 # raise RevlogError(_("already have %s") % hex(node[:4]))
1147 chain = node
1160 chain = node
1148 continue
1161 continue
1149 delta = chunk[80:]
1162 delta = chunk[80:]
1150
1163
1151 for p in (p1, p2):
1164 for p in (p1, p2):
1152 if not p in self.nodemap:
1165 if not p in self.nodemap:
1153 raise RevlogError(_("unknown parent %s") % short(p))
1166 raise RevlogError(_("unknown parent %s") % short(p))
1154
1167
1155 if not chain:
1168 if not chain:
1156 # retrieve the parent revision of the delta chain
1169 # retrieve the parent revision of the delta chain
1157 chain = p1
1170 chain = p1
1158 if not chain in self.nodemap:
1171 if not chain in self.nodemap:
1159 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1172 raise RevlogError(_("unknown base %s") % short(chain[:4]))
1160
1173
1161 # full versions are inserted when the needed deltas become
1174 # full versions are inserted when the needed deltas become
1162 # comparable to the uncompressed text or when the previous
1175 # comparable to the uncompressed text or when the previous
1163 # version is not the one we have a delta against. We use
1176 # version is not the one we have a delta against. We use
1164 # the size of the previous full rev as a proxy for the
1177 # the size of the previous full rev as a proxy for the
1165 # current size.
1178 # current size.
1166
1179
1167 if chain == prev:
1180 if chain == prev:
1168 tempd = compress(delta)
1181 tempd = compress(delta)
1169 cdelta = tempd[0] + tempd[1]
1182 cdelta = tempd[0] + tempd[1]
1170 textlen = mdiff.patchedsize(textlen, delta)
1183 textlen = mdiff.patchedsize(textlen, delta)
1171
1184
1172 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1185 if chain != prev or (end - start + len(cdelta)) > textlen * 2:
1173 # flush our writes here so we can read it in revision
1186 # flush our writes here so we can read it in revision
1174 if dfh:
1187 if dfh:
1175 dfh.flush()
1188 dfh.flush()
1176 ifh.flush()
1189 ifh.flush()
1177 text = self.revision(chain)
1190 text = self.revision(chain)
1178 text = self.patches(text, [delta])
1191 text = self.patches(text, [delta])
1179 chk = self.addrevision(text, transaction, link, p1, p2)
1192 chk = self.addrevision(text, transaction, link, p1, p2)
1180 if chk != node:
1193 if chk != node:
1181 raise RevlogError(_("consistency error adding group"))
1194 raise RevlogError(_("consistency error adding group"))
1182 textlen = len(text)
1195 textlen = len(text)
1183 else:
1196 else:
1184 if self.version == REVLOGV0:
1197 if self.version == REVLOGV0:
1185 e = (end, len(cdelta), base, link, p1, p2, node)
1198 e = (end, len(cdelta), base, link, p1, p2, node)
1186 else:
1199 else:
1187 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1200 e = (self.offset_type(end, 0), len(cdelta), textlen, base,
1188 link, self.rev(p1), self.rev(p2), node)
1201 link, self.rev(p1), self.rev(p2), node)
1189 self.index.append(e)
1202 self.index.append(e)
1190 self.nodemap[node] = r
1203 self.nodemap[node] = r
1191 if self.inlinedata():
1204 if self.inlinedata():
1192 ifh.write(struct.pack(self.indexformat, *e))
1205 ifh.write(struct.pack(self.indexformat, *e))
1193 ifh.write(cdelta)
1206 ifh.write(cdelta)
1194 self.checkinlinesize(transaction, ifh)
1207 self.checkinlinesize(transaction, ifh)
1195 if not self.inlinedata():
1208 if not self.inlinedata():
1196 dfh = self.opener(self.datafile, "a")
1209 dfh = self.opener(self.datafile, "a")
1197 ifh = self.opener(self.indexfile, "a")
1210 ifh = self.opener(self.indexfile, "a")
1198 else:
1211 else:
1199 if not dfh:
1212 if not dfh:
1200 # addrevision switched from inline to conventional
1213 # addrevision switched from inline to conventional
1201 # reopen the index
1214 # reopen the index
1202 dfh = self.opener(self.datafile, "a")
1215 dfh = self.opener(self.datafile, "a")
1203 ifh = self.opener(self.indexfile, "a")
1216 ifh = self.opener(self.indexfile, "a")
1204 dfh.write(cdelta)
1217 dfh.write(cdelta)
1205 ifh.write(struct.pack(self.indexformat, *e))
1218 ifh.write(struct.pack(self.indexformat, *e))
1206
1219
1207 t, r, chain, prev = r, r + 1, node, node
1220 t, r, chain, prev = r, r + 1, node, node
1208 base = self.base(t)
1221 base = self.base(t)
1209 start = self.start(base)
1222 start = self.start(base)
1210 end = self.end(t)
1223 end = self.end(t)
1211
1224
1212 return node
1225 return node
1213
1226
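
Each chunk consumed by addgroup() starts with an 80-byte header of four 20-byte binary node ids - the new node, its two parents, and the changeset it is linked to - followed by the delta itself. A small sketch of that split; the placeholder ids below are fabricated, not real hashes.

    import struct

    def parsechunk(chunk):
        # header layout mirrors the struct.unpack("20s20s20s20s", ...) call above
        node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80])
        delta = chunk[80:]
        return node, p1, p2, cs, delta

    fake = b"n" * 20 + b"p" * 20 + b"q" * 20 + b"c" * 20 + b"delta payload"
    assert parsechunk(fake)[4] == b"delta payload"
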
1214 def strip(self, rev, minlink):
1227 def strip(self, rev, minlink):
1215 if self.count() == 0 or rev >= self.count():
1228 if self.count() == 0 or rev >= self.count():
1216 return
1229 return
1217
1230
1218 if isinstance(self.index, lazyindex):
1231 if isinstance(self.index, lazyindex):
1219 self.loadindexmap()
1232 self.loadindexmap()
1220
1233
1221 # When stripping away a revision, we need to make sure it
1234 # When stripping away a revision, we need to make sure it
1222 # does not actually belong to an older changeset.
1235 # does not actually belong to an older changeset.
1223 # The minlink parameter defines the oldest revision
1236 # The minlink parameter defines the oldest revision
1224 # we're allowed to strip away.
1237 # we're allowed to strip away.
1225 while minlink > self.index[rev][-4]:
1238 while minlink > self.index[rev][-4]:
1226 rev += 1
1239 rev += 1
1227 if rev >= self.count():
1240 if rev >= self.count():
1228 return
1241 return
1229
1242
1230 # first truncate the files on disk
1243 # first truncate the files on disk
1231 end = self.start(rev)
1244 end = self.start(rev)
1232 if not self.inlinedata():
1245 if not self.inlinedata():
1233 df = self.opener(self.datafile, "a")
1246 df = self.opener(self.datafile, "a")
1234 df.truncate(end)
1247 df.truncate(end)
1235 end = rev * struct.calcsize(self.indexformat)
1248 end = rev * struct.calcsize(self.indexformat)
1236 else:
1249 else:
1237 end += rev * struct.calcsize(self.indexformat)
1250 end += rev * struct.calcsize(self.indexformat)
1238
1251
1239 indexf = self.opener(self.indexfile, "a")
1252 indexf = self.opener(self.indexfile, "a")
1240 indexf.truncate(end)
1253 indexf.truncate(end)
1241
1254
1242 # then reset internal state in memory to forget those revisions
1255 # then reset internal state in memory to forget those revisions
1243 self.cache = None
1256 self.cache = None
1244 self.chunkcache = None
1257 self.chunkcache = None
1245 for x in xrange(rev, self.count()):
1258 for x in xrange(rev, self.count()):
1246 del self.nodemap[self.node(x)]
1259 del self.nodemap[self.node(x)]
1247
1260
1248 del self.index[rev:]
1261 del self.index[rev:]
1249
1262
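
The truncation offsets in strip() differ between inline and separate-data revlogs. The arithmetic can be restated compactly, with the entry size passed in rather than derived from self.indexformat:

    def truncation_offsets(rev, data_start, entry_size, inline):
        """Return (data_offset, index_offset) at which the files are cut.

        data_start corresponds to self.start(rev), entry_size to
        struct.calcsize(self.indexformat)."""
        if inline:
            # index entries and revision data share one file, so both terms add up
            return None, data_start + rev * entry_size
        # separate files: cut the data file at the revision's start,
        # and the index after rev fixed-size entries
        return data_start, rev * entry_size
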
1250 def checksize(self):
1263 def checksize(self):
1251 expected = 0
1264 expected = 0
1252 if self.count():
1265 if self.count():
1253 expected = self.end(self.count() - 1)
1266 expected = self.end(self.count() - 1)
1254
1267
1255 try:
1268 try:
1256 f = self.opener(self.datafile)
1269 f = self.opener(self.datafile)
1257 f.seek(0, 2)
1270 f.seek(0, 2)
1258 actual = f.tell()
1271 actual = f.tell()
1259 dd = actual - expected
1272 dd = actual - expected
1260 except IOError, inst:
1273 except IOError, inst:
1261 if inst.errno != errno.ENOENT:
1274 if inst.errno != errno.ENOENT:
1262 raise
1275 raise
1263 dd = 0
1276 dd = 0
1264
1277
1265 try:
1278 try:
1266 f = self.opener(self.indexfile)
1279 f = self.opener(self.indexfile)
1267 f.seek(0, 2)
1280 f.seek(0, 2)
1268 actual = f.tell()
1281 actual = f.tell()
1269 s = struct.calcsize(self.indexformat)
1282 s = struct.calcsize(self.indexformat)
1270 i = actual / s
1283 i = actual / s
1271 di = actual - (i * s)
1284 di = actual - (i * s)
1272 if self.inlinedata():
1285 if self.inlinedata():
1273 databytes = 0
1286 databytes = 0
1274 for r in xrange(self.count()):
1287 for r in xrange(self.count()):
1275 databytes += self.length(r)
1288 databytes += self.length(r)
1276 dd = 0
1289 dd = 0
1277 di = actual - self.count() * s - databytes
1290 di = actual - self.count() * s - databytes
1278 except IOError, inst:
1291 except IOError, inst:
1279 if inst.errno != errno.ENOENT:
1292 if inst.errno != errno.ENOENT:
1280 raise
1293 raise
1281 di = 0
1294 di = 0
1282
1295
1283 return (dd, di)
1296 return (dd, di)
1284
1297
1285
1298
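
checksize() reports two discrepancies: dd for the data file (actual size minus what the index implies) and di for the index file (bytes beyond a whole number of entries, or the inline equivalent). A caller might fold the pair into messages roughly like this; the helper and its wording are illustrative, not part of the change.

    def report_sizes(dd, di):
        # dd, di as returned by revlog.checksize()
        msgs = []
        if dd:
            msgs.append("data file is %d bytes off from what the index expects" % dd)
        if di:
            msgs.append("index file has %d unexpected trailing bytes" % di)
        return msgs

    assert report_sizes(0, 0) == []
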
@@ -1,360 +1,290 b''
1 # ui.py - user interface bits for mercurial
1 # ui.py - user interface bits for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms
5 # This software may be used and distributed according to the terms
6 # of the GNU General Public License, incorporated herein by reference.
6 # of the GNU General Public License, incorporated herein by reference.
7
7
8 from i18n import gettext as _
8 from i18n import gettext as _
9 from demandload import *
9 from demandload import *
10 demandload(globals(), "errno getpass os re smtplib socket sys tempfile")
10 demandload(globals(), "errno getpass os re socket sys tempfile")
11 demandload(globals(), "ConfigParser mdiff templater traceback util")
11 demandload(globals(), "ConfigParser mdiff templater traceback util")
12
12
13 class ui(object):
13 class ui(object):
14 def __init__(self, verbose=False, debug=False, quiet=False,
14 def __init__(self, verbose=False, debug=False, quiet=False,
15 interactive=True, traceback=False, parentui=None):
15 interactive=True, traceback=False, parentui=None):
16 self.overlay = {}
16 self.overlay = {}
17 if parentui is None:
17 if parentui is None:
18 # this is the parent of all ui children
18 # this is the parent of all ui children
19 self.parentui = None
19 self.parentui = None
20 self.cdata = ConfigParser.SafeConfigParser()
20 self.cdata = ConfigParser.SafeConfigParser()
21 self.readconfig(util.rcpath())
21 self.readconfig(util.rcpath())
22
22
23 self.quiet = self.configbool("ui", "quiet")
23 self.quiet = self.configbool("ui", "quiet")
24 self.verbose = self.configbool("ui", "verbose")
24 self.verbose = self.configbool("ui", "verbose")
25 self.debugflag = self.configbool("ui", "debug")
25 self.debugflag = self.configbool("ui", "debug")
26 self.interactive = self.configbool("ui", "interactive", True)
26 self.interactive = self.configbool("ui", "interactive", True)
27 self.traceback = traceback
27 self.traceback = traceback
28
28
29 self.updateopts(verbose, debug, quiet, interactive)
29 self.updateopts(verbose, debug, quiet, interactive)
30 self.diffcache = None
30 self.diffcache = None
31 self.header = []
31 self.header = []
32 self.prev_header = []
32 self.prev_header = []
33 self.revlogopts = self.configrevlog()
33 self.revlogopts = self.configrevlog()
34 else:
34 else:
35 # parentui may point to an ui object which is already a child
35 # parentui may point to an ui object which is already a child
36 self.parentui = parentui.parentui or parentui
36 self.parentui = parentui.parentui or parentui
37 parent_cdata = self.parentui.cdata
37 parent_cdata = self.parentui.cdata
38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
38 self.cdata = ConfigParser.SafeConfigParser(parent_cdata.defaults())
39 # make interpolation work
39 # make interpolation work
40 for section in parent_cdata.sections():
40 for section in parent_cdata.sections():
41 self.cdata.add_section(section)
41 self.cdata.add_section(section)
42 for name, value in parent_cdata.items(section, raw=True):
42 for name, value in parent_cdata.items(section, raw=True):
43 self.cdata.set(section, name, value)
43 self.cdata.set(section, name, value)
44
44
45 def __getattr__(self, key):
45 def __getattr__(self, key):
46 return getattr(self.parentui, key)
46 return getattr(self.parentui, key)
47
47
48 def updateopts(self, verbose=False, debug=False, quiet=False,
48 def updateopts(self, verbose=False, debug=False, quiet=False,
49 interactive=True, traceback=False, config=[]):
49 interactive=True, traceback=False, config=[]):
50 self.quiet = (self.quiet or quiet) and not verbose and not debug
50 self.quiet = (self.quiet or quiet) and not verbose and not debug
51 self.verbose = (self.verbose or verbose) or debug
51 self.verbose = (self.verbose or verbose) or debug
52 self.debugflag = (self.debugflag or debug)
52 self.debugflag = (self.debugflag or debug)
53 self.interactive = (self.interactive and interactive)
53 self.interactive = (self.interactive and interactive)
54 self.traceback = self.traceback or traceback
54 self.traceback = self.traceback or traceback
55 for cfg in config:
55 for cfg in config:
56 try:
56 try:
57 name, value = cfg.split('=', 1)
57 name, value = cfg.split('=', 1)
58 section, name = name.split('.', 1)
58 section, name = name.split('.', 1)
59 if not self.cdata.has_section(section):
59 if not self.cdata.has_section(section):
60 self.cdata.add_section(section)
60 self.cdata.add_section(section)
61 if not section or not name:
61 if not section or not name:
62 raise IndexError
62 raise IndexError
63 self.cdata.set(section, name, value)
63 self.cdata.set(section, name, value)
64 except (IndexError, ValueError):
64 except (IndexError, ValueError):
65 raise util.Abort(_('malformed --config option: %s') % cfg)
65 raise util.Abort(_('malformed --config option: %s') % cfg)
66
66
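
The --config handling above expects each override as section.name=value. The parsing can be restated standalone (ValueError stands in for the util.Abort raised above):

    def parse_override(cfg):
        # "--config ui.username=alice" arrives here as "ui.username=alice"
        name, value = cfg.split('=', 1)
        section, name = name.split('.', 1)
        if not section or not name:
            raise ValueError('malformed --config option: %s' % cfg)
        return section, name, value

    assert parse_override('ui.username=alice') == ('ui', 'username', 'alice')
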
67 def readconfig(self, fn, root=None):
67 def readconfig(self, fn, root=None):
68 if isinstance(fn, basestring):
68 if isinstance(fn, basestring):
69 fn = [fn]
69 fn = [fn]
70 for f in fn:
70 for f in fn:
71 try:
71 try:
72 self.cdata.read(f)
72 self.cdata.read(f)
73 except ConfigParser.ParsingError, inst:
73 except ConfigParser.ParsingError, inst:
74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
74 raise util.Abort(_("Failed to parse %s\n%s") % (f, inst))
75 # translate paths relative to root (or home) into absolute paths
75 # translate paths relative to root (or home) into absolute paths
76 if root is None:
76 if root is None:
77 root = os.path.expanduser('~')
77 root = os.path.expanduser('~')
78 for name, path in self.configitems("paths"):
78 for name, path in self.configitems("paths"):
79 if path and "://" not in path and not os.path.isabs(path):
79 if path and "://" not in path and not os.path.isabs(path):
80 self.cdata.set("paths", name, os.path.join(root, path))
80 self.cdata.set("paths", name, os.path.join(root, path))
81
81
82 def setconfig(self, section, name, val):
82 def setconfig(self, section, name, val):
83 self.overlay[(section, name)] = val
83 self.overlay[(section, name)] = val
84
84
85 def config(self, section, name, default=None):
85 def config(self, section, name, default=None):
86 if self.overlay.has_key((section, name)):
86 if self.overlay.has_key((section, name)):
87 return self.overlay[(section, name)]
87 return self.overlay[(section, name)]
88 if self.cdata.has_option(section, name):
88 if self.cdata.has_option(section, name):
89 try:
89 try:
90 return self.cdata.get(section, name)
90 return self.cdata.get(section, name)
91 except ConfigParser.InterpolationError, inst:
91 except ConfigParser.InterpolationError, inst:
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
92 raise util.Abort(_("Error in configuration:\n%s") % inst)
93 if self.parentui is None:
93 if self.parentui is None:
94 return default
94 return default
95 else:
95 else:
96 return self.parentui.config(section, name, default)
96 return self.parentui.config(section, name, default)
97
97
98 def configlist(self, section, name, default=None):
98 def configlist(self, section, name, default=None):
99 """Return a list of comma/space separated strings"""
99 """Return a list of comma/space separated strings"""
100 result = self.config(section, name)
100 result = self.config(section, name)
101 if result is None:
101 if result is None:
102 result = default or []
102 result = default or []
103 if isinstance(result, basestring):
103 if isinstance(result, basestring):
104 result = result.replace(",", " ").split()
104 result = result.replace(",", " ").split()
105 return result
105 return result
106
106
107 def configbool(self, section, name, default=False):
107 def configbool(self, section, name, default=False):
108 if self.overlay.has_key((section, name)):
108 if self.overlay.has_key((section, name)):
109 return self.overlay[(section, name)]
109 return self.overlay[(section, name)]
110 if self.cdata.has_option(section, name):
110 if self.cdata.has_option(section, name):
111 try:
111 try:
112 return self.cdata.getboolean(section, name)
112 return self.cdata.getboolean(section, name)
113 except ConfigParser.InterpolationError, inst:
113 except ConfigParser.InterpolationError, inst:
114 raise util.Abort(_("Error in configuration:\n%s") % inst)
114 raise util.Abort(_("Error in configuration:\n%s") % inst)
115 if self.parentui is None:
115 if self.parentui is None:
116 return default
116 return default
117 else:
117 else:
118 return self.parentui.configbool(section, name, default)
118 return self.parentui.configbool(section, name, default)
119
119
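
Config lookups check the in-memory overlay first, then this ui's parsed hgrc data, then the parent ui; configlist() further splits the raw string on commas and whitespace. A usage sketch under those assumptions - the 'example' section and its keys are made up for illustration, and the import assumes this module is installed as mercurial.ui:

    from mercurial.ui import ui

    parent = ui()
    parent.setconfig('example', 'greeting', 'hello')
    parent.setconfig('example', 'reviewers', 'alice, bob carol')
    child = ui(parentui=parent)

    assert child.config('example', 'greeting') == 'hello'             # via the parent chain
    assert child.config('example', 'missing', 'none set') == 'none set'
    assert child.configlist('example', 'reviewers') == ['alice', 'bob', 'carol']
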
120 def has_config(self, section):
120 def has_config(self, section):
121 '''tell whether section exists in config.'''
121 '''tell whether section exists in config.'''
122 return self.cdata.has_section(section)
122 return self.cdata.has_section(section)
123
123
124 def configitems(self, section):
124 def configitems(self, section):
125 items = {}
125 items = {}
126 if self.parentui is not None:
126 if self.parentui is not None:
127 items = dict(self.parentui.configitems(section))
127 items = dict(self.parentui.configitems(section))
128 if self.cdata.has_section(section):
128 if self.cdata.has_section(section):
129 try:
129 try:
130 items.update(dict(self.cdata.items(section)))
130 items.update(dict(self.cdata.items(section)))
131 except ConfigParser.InterpolationError, inst:
131 except ConfigParser.InterpolationError, inst:
132 raise util.Abort(_("Error in configuration:\n%s") % inst)
132 raise util.Abort(_("Error in configuration:\n%s") % inst)
133 x = items.items()
133 x = items.items()
134 x.sort()
134 x.sort()
135 return x
135 return x
136
136
137 def walkconfig(self, seen=None):
137 def walkconfig(self, seen=None):
138 if seen is None:
138 if seen is None:
139 seen = {}
139 seen = {}
140 for (section, name), value in self.overlay.iteritems():
140 for (section, name), value in self.overlay.iteritems():
141 yield section, name, value
141 yield section, name, value
142 seen[section, name] = 1
142 seen[section, name] = 1
143 for section in self.cdata.sections():
143 for section in self.cdata.sections():
144 for name, value in self.cdata.items(section):
144 for name, value in self.cdata.items(section):
145 if (section, name) in seen: continue
145 if (section, name) in seen: continue
146 yield section, name, value.replace('\n', '\\n')
146 yield section, name, value.replace('\n', '\\n')
147 seen[section, name] = 1
147 seen[section, name] = 1
148 if self.parentui is not None:
148 if self.parentui is not None:
149 for parent in self.parentui.walkconfig(seen):
149 for parent in self.parentui.walkconfig(seen):
150 yield parent
150 yield parent
151
151
152 def extensions(self):
152 def extensions(self):
153 result = self.configitems("extensions")
153 result = self.configitems("extensions")
154 for i, (key, value) in enumerate(result):
154 for i, (key, value) in enumerate(result):
155 if value:
155 if value:
156 result[i] = (key, os.path.expanduser(value))
156 result[i] = (key, os.path.expanduser(value))
157 return result
157 return result
158
158
159 def hgignorefiles(self):
159 def hgignorefiles(self):
160 result = []
160 result = []
161 for key, value in self.configitems("ui"):
161 for key, value in self.configitems("ui"):
162 if key == 'ignore' or key.startswith('ignore.'):
162 if key == 'ignore' or key.startswith('ignore.'):
163 result.append(os.path.expanduser(value))
163 result.append(os.path.expanduser(value))
164 return result
164 return result
165
165
166 def configrevlog(self):
166 def configrevlog(self):
167 result = {}
167 result = {}
168 for key, value in self.configitems("revlog"):
168 for key, value in self.configitems("revlog"):
169 result[key.lower()] = value
169 result[key.lower()] = value
170 return result
170 return result
171
171
172 def diffopts(self, opts={}):
173 return mdiff.diffopts(
174 text=opts.get('text'),
175 showfunc=(opts.get('show_function') or
176 self.configbool('diff', 'showfunc', None)),
177 git=(opts.get('git') or
178 self.configbool('diff', 'git', None)),
179 ignorews=(opts.get('ignore_all_space') or
180 self.configbool('diff', 'ignorews', None)),
181 ignorewsamount=(opts.get('ignore_space_change') or
182 self.configbool('diff', 'ignorewsamount', None)),
183 ignoreblanklines=(opts.get('ignore_blank_lines') or
184 self.configbool('diff', 'ignoreblanklines', None)))
185
186 def username(self):
172 def username(self):
187 """Return default username to be used in commits.
173 """Return default username to be used in commits.
188
174
189 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
175 Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
190 and stop searching if one of these is set.
176 and stop searching if one of these is set.
191 Abort if found username is an empty string to force specifying
177 Abort if found username is an empty string to force specifying
192 the commit user elsewhere, e.g. with line option or repo hgrc.
178 the commit user elsewhere, e.g. with line option or repo hgrc.
193 If not found, use ($LOGNAME or $USER or $LNAME or
179 If not found, use ($LOGNAME or $USER or $LNAME or
194 $USERNAME) +"@full.hostname".
180 $USERNAME) +"@full.hostname".
195 """
181 """
196 user = os.environ.get("HGUSER")
182 user = os.environ.get("HGUSER")
197 if user is None:
183 if user is None:
198 user = self.config("ui", "username")
184 user = self.config("ui", "username")
199 if user is None:
185 if user is None:
200 user = os.environ.get("EMAIL")
186 user = os.environ.get("EMAIL")
201 if user is None:
187 if user is None:
202 try:
188 try:
203 user = '%s@%s' % (util.getuser(), socket.getfqdn())
189 user = '%s@%s' % (util.getuser(), socket.getfqdn())
204 except KeyError:
190 except KeyError:
205 raise util.Abort(_("Please specify a username."))
191 raise util.Abort(_("Please specify a username."))
206 return user
192 return user
207
193
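
The username() docstring above spells out a search order that the code then implements. Restated as a standalone helper; this sketch skips the empty-string abort described in the docstring and uses plain os.environ lookups in place of util.getuser:

    import os
    import socket

    def resolve_username(configured):
        # documented order: $HGUSER, [ui] username from hgrc, $EMAIL, then user@host
        for candidate in (os.environ.get("HGUSER"), configured, os.environ.get("EMAIL")):
            if candidate is not None:
                return candidate
        login = (os.environ.get("LOGNAME") or os.environ.get("USER") or
                 os.environ.get("LNAME") or os.environ.get("USERNAME") or "unknown")
        return "%s@%s" % (login, socket.getfqdn())
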
208 def shortuser(self, user):
194 def shortuser(self, user):
209 """Return a short representation of a user name or email address."""
195 """Return a short representation of a user name or email address."""
210 if not self.verbose: user = util.shortuser(user)
196 if not self.verbose: user = util.shortuser(user)
211 return user
197 return user
212
198
213 def expandpath(self, loc, default=None):
199 def expandpath(self, loc, default=None):
214 """Return repository location relative to cwd or from [paths]"""
200 """Return repository location relative to cwd or from [paths]"""
215 if "://" in loc or os.path.isdir(loc):
201 if "://" in loc or os.path.isdir(loc):
216 return loc
202 return loc
217
203
218 path = self.config("paths", loc)
204 path = self.config("paths", loc)
219 if not path and default is not None:
205 if not path and default is not None:
220 path = self.config("paths", default)
206 path = self.config("paths", default)
221 return path or loc
207 return path or loc
222
208
223 def write(self, *args):
209 def write(self, *args):
224 if self.header:
210 if self.header:
225 if self.header != self.prev_header:
211 if self.header != self.prev_header:
226 self.prev_header = self.header
212 self.prev_header = self.header
227 self.write(*self.header)
213 self.write(*self.header)
228 self.header = []
214 self.header = []
229 for a in args:
215 for a in args:
230 sys.stdout.write(str(a))
216 sys.stdout.write(str(a))
231
217
232 def write_header(self, *args):
218 def write_header(self, *args):
233 for a in args:
219 for a in args:
234 self.header.append(str(a))
220 self.header.append(str(a))
235
221
236 def write_err(self, *args):
222 def write_err(self, *args):
237 try:
223 try:
238 if not sys.stdout.closed: sys.stdout.flush()
224 if not sys.stdout.closed: sys.stdout.flush()
239 for a in args:
225 for a in args:
240 sys.stderr.write(str(a))
226 sys.stderr.write(str(a))
241 except IOError, inst:
227 except IOError, inst:
242 if inst.errno != errno.EPIPE:
228 if inst.errno != errno.EPIPE:
243 raise
229 raise
244
230
245 def flush(self):
231 def flush(self):
246 try: sys.stdout.flush()
232 try: sys.stdout.flush()
247 except: pass
233 except: pass
248 try: sys.stderr.flush()
234 try: sys.stderr.flush()
249 except: pass
235 except: pass
250
236
251 def readline(self):
237 def readline(self):
252 return sys.stdin.readline()[:-1]
238 return sys.stdin.readline()[:-1]
253 def prompt(self, msg, pat=None, default="y"):
239 def prompt(self, msg, pat=None, default="y"):
254 if not self.interactive: return default
240 if not self.interactive: return default
255 while 1:
241 while 1:
256 self.write(msg, " ")
242 self.write(msg, " ")
257 r = self.readline()
243 r = self.readline()
258 if not pat or re.match(pat, r):
244 if not pat or re.match(pat, r):
259 return r
245 return r
260 else:
246 else:
261 self.write(_("unrecognized response\n"))
247 self.write(_("unrecognized response\n"))
262 def getpass(self, prompt=None, default=None):
248 def getpass(self, prompt=None, default=None):
263 if not self.interactive: return default
249 if not self.interactive: return default
264 return getpass.getpass(prompt or _('password: '))
250 return getpass.getpass(prompt or _('password: '))
265 def status(self, *msg):
251 def status(self, *msg):
266 if not self.quiet: self.write(*msg)
252 if not self.quiet: self.write(*msg)
267 def warn(self, *msg):
253 def warn(self, *msg):
268 self.write_err(*msg)
254 self.write_err(*msg)
269 def note(self, *msg):
255 def note(self, *msg):
270 if self.verbose: self.write(*msg)
256 if self.verbose: self.write(*msg)
271 def debug(self, *msg):
257 def debug(self, *msg):
272 if self.debugflag: self.write(*msg)
258 if self.debugflag: self.write(*msg)
273 def edit(self, text, user):
259 def edit(self, text, user):
274 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
260 (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt",
275 text=True)
261 text=True)
276 try:
262 try:
277 f = os.fdopen(fd, "w")
263 f = os.fdopen(fd, "w")
278 f.write(text)
264 f.write(text)
279 f.close()
265 f.close()
280
266
281 editor = (os.environ.get("HGEDITOR") or
267 editor = (os.environ.get("HGEDITOR") or
282 self.config("ui", "editor") or
268 self.config("ui", "editor") or
283 os.environ.get("EDITOR", "vi"))
269 os.environ.get("EDITOR", "vi"))
284
270
285 util.system("%s \"%s\"" % (editor, name),
271 util.system("%s \"%s\"" % (editor, name),
286 environ={'HGUSER': user},
272 environ={'HGUSER': user},
287 onerr=util.Abort, errprefix=_("edit failed"))
273 onerr=util.Abort, errprefix=_("edit failed"))
288
274
289 f = open(name)
275 f = open(name)
290 t = f.read()
276 t = f.read()
291 f.close()
277 f.close()
292 t = re.sub("(?m)^HG:.*\n", "", t)
278 t = re.sub("(?m)^HG:.*\n", "", t)
293 finally:
279 finally:
294 os.unlink(name)
280 os.unlink(name)
295
281
296 return t
282 return t
297
283
298 def sendmail(self):
299 '''send mail message. object returned has one method, sendmail.
300 call as sendmail(sender, list-of-recipients, msg).'''
301
302 def smtp():
303 '''send mail using smtp.'''
304
305 local_hostname = self.config('smtp', 'local_hostname')
306 s = smtplib.SMTP(local_hostname=local_hostname)
307 mailhost = self.config('smtp', 'host')
308 if not mailhost:
309 raise util.Abort(_('no [smtp]host in hgrc - cannot send mail'))
310 mailport = int(self.config('smtp', 'port', 25))
311 self.note(_('sending mail: smtp host %s, port %s\n') %
312 (mailhost, mailport))
313 s.connect(host=mailhost, port=mailport)
314 if self.configbool('smtp', 'tls'):
315 self.note(_('(using tls)\n'))
316 s.ehlo()
317 s.starttls()
318 s.ehlo()
319 username = self.config('smtp', 'username')
320 password = self.config('smtp', 'password')
321 if username and password:
322 self.note(_('(authenticating to mail server as %s)\n') %
323 (username))
324 s.login(username, password)
325 return s
326
327 class sendmail(object):
328 '''send mail using sendmail.'''
329
330 def __init__(self, ui, program):
331 self.ui = ui
332 self.program = program
333
334 def sendmail(self, sender, recipients, msg):
335 cmdline = '%s -f %s %s' % (
336 self.program, templater.email(sender),
337 ' '.join(map(templater.email, recipients)))
338 self.ui.note(_('sending mail: %s\n') % cmdline)
339 fp = os.popen(cmdline, 'w')
340 fp.write(msg)
341 ret = fp.close()
342 if ret:
343 raise util.Abort('%s %s' % (
344 os.path.basename(self.program.split(None, 1)[0]),
345 util.explain_exit(ret)[0]))
346
347 method = self.config('email', 'method', 'smtp')
348 if method == 'smtp':
349 mail = smtp()
350 else:
351 mail = sendmail(self, method)
352 return mail
353
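
The sendmail() block above, removed from ui.py by this change, returned an object with a single sendmail(sender, list-of-recipients, msg) method, so callers looked roughly like the sketch below. The addresses and pre-formatted message are placeholders, the import assumes mercurial of this vintage is installed, and the call will abort unless [email]/[smtp] are configured.

    from mercurial.ui import ui

    u = ui()
    m = u.sendmail()              # smtp or a sendmail program, per the [email] method setting
    m.sendmail('patches@example.com',
               ['dev@example.com'],
               'Subject: [PATCH] example\n\nmessage body\n')
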
354 def print_exc(self):
284 def print_exc(self):
355 '''print exception traceback if traceback printing enabled.
285 '''print exception traceback if traceback printing enabled.
356 only to call in exception handler. returns true if traceback
286 only to call in exception handler. returns true if traceback
357 printed.'''
287 printed.'''
358 if self.traceback:
288 if self.traceback:
359 traceback.print_exc()
289 traceback.print_exc()
360 return self.traceback
290 return self.traceback