use new style classes
Benoit Boissinot
r8778:c5f36402 default

The requested changes are too big and the content below was truncated.

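The change itself is small: the nested gnuarch_rev class and the logstream class gain an explicit object base. As a minimal Python 2 sketch (hypothetical class names, not part of the patch), the practical difference between the two styles looks like this:

class OldStyle:             # old-style: instances all have type 'instance'
    pass

class NewStyle(object):     # new-style: real type, descriptors, super() work
    pass

print type(OldStyle())      # <type 'instance'>
print type(NewStyle())      # <class '__main__.NewStyle'>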
@@ -1,342 +1,342 @@
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import util
11 from mercurial import util
12 import os, shutil, tempfile, stat, locale
12 import os, shutil, tempfile, stat, locale
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
17 class gnuarch_rev:
17 class gnuarch_rev(object):
18 def __init__(self, rev):
18 def __init__(self, rev):
19 self.rev = rev
19 self.rev = rev
20 self.summary = ''
20 self.summary = ''
21 self.date = None
21 self.date = None
22 self.author = ''
22 self.author = ''
23 self.continuationof = None
23 self.continuationof = None
24 self.add_files = []
24 self.add_files = []
25 self.mod_files = []
25 self.mod_files = []
26 self.del_files = []
26 self.del_files = []
27 self.ren_files = {}
27 self.ren_files = {}
28 self.ren_dirs = {}
28 self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repo") % path)
34 raise NoRepo(_("%s does not look like a GNU Arch repo") % path)
35
35
36 # Could use checktool, but we want to check for baz or tla.
36 # Could use checktool, but we want to check for baz or tla.
37 self.execmd = None
37 self.execmd = None
38 if util.find_exe('baz'):
38 if util.find_exe('baz'):
39 self.execmd = 'baz'
39 self.execmd = 'baz'
40 else:
40 else:
41 if util.find_exe('tla'):
41 if util.find_exe('tla'):
42 self.execmd = 'tla'
42 self.execmd = 'tla'
43 else:
43 else:
44 raise util.Abort(_('cannot find a GNU Arch tool'))
44 raise util.Abort(_('cannot find a GNU Arch tool'))
45
45
46 commandline.__init__(self, ui, self.execmd)
46 commandline.__init__(self, ui, self.execmd)
47
47
48 self.path = os.path.realpath(path)
48 self.path = os.path.realpath(path)
49 self.tmppath = None
49 self.tmppath = None
50
50
51 self.treeversion = None
51 self.treeversion = None
52 self.lastrev = None
52 self.lastrev = None
53 self.changes = {}
53 self.changes = {}
54 self.parents = {}
54 self.parents = {}
55 self.tags = {}
55 self.tags = {}
56 self.modecache = {}
56 self.modecache = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.locale = locale.getpreferredencoding()
58 self.locale = locale.getpreferredencoding()
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieveing revisions for %s' % treeversion)
92 self.checkexit(status, 'failed retrieveing revisions for %s' % treeversion)
93
93
94 # No new iteration unless a revision has a continuation-of header
94 # No new iteration unless a revision has a continuation-of header
95 treeversion = None
95 treeversion = None
96
96
97 for l in output:
97 for l in output:
98 rev = l.strip()
98 rev = l.strip()
99 self.changes[rev] = self.gnuarch_rev(rev)
99 self.changes[rev] = self.gnuarch_rev(rev)
100 self.parents[rev] = []
100 self.parents[rev] = []
101
101
102 # Read author, date and summary
102 # Read author, date and summary
103 catlog, status = self.run('cat-log', '-d', self.path, rev)
103 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 if status:
104 if status:
105 catlog = self.run0('cat-archive-log', rev)
105 catlog = self.run0('cat-archive-log', rev)
106 self._parsecatlog(catlog, rev)
106 self._parsecatlog(catlog, rev)
107
107
108 # Populate the parents map
108 # Populate the parents map
109 self.parents[child].append(rev)
109 self.parents[child].append(rev)
110
110
111 # Keep track of the current revision as the child of the next
111 # Keep track of the current revision as the child of the next
112 # revision scanned
112 # revision scanned
113 child = rev
113 child = rev
114
114
115 # Check if we have to follow the usual incremental history
115 # Check if we have to follow the usual incremental history
116 # or if we have to 'jump' to a different treeversion given
116 # or if we have to 'jump' to a different treeversion given
117 # by the continuation-of header.
117 # by the continuation-of header.
118 if self.changes[rev].continuationof:
118 if self.changes[rev].continuationof:
119 treeversion = '--'.join(self.changes[rev].continuationof.split('--')[:-1])
119 treeversion = '--'.join(self.changes[rev].continuationof.split('--')[:-1])
120 break
120 break
121
121
122 # If we reached a base-0 revision w/o any continuation-of
122 # If we reached a base-0 revision w/o any continuation-of
123 # header, it means the tree history ends here.
123 # header, it means the tree history ends here.
124 if rev[-6:] == 'base-0':
124 if rev[-6:] == 'base-0':
125 break
125 break
126
126
127 def after(self):
127 def after(self):
128 self.ui.debug(_('cleaning up %s\n') % self.tmppath)
128 self.ui.debug(_('cleaning up %s\n') % self.tmppath)
129 shutil.rmtree(self.tmppath, ignore_errors=True)
129 shutil.rmtree(self.tmppath, ignore_errors=True)
130
130
131 def getheads(self):
131 def getheads(self):
132 return self.parents[None]
132 return self.parents[None]
133
133
134 def getfile(self, name, rev):
134 def getfile(self, name, rev):
135 if rev != self.lastrev:
135 if rev != self.lastrev:
136 raise util.Abort(_('internal calling inconsistency'))
136 raise util.Abort(_('internal calling inconsistency'))
137
137
138 # Raise IOError if necessary (i.e. deleted files).
138 # Raise IOError if necessary (i.e. deleted files).
139 if not os.path.exists(os.path.join(self.tmppath, name)):
139 if not os.path.exists(os.path.join(self.tmppath, name)):
140 raise IOError
140 raise IOError
141
141
142 data, mode = self._getfile(name, rev)
142 data, mode = self._getfile(name, rev)
143 self.modecache[(name, rev)] = mode
143 self.modecache[(name, rev)] = mode
144
144
145 return data
145 return data
146
146
147 def getmode(self, name, rev):
147 def getmode(self, name, rev):
148 return self.modecache[(name, rev)]
148 return self.modecache[(name, rev)]
149
149
150 def getchanges(self, rev):
150 def getchanges(self, rev):
151 self.modecache = {}
151 self.modecache = {}
152 self._update(rev)
152 self._update(rev)
153 changes = []
153 changes = []
154 copies = {}
154 copies = {}
155
155
156 for f in self.changes[rev].add_files:
156 for f in self.changes[rev].add_files:
157 changes.append((f, rev))
157 changes.append((f, rev))
158
158
159 for f in self.changes[rev].mod_files:
159 for f in self.changes[rev].mod_files:
160 changes.append((f, rev))
160 changes.append((f, rev))
161
161
162 for f in self.changes[rev].del_files:
162 for f in self.changes[rev].del_files:
163 changes.append((f, rev))
163 changes.append((f, rev))
164
164
165 for src in self.changes[rev].ren_files:
165 for src in self.changes[rev].ren_files:
166 to = self.changes[rev].ren_files[src]
166 to = self.changes[rev].ren_files[src]
167 changes.append((src, rev))
167 changes.append((src, rev))
168 changes.append((to, rev))
168 changes.append((to, rev))
169 copies[to] = src
169 copies[to] = src
170
170
171 for src in self.changes[rev].ren_dirs:
171 for src in self.changes[rev].ren_dirs:
172 to = self.changes[rev].ren_dirs[src]
172 to = self.changes[rev].ren_dirs[src]
173 chgs, cps = self._rendirchanges(src, to);
173 chgs, cps = self._rendirchanges(src, to);
174 changes += [(f, rev) for f in chgs]
174 changes += [(f, rev) for f in chgs]
175 copies.update(cps)
175 copies.update(cps)
176
176
177 self.lastrev = rev
177 self.lastrev = rev
178 return sorted(set(changes)), copies
178 return sorted(set(changes)), copies
179
179
180 def getcommit(self, rev):
180 def getcommit(self, rev):
181 changes = self.changes[rev]
181 changes = self.changes[rev]
182 return commit(author=changes.author, date=changes.date,
182 return commit(author=changes.author, date=changes.date,
183 desc=changes.summary, parents=self.parents[rev], rev=rev)
183 desc=changes.summary, parents=self.parents[rev], rev=rev)
184
184
185 def gettags(self):
185 def gettags(self):
186 return self.tags
186 return self.tags
187
187
188 def _execute(self, cmd, *args, **kwargs):
188 def _execute(self, cmd, *args, **kwargs):
189 cmdline = [self.execmd, cmd]
189 cmdline = [self.execmd, cmd]
190 cmdline += args
190 cmdline += args
191 cmdline = [util.shellquote(arg) for arg in cmdline]
191 cmdline = [util.shellquote(arg) for arg in cmdline]
192 cmdline += ['>', util.nulldev, '2>', util.nulldev]
192 cmdline += ['>', util.nulldev, '2>', util.nulldev]
193 cmdline = util.quotecommand(' '.join(cmdline))
193 cmdline = util.quotecommand(' '.join(cmdline))
194 self.ui.debug(cmdline, '\n')
194 self.ui.debug(cmdline, '\n')
195 return os.system(cmdline)
195 return os.system(cmdline)
196
196
197 def _update(self, rev):
197 def _update(self, rev):
198 self.ui.debug(_('applying revision %s...\n') % rev)
198 self.ui.debug(_('applying revision %s...\n') % rev)
199 changeset, status = self.runlines('replay', '-d', self.tmppath,
199 changeset, status = self.runlines('replay', '-d', self.tmppath,
200 rev)
200 rev)
201 if status:
201 if status:
202 # Something went wrong while merging (baz or tla
202 # Something went wrong while merging (baz or tla
203 # issue?), get latest revision and try from there
203 # issue?), get latest revision and try from there
204 shutil.rmtree(self.tmppath, ignore_errors=True)
204 shutil.rmtree(self.tmppath, ignore_errors=True)
205 self._obtainrevision(rev)
205 self._obtainrevision(rev)
206 else:
206 else:
207 old_rev = self.parents[rev][0]
207 old_rev = self.parents[rev][0]
208 self.ui.debug(_('computing changeset between %s and %s...\n')
208 self.ui.debug(_('computing changeset between %s and %s...\n')
209 % (old_rev, rev))
209 % (old_rev, rev))
210 self._parsechangeset(changeset, rev)
210 self._parsechangeset(changeset, rev)
211
211
212 def _getfile(self, name, rev):
212 def _getfile(self, name, rev):
213 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
213 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
214 if stat.S_ISLNK(mode):
214 if stat.S_ISLNK(mode):
215 data = os.readlink(os.path.join(self.tmppath, name))
215 data = os.readlink(os.path.join(self.tmppath, name))
216 mode = mode and 'l' or ''
216 mode = mode and 'l' or ''
217 else:
217 else:
218 data = open(os.path.join(self.tmppath, name), 'rb').read()
218 data = open(os.path.join(self.tmppath, name), 'rb').read()
219 mode = (mode & 0111) and 'x' or ''
219 mode = (mode & 0111) and 'x' or ''
220 return data, mode
220 return data, mode
221
221
222 def _exclude(self, name):
222 def _exclude(self, name):
223 exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
223 exclude = [ '{arch}', '.arch-ids', '.arch-inventory' ]
224 for exc in exclude:
224 for exc in exclude:
225 if name.find(exc) != -1:
225 if name.find(exc) != -1:
226 return True
226 return True
227 return False
227 return False
228
228
229 def _readcontents(self, path):
229 def _readcontents(self, path):
230 files = []
230 files = []
231 contents = os.listdir(path)
231 contents = os.listdir(path)
232 while len(contents) > 0:
232 while len(contents) > 0:
233 c = contents.pop()
233 c = contents.pop()
234 p = os.path.join(path, c)
234 p = os.path.join(path, c)
235 # os.walk could be used, but here we avoid internal GNU
235 # os.walk could be used, but here we avoid internal GNU
236 # Arch files and directories, thus saving a lot time.
236 # Arch files and directories, thus saving a lot time.
237 if not self._exclude(p):
237 if not self._exclude(p):
238 if os.path.isdir(p):
238 if os.path.isdir(p):
239 contents += [os.path.join(c, f) for f in os.listdir(p)]
239 contents += [os.path.join(c, f) for f in os.listdir(p)]
240 else:
240 else:
241 files.append(c)
241 files.append(c)
242 return files
242 return files
243
243
244 def _rendirchanges(self, src, dest):
244 def _rendirchanges(self, src, dest):
245 changes = []
245 changes = []
246 copies = {}
246 copies = {}
247 files = self._readcontents(os.path.join(self.tmppath, dest))
247 files = self._readcontents(os.path.join(self.tmppath, dest))
248 for f in files:
248 for f in files:
249 s = os.path.join(src, f)
249 s = os.path.join(src, f)
250 d = os.path.join(dest, f)
250 d = os.path.join(dest, f)
251 changes.append(s)
251 changes.append(s)
252 changes.append(d)
252 changes.append(d)
253 copies[d] = s
253 copies[d] = s
254 return changes, copies
254 return changes, copies
255
255
256 def _obtainrevision(self, rev):
256 def _obtainrevision(self, rev):
257 self.ui.debug(_('obtaining revision %s...\n') % rev)
257 self.ui.debug(_('obtaining revision %s...\n') % rev)
258 output = self._execute('get', rev, self.tmppath)
258 output = self._execute('get', rev, self.tmppath)
259 self.checkexit(output)
259 self.checkexit(output)
260 self.ui.debug(_('analyzing revision %s...\n') % rev)
260 self.ui.debug(_('analyzing revision %s...\n') % rev)
261 files = self._readcontents(self.tmppath)
261 files = self._readcontents(self.tmppath)
262 self.changes[rev].add_files += files
262 self.changes[rev].add_files += files
263
263
264 def _stripbasepath(self, path):
264 def _stripbasepath(self, path):
265 if path.startswith('./'):
265 if path.startswith('./'):
266 return path[2:]
266 return path[2:]
267 return path
267 return path
268
268
269 def _parsecatlog(self, data, rev):
269 def _parsecatlog(self, data, rev):
270 try:
270 try:
271 catlog = self.catlogparser.parsestr(data)
271 catlog = self.catlogparser.parsestr(data)
272
272
273 # Commit date
273 # Commit date
274 self.changes[rev].date = util.datestr(
274 self.changes[rev].date = util.datestr(
275 util.strdate(catlog['Standard-date'],
275 util.strdate(catlog['Standard-date'],
276 '%Y-%m-%d %H:%M:%S'))
276 '%Y-%m-%d %H:%M:%S'))
277
277
278 # Commit author
278 # Commit author
279 self.changes[rev].author = self.recode(catlog['Creator'])
279 self.changes[rev].author = self.recode(catlog['Creator'])
280
280
281 # Commit description
281 # Commit description
282 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
282 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
283 catlog.get_payload()))
283 catlog.get_payload()))
284 self.changes[rev].summary = self.recode(self.changes[rev].summary)
284 self.changes[rev].summary = self.recode(self.changes[rev].summary)
285
285
286 # Commit revision origin when dealing with a branch or tag
286 # Commit revision origin when dealing with a branch or tag
287 if catlog.has_key('Continuation-of'):
287 if catlog.has_key('Continuation-of'):
288 self.changes[rev].continuationof = self.recode(catlog['Continuation-of'])
288 self.changes[rev].continuationof = self.recode(catlog['Continuation-of'])
289 except Exception:
289 except Exception:
290 raise util.Abort(_('could not parse cat-log of %s') % rev)
290 raise util.Abort(_('could not parse cat-log of %s') % rev)
291
291
292 def _parsechangeset(self, data, rev):
292 def _parsechangeset(self, data, rev):
293 for l in data:
293 for l in data:
294 l = l.strip()
294 l = l.strip()
295 # Added file (ignore added directory)
295 # Added file (ignore added directory)
296 if l.startswith('A') and not l.startswith('A/'):
296 if l.startswith('A') and not l.startswith('A/'):
297 file = self._stripbasepath(l[1:].strip())
297 file = self._stripbasepath(l[1:].strip())
298 if not self._exclude(file):
298 if not self._exclude(file):
299 self.changes[rev].add_files.append(file)
299 self.changes[rev].add_files.append(file)
300 # Deleted file (ignore deleted directory)
300 # Deleted file (ignore deleted directory)
301 elif l.startswith('D') and not l.startswith('D/'):
301 elif l.startswith('D') and not l.startswith('D/'):
302 file = self._stripbasepath(l[1:].strip())
302 file = self._stripbasepath(l[1:].strip())
303 if not self._exclude(file):
303 if not self._exclude(file):
304 self.changes[rev].del_files.append(file)
304 self.changes[rev].del_files.append(file)
305 # Modified binary file
305 # Modified binary file
306 elif l.startswith('Mb'):
306 elif l.startswith('Mb'):
307 file = self._stripbasepath(l[2:].strip())
307 file = self._stripbasepath(l[2:].strip())
308 if not self._exclude(file):
308 if not self._exclude(file):
309 self.changes[rev].mod_files.append(file)
309 self.changes[rev].mod_files.append(file)
310 # Modified link
310 # Modified link
311 elif l.startswith('M->'):
311 elif l.startswith('M->'):
312 file = self._stripbasepath(l[3:].strip())
312 file = self._stripbasepath(l[3:].strip())
313 if not self._exclude(file):
313 if not self._exclude(file):
314 self.changes[rev].mod_files.append(file)
314 self.changes[rev].mod_files.append(file)
315 # Modified file
315 # Modified file
316 elif l.startswith('M'):
316 elif l.startswith('M'):
317 file = self._stripbasepath(l[1:].strip())
317 file = self._stripbasepath(l[1:].strip())
318 if not self._exclude(file):
318 if not self._exclude(file):
319 self.changes[rev].mod_files.append(file)
319 self.changes[rev].mod_files.append(file)
320 # Renamed file (or link)
320 # Renamed file (or link)
321 elif l.startswith('=>'):
321 elif l.startswith('=>'):
322 files = l[2:].strip().split(' ')
322 files = l[2:].strip().split(' ')
323 if len(files) == 1:
323 if len(files) == 1:
324 files = l[2:].strip().split('\t')
324 files = l[2:].strip().split('\t')
325 src = self._stripbasepath(files[0])
325 src = self._stripbasepath(files[0])
326 dst = self._stripbasepath(files[1])
326 dst = self._stripbasepath(files[1])
327 if not self._exclude(src) and not self._exclude(dst):
327 if not self._exclude(src) and not self._exclude(dst):
328 self.changes[rev].ren_files[src] = dst
328 self.changes[rev].ren_files[src] = dst
329 # Conversion from file to link or from link to file (modified)
329 # Conversion from file to link or from link to file (modified)
330 elif l.startswith('ch'):
330 elif l.startswith('ch'):
331 file = self._stripbasepath(l[2:].strip())
331 file = self._stripbasepath(l[2:].strip())
332 if not self._exclude(file):
332 if not self._exclude(file):
333 self.changes[rev].mod_files.append(file)
333 self.changes[rev].mod_files.append(file)
334 # Renamed directory
334 # Renamed directory
335 elif l.startswith('/>'):
335 elif l.startswith('/>'):
336 dirs = l[2:].strip().split(' ')
336 dirs = l[2:].strip().split(' ')
337 if len(dirs) == 1:
337 if len(dirs) == 1:
338 dirs = l[2:].strip().split('\t')
338 dirs = l[2:].strip().split('\t')
339 src = self._stripbasepath(dirs[0])
339 src = self._stripbasepath(dirs[0])
340 dst = self._stripbasepath(dirs[1])
340 dst = self._stripbasepath(dirs[1])
341 if not self._exclude(src) and not self._exclude(dst):
341 if not self._exclude(src) and not self._exclude(dst):
342 self.changes[rev].ren_dirs[src] = dst
342 self.changes[rev].ren_dirs[src] = dst
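For context, the prefix dispatch performed by _parsechangeset above can be summarized in a standalone sketch. classify below is a hypothetical helper, not part of the extension, and it splits rename lines on any whitespace instead of trying a space and then a tab as the real code does:

def classify(line):
    # Mirror the line prefixes handled by gnuarch_source._parsechangeset.
    l = line.strip()
    if l.startswith('A') and not l.startswith('A/'):   # added file
        return 'add', l[1:].strip()
    if l.startswith('D') and not l.startswith('D/'):   # deleted file
        return 'delete', l[1:].strip()
    if l.startswith('Mb'):                             # modified binary file
        return 'modify', l[2:].strip()
    if l.startswith('M->'):                            # modified link
        return 'modify', l[3:].strip()
    if l.startswith('M'):                              # modified file
        return 'modify', l[1:].strip()
    if l.startswith('=>'):                             # renamed file or link
        src, dst = l[2:].strip().split(None, 1)
        return 'rename', (src, dst)
    if l.startswith('ch'):                             # file <-> link change
        return 'modify', l[2:].strip()
    if l.startswith('/>'):                             # renamed directory
        src, dst = l[2:].strip().split(None, 1)
        return 'rename_dir', (src, dst)
    return 'ignore', l

print classify('A  src/foo.py')        # ('add', 'src/foo.py')
print classify('=> old.py new.py')     # ('rename', ('old.py', 'new.py'))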
@@ -1,1242 +1,1242 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
4 #
5 # Configuration options:
5 # Configuration options:
6 #
6 #
7 # convert.svn.trunk
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
12 # Relative path to tree of tags (default: "tags")
13 #
13 #
14 # Set these in a hgrc, or on the command line as follows:
14 # Set these in a hgrc, or on the command line as follows:
15 #
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
17
18 import locale
18 import locale
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import cPickle as pickle
22 import cPickle as pickle
23 import tempfile
23 import tempfile
24 import urllib
24 import urllib
25
25
26 from mercurial import strutil, util
26 from mercurial import strutil, util
27 from mercurial.i18n import _
27 from mercurial.i18n import _
28
28
29 # Subversion stuff. Works best with very recent Python SVN bindings
29 # Subversion stuff. Works best with very recent Python SVN bindings
30 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
31 # these bindings.
31 # these bindings.
32
32
33 from cStringIO import StringIO
33 from cStringIO import StringIO
34
34
35 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
35 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
36 from common import commandline, converter_source, converter_sink, mapfile
36 from common import commandline, converter_source, converter_sink, mapfile
37
37
38 try:
38 try:
39 from svn.core import SubversionException, Pool
39 from svn.core import SubversionException, Pool
40 import svn
40 import svn
41 import svn.client
41 import svn.client
42 import svn.core
42 import svn.core
43 import svn.ra
43 import svn.ra
44 import svn.delta
44 import svn.delta
45 import transport
45 import transport
46 import warnings
46 import warnings
47 warnings.filterwarnings('ignore',
47 warnings.filterwarnings('ignore',
48 module='svn.core',
48 module='svn.core',
49 category=DeprecationWarning)
49 category=DeprecationWarning)
50
50
51 except ImportError:
51 except ImportError:
52 pass
52 pass
53
53
54 class SvnPathNotFound(Exception):
54 class SvnPathNotFound(Exception):
55 pass
55 pass
56
56
57 def geturl(path):
57 def geturl(path):
58 try:
58 try:
59 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
59 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
60 except SubversionException:
60 except SubversionException:
61 pass
61 pass
62 if os.path.isdir(path):
62 if os.path.isdir(path):
63 path = os.path.normpath(os.path.abspath(path))
63 path = os.path.normpath(os.path.abspath(path))
64 if os.name == 'nt':
64 if os.name == 'nt':
65 path = '/' + util.normpath(path)
65 path = '/' + util.normpath(path)
66 return 'file://%s' % urllib.quote(path)
66 return 'file://%s' % urllib.quote(path)
67 return path
67 return path
68
68
69 def optrev(number):
69 def optrev(number):
70 optrev = svn.core.svn_opt_revision_t()
70 optrev = svn.core.svn_opt_revision_t()
71 optrev.kind = svn.core.svn_opt_revision_number
71 optrev.kind = svn.core.svn_opt_revision_number
72 optrev.value.number = number
72 optrev.value.number = number
73 return optrev
73 return optrev
74
74
75 class changedpath(object):
75 class changedpath(object):
76 def __init__(self, p):
76 def __init__(self, p):
77 self.copyfrom_path = p.copyfrom_path
77 self.copyfrom_path = p.copyfrom_path
78 self.copyfrom_rev = p.copyfrom_rev
78 self.copyfrom_rev = p.copyfrom_rev
79 self.action = p.action
79 self.action = p.action
80
80
81 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
81 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
82 strict_node_history=False):
82 strict_node_history=False):
83 protocol = -1
83 protocol = -1
84 def receiver(orig_paths, revnum, author, date, message, pool):
84 def receiver(orig_paths, revnum, author, date, message, pool):
85 if orig_paths is not None:
85 if orig_paths is not None:
86 for k, v in orig_paths.iteritems():
86 for k, v in orig_paths.iteritems():
87 orig_paths[k] = changedpath(v)
87 orig_paths[k] = changedpath(v)
88 pickle.dump((orig_paths, revnum, author, date, message),
88 pickle.dump((orig_paths, revnum, author, date, message),
89 fp, protocol)
89 fp, protocol)
90
90
91 try:
91 try:
92 # Use an ra of our own so that our parent can consume
92 # Use an ra of our own so that our parent can consume
93 # our results without confusing the server.
93 # our results without confusing the server.
94 t = transport.SvnRaTransport(url=url)
94 t = transport.SvnRaTransport(url=url)
95 svn.ra.get_log(t.ra, paths, start, end, limit,
95 svn.ra.get_log(t.ra, paths, start, end, limit,
96 discover_changed_paths,
96 discover_changed_paths,
97 strict_node_history,
97 strict_node_history,
98 receiver)
98 receiver)
99 except SubversionException, (inst, num):
99 except SubversionException, (inst, num):
100 pickle.dump(num, fp, protocol)
100 pickle.dump(num, fp, protocol)
101 except IOError:
101 except IOError:
102 # Caller may interrupt the iteration
102 # Caller may interrupt the iteration
103 pickle.dump(None, fp, protocol)
103 pickle.dump(None, fp, protocol)
104 else:
104 else:
105 pickle.dump(None, fp, protocol)
105 pickle.dump(None, fp, protocol)
106 fp.close()
106 fp.close()
107 # With large history, cleanup process goes crazy and suddenly
107 # With large history, cleanup process goes crazy and suddenly
108 # consumes *huge* amount of memory. The output file being closed,
108 # consumes *huge* amount of memory. The output file being closed,
109 # there is no need for clean termination.
109 # there is no need for clean termination.
110 os._exit(0)
110 os._exit(0)
111
111
112 def debugsvnlog(ui, **opts):
112 def debugsvnlog(ui, **opts):
113 """Fetch SVN log in a subprocess and channel them back to parent to
113 """Fetch SVN log in a subprocess and channel them back to parent to
114 avoid memory collection issues.
114 avoid memory collection issues.
115 """
115 """
116 util.set_binary(sys.stdin)
116 util.set_binary(sys.stdin)
117 util.set_binary(sys.stdout)
117 util.set_binary(sys.stdout)
118 args = decodeargs(sys.stdin.read())
118 args = decodeargs(sys.stdin.read())
119 get_log_child(sys.stdout, *args)
119 get_log_child(sys.stdout, *args)
120
120
121 class logstream:
121 class logstream(object):
122 """Interruptible revision log iterator."""
122 """Interruptible revision log iterator."""
123 def __init__(self, stdout):
123 def __init__(self, stdout):
124 self._stdout = stdout
124 self._stdout = stdout
125
125
126 def __iter__(self):
126 def __iter__(self):
127 while True:
127 while True:
128 entry = pickle.load(self._stdout)
128 entry = pickle.load(self._stdout)
129 try:
129 try:
130 orig_paths, revnum, author, date, message = entry
130 orig_paths, revnum, author, date, message = entry
131 except:
131 except:
132 if entry is None:
132 if entry is None:
133 break
133 break
134 raise SubversionException("child raised exception", entry)
134 raise SubversionException("child raised exception", entry)
135 yield entry
135 yield entry
136
136
137 def close(self):
137 def close(self):
138 if self._stdout:
138 if self._stdout:
139 self._stdout.close()
139 self._stdout.close()
140 self._stdout = None
140 self._stdout = None
141
141
142
142
143 # Check to see if the given path is a local Subversion repo. Verify this by
143 # Check to see if the given path is a local Subversion repo. Verify this by
144 # looking for several svn-specific files and directories in the given
144 # looking for several svn-specific files and directories in the given
145 # directory.
145 # directory.
146 def filecheck(path, proto):
146 def filecheck(path, proto):
147 for x in ('locks', 'hooks', 'format', 'db', ):
147 for x in ('locks', 'hooks', 'format', 'db', ):
148 if not os.path.exists(os.path.join(path, x)):
148 if not os.path.exists(os.path.join(path, x)):
149 return False
149 return False
150 return True
150 return True
151
151
152 # Check to see if a given path is the root of an svn repo over http. We verify
152 # Check to see if a given path is the root of an svn repo over http. We verify
153 # this by requesting a version-controlled URL we know can't exist and looking
153 # this by requesting a version-controlled URL we know can't exist and looking
154 # for the svn-specific "not found" XML.
154 # for the svn-specific "not found" XML.
155 def httpcheck(path, proto):
155 def httpcheck(path, proto):
156 return ('<m:human-readable errcode="160013">' in
156 return ('<m:human-readable errcode="160013">' in
157 urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read())
157 urllib.urlopen('%s://%s/!svn/ver/0/.svn' % (proto, path)).read())
158
158
159 protomap = {'http': httpcheck,
159 protomap = {'http': httpcheck,
160 'https': httpcheck,
160 'https': httpcheck,
161 'file': filecheck,
161 'file': filecheck,
162 }
162 }
163 def issvnurl(url):
163 def issvnurl(url):
164 try:
164 try:
165 proto, path = url.split('://', 1)
165 proto, path = url.split('://', 1)
166 path = urllib.url2pathname(path)
166 path = urllib.url2pathname(path)
167 except ValueError:
167 except ValueError:
168 proto = 'file'
168 proto = 'file'
169 path = os.path.abspath(url)
169 path = os.path.abspath(url)
170 path = path.replace(os.sep, '/')
170 path = path.replace(os.sep, '/')
171 check = protomap.get(proto, lambda p, p2: False)
171 check = protomap.get(proto, lambda p, p2: False)
172 while '/' in path:
172 while '/' in path:
173 if check(path, proto):
173 if check(path, proto):
174 return True
174 return True
175 path = path.rsplit('/', 1)[0]
175 path = path.rsplit('/', 1)[0]
176 return False
176 return False
177
177
178 # SVN conversion code stolen from bzr-svn and tailor
178 # SVN conversion code stolen from bzr-svn and tailor
179 #
179 #
180 # Subversion looks like a versioned filesystem, branches structures
180 # Subversion looks like a versioned filesystem, branches structures
181 # are defined by conventions and not enforced by the tool. First,
181 # are defined by conventions and not enforced by the tool. First,
182 # we define the potential branches (modules) as "trunk" and "branches"
182 # we define the potential branches (modules) as "trunk" and "branches"
183 # children directories. Revisions are then identified by their
183 # children directories. Revisions are then identified by their
184 # module and revision number (and a repository identifier).
184 # module and revision number (and a repository identifier).
185 #
185 #
186 # The revision graph is really a tree (or a forest). By default, a
186 # The revision graph is really a tree (or a forest). By default, a
187 # revision parent is the previous revision in the same module. If the
187 # revision parent is the previous revision in the same module. If the
188 # module directory is copied/moved from another module then the
188 # module directory is copied/moved from another module then the
189 # revision is the module root and its parent the source revision in
189 # revision is the module root and its parent the source revision in
190 # the parent module. A revision has at most one parent.
190 # the parent module. A revision has at most one parent.
191 #
191 #
192 class svn_source(converter_source):
192 class svn_source(converter_source):
193 def __init__(self, ui, url, rev=None):
193 def __init__(self, ui, url, rev=None):
194 super(svn_source, self).__init__(ui, url, rev=rev)
194 super(svn_source, self).__init__(ui, url, rev=rev)
195
195
196 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
196 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
197 (os.path.exists(url) and
197 (os.path.exists(url) and
198 os.path.exists(os.path.join(url, '.svn'))) or
198 os.path.exists(os.path.join(url, '.svn'))) or
199 issvnurl(url)):
199 issvnurl(url)):
200 raise NoRepo("%s does not look like a Subversion repo" % url)
200 raise NoRepo("%s does not look like a Subversion repo" % url)
201
201
202 try:
202 try:
203 SubversionException
203 SubversionException
204 except NameError:
204 except NameError:
205 raise MissingTool(_('Subversion python bindings could not be loaded'))
205 raise MissingTool(_('Subversion python bindings could not be loaded'))
206
206
207 try:
207 try:
208 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
208 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
209 if version < (1, 4):
209 if version < (1, 4):
210 raise MissingTool(_('Subversion python bindings %d.%d found, '
210 raise MissingTool(_('Subversion python bindings %d.%d found, '
211 '1.4 or later required') % version)
211 '1.4 or later required') % version)
212 except AttributeError:
212 except AttributeError:
213 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
213 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
214 'or later required'))
214 'or later required'))
215
215
216 self.encoding = locale.getpreferredencoding()
216 self.encoding = locale.getpreferredencoding()
217 self.lastrevs = {}
217 self.lastrevs = {}
218
218
219 latest = None
219 latest = None
220 try:
220 try:
221 # Support file://path@rev syntax. Useful e.g. to convert
221 # Support file://path@rev syntax. Useful e.g. to convert
222 # deleted branches.
222 # deleted branches.
223 at = url.rfind('@')
223 at = url.rfind('@')
224 if at >= 0:
224 if at >= 0:
225 latest = int(url[at+1:])
225 latest = int(url[at+1:])
226 url = url[:at]
226 url = url[:at]
227 except ValueError:
227 except ValueError:
228 pass
228 pass
229 self.url = geturl(url)
229 self.url = geturl(url)
230 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
230 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
231 try:
231 try:
232 self.transport = transport.SvnRaTransport(url=self.url)
232 self.transport = transport.SvnRaTransport(url=self.url)
233 self.ra = self.transport.ra
233 self.ra = self.transport.ra
234 self.ctx = self.transport.client
234 self.ctx = self.transport.client
235 self.baseurl = svn.ra.get_repos_root(self.ra)
235 self.baseurl = svn.ra.get_repos_root(self.ra)
236 # Module is either empty or a repository path starting with
236 # Module is either empty or a repository path starting with
237 # a slash and not ending with a slash.
237 # a slash and not ending with a slash.
238 self.module = urllib.unquote(self.url[len(self.baseurl):])
238 self.module = urllib.unquote(self.url[len(self.baseurl):])
239 self.prevmodule = None
239 self.prevmodule = None
240 self.rootmodule = self.module
240 self.rootmodule = self.module
241 self.commits = {}
241 self.commits = {}
242 self.paths = {}
242 self.paths = {}
243 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
243 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
244 except SubversionException:
244 except SubversionException:
245 ui.traceback()
245 ui.traceback()
246 raise NoRepo("%s does not look like a Subversion repo" % self.url)
246 raise NoRepo("%s does not look like a Subversion repo" % self.url)
247
247
248 if rev:
248 if rev:
249 try:
249 try:
250 latest = int(rev)
250 latest = int(rev)
251 except ValueError:
251 except ValueError:
252 raise util.Abort(_('svn: revision %s is not an integer') % rev)
252 raise util.Abort(_('svn: revision %s is not an integer') % rev)
253
253
254 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
254 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
255 try:
255 try:
256 self.startrev = int(self.startrev)
256 self.startrev = int(self.startrev)
257 if self.startrev < 0:
257 if self.startrev < 0:
258 self.startrev = 0
258 self.startrev = 0
259 except ValueError:
259 except ValueError:
260 raise util.Abort(_('svn: start revision %s is not an integer')
260 raise util.Abort(_('svn: start revision %s is not an integer')
261 % self.startrev)
261 % self.startrev)
262
262
263 try:
263 try:
264 self.get_blacklist()
264 self.get_blacklist()
265 except IOError:
265 except IOError:
266 pass
266 pass
267
267
268 self.head = self.latest(self.module, latest)
268 self.head = self.latest(self.module, latest)
269 if not self.head:
269 if not self.head:
270 raise util.Abort(_('no revision found in module %s') %
270 raise util.Abort(_('no revision found in module %s') %
271 self.module.encode(self.encoding))
271 self.module.encode(self.encoding))
272 self.last_changed = self.revnum(self.head)
272 self.last_changed = self.revnum(self.head)
273
273
274 self._changescache = None
274 self._changescache = None
275
275
276 if os.path.exists(os.path.join(url, '.svn/entries')):
276 if os.path.exists(os.path.join(url, '.svn/entries')):
277 self.wc = url
277 self.wc = url
278 else:
278 else:
279 self.wc = None
279 self.wc = None
280 self.convertfp = None
280 self.convertfp = None
281
281
282 def setrevmap(self, revmap):
282 def setrevmap(self, revmap):
283 lastrevs = {}
283 lastrevs = {}
284 for revid in revmap.iterkeys():
284 for revid in revmap.iterkeys():
285 uuid, module, revnum = self.revsplit(revid)
285 uuid, module, revnum = self.revsplit(revid)
286 lastrevnum = lastrevs.setdefault(module, revnum)
286 lastrevnum = lastrevs.setdefault(module, revnum)
287 if revnum > lastrevnum:
287 if revnum > lastrevnum:
288 lastrevs[module] = revnum
288 lastrevs[module] = revnum
289 self.lastrevs = lastrevs
289 self.lastrevs = lastrevs
290
290
291 def exists(self, path, optrev):
291 def exists(self, path, optrev):
292 try:
292 try:
293 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
293 svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
294 optrev, False, self.ctx)
294 optrev, False, self.ctx)
295 return True
295 return True
296 except SubversionException:
296 except SubversionException:
297 return False
297 return False
298
298
299 def getheads(self):
299 def getheads(self):
300
300
301 def isdir(path, revnum):
301 def isdir(path, revnum):
302 kind = self._checkpath(path, revnum)
302 kind = self._checkpath(path, revnum)
303 return kind == svn.core.svn_node_dir
303 return kind == svn.core.svn_node_dir
304
304
305 def getcfgpath(name, rev):
305 def getcfgpath(name, rev):
306 cfgpath = self.ui.config('convert', 'svn.' + name)
306 cfgpath = self.ui.config('convert', 'svn.' + name)
307 if cfgpath is not None and cfgpath.strip() == '':
307 if cfgpath is not None and cfgpath.strip() == '':
308 return None
308 return None
309 path = (cfgpath or name).strip('/')
309 path = (cfgpath or name).strip('/')
310 if not self.exists(path, rev):
310 if not self.exists(path, rev):
311 if cfgpath:
311 if cfgpath:
312 raise util.Abort(_('expected %s to be at %r, but not found')
312 raise util.Abort(_('expected %s to be at %r, but not found')
313 % (name, path))
313 % (name, path))
314 return None
314 return None
315 self.ui.note(_('found %s at %r\n') % (name, path))
315 self.ui.note(_('found %s at %r\n') % (name, path))
316 return path
316 return path
317
317
318 rev = optrev(self.last_changed)
318 rev = optrev(self.last_changed)
319 oldmodule = ''
319 oldmodule = ''
320 trunk = getcfgpath('trunk', rev)
320 trunk = getcfgpath('trunk', rev)
321 self.tags = getcfgpath('tags', rev)
321 self.tags = getcfgpath('tags', rev)
322 branches = getcfgpath('branches', rev)
322 branches = getcfgpath('branches', rev)
323
323
324 # If the project has a trunk or branches, we will extract heads
324 # If the project has a trunk or branches, we will extract heads
325 # from them. We keep the project root otherwise.
325 # from them. We keep the project root otherwise.
326 if trunk:
326 if trunk:
327 oldmodule = self.module or ''
327 oldmodule = self.module or ''
328 self.module += '/' + trunk
328 self.module += '/' + trunk
329 self.head = self.latest(self.module, self.last_changed)
329 self.head = self.latest(self.module, self.last_changed)
330 if not self.head:
330 if not self.head:
331 raise util.Abort(_('no revision found in module %s') %
331 raise util.Abort(_('no revision found in module %s') %
332 self.module.encode(self.encoding))
332 self.module.encode(self.encoding))
333
333
334 # First head in the list is the module's head
334 # First head in the list is the module's head
335 self.heads = [self.head]
335 self.heads = [self.head]
336 if self.tags is not None:
336 if self.tags is not None:
337 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
337 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
338
338
339 # Check if branches bring a few more heads to the list
339 # Check if branches bring a few more heads to the list
340 if branches:
340 if branches:
341 rpath = self.url.strip('/')
341 rpath = self.url.strip('/')
342 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
342 branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
343 rev, False, self.ctx)
343 rev, False, self.ctx)
344 for branch in branchnames.keys():
344 for branch in branchnames.keys():
345 module = '%s/%s/%s' % (oldmodule, branches, branch)
345 module = '%s/%s/%s' % (oldmodule, branches, branch)
346 if not isdir(module, self.last_changed):
346 if not isdir(module, self.last_changed):
347 continue
347 continue
348 brevid = self.latest(module, self.last_changed)
348 brevid = self.latest(module, self.last_changed)
349 if not brevid:
349 if not brevid:
350 self.ui.note(_('ignoring empty branch %s\n') %
350 self.ui.note(_('ignoring empty branch %s\n') %
351 branch.encode(self.encoding))
351 branch.encode(self.encoding))
352 continue
352 continue
353 self.ui.note(_('found branch %s at %d\n') %
353 self.ui.note(_('found branch %s at %d\n') %
354 (branch, self.revnum(brevid)))
354 (branch, self.revnum(brevid)))
355 self.heads.append(brevid)
355 self.heads.append(brevid)
356
356
357 if self.startrev and self.heads:
357 if self.startrev and self.heads:
358 if len(self.heads) > 1:
358 if len(self.heads) > 1:
359 raise util.Abort(_('svn: start revision is not supported '
359 raise util.Abort(_('svn: start revision is not supported '
360 'with more than one branch'))
360 'with more than one branch'))
361 revnum = self.revnum(self.heads[0])
361 revnum = self.revnum(self.heads[0])
362 if revnum < self.startrev:
362 if revnum < self.startrev:
363 raise util.Abort(_('svn: no revision found after start revision %d')
363 raise util.Abort(_('svn: no revision found after start revision %d')
364 % self.startrev)
364 % self.startrev)
365
365
366 return self.heads
366 return self.heads
367
367
368 def getfile(self, file, rev):
368 def getfile(self, file, rev):
369 data, mode = self._getfile(file, rev)
369 data, mode = self._getfile(file, rev)
370 self.modecache[(file, rev)] = mode
370 self.modecache[(file, rev)] = mode
371 return data
371 return data
372
372
373 def getmode(self, file, rev):
373 def getmode(self, file, rev):
374 return self.modecache[(file, rev)]
374 return self.modecache[(file, rev)]
375
375
376 def getchanges(self, rev):
376 def getchanges(self, rev):
377 if self._changescache and self._changescache[0] == rev:
377 if self._changescache and self._changescache[0] == rev:
378 return self._changescache[1]
378 return self._changescache[1]
379 self._changescache = None
379 self._changescache = None
380 self.modecache = {}
380 self.modecache = {}
381 (paths, parents) = self.paths[rev]
381 (paths, parents) = self.paths[rev]
382 if parents:
382 if parents:
383 files, copies = self.expandpaths(rev, paths, parents)
383 files, copies = self.expandpaths(rev, paths, parents)
384 else:
384 else:
385 # Perform a full checkout on roots
385 # Perform a full checkout on roots
386 uuid, module, revnum = self.revsplit(rev)
386 uuid, module, revnum = self.revsplit(rev)
387 entries = svn.client.ls(self.baseurl + urllib.quote(module),
387 entries = svn.client.ls(self.baseurl + urllib.quote(module),
388 optrev(revnum), True, self.ctx)
388 optrev(revnum), True, self.ctx)
389 files = [n for n,e in entries.iteritems()
389 files = [n for n,e in entries.iteritems()
390 if e.kind == svn.core.svn_node_file]
390 if e.kind == svn.core.svn_node_file]
391 copies = {}
391 copies = {}
392
392
393 files.sort()
393 files.sort()
394 files = zip(files, [rev] * len(files))
394 files = zip(files, [rev] * len(files))
395
395
396 # caller caches the result, so free it here to release memory
396 # caller caches the result, so free it here to release memory
397 del self.paths[rev]
397 del self.paths[rev]
398 return (files, copies)
398 return (files, copies)
399
399
400 def getchangedfiles(self, rev, i):
400 def getchangedfiles(self, rev, i):
401 changes = self.getchanges(rev)
401 changes = self.getchanges(rev)
402 self._changescache = (rev, changes)
402 self._changescache = (rev, changes)
403 return [f[0] for f in changes[0]]
403 return [f[0] for f in changes[0]]
404
404
405 def getcommit(self, rev):
405 def getcommit(self, rev):
406 if rev not in self.commits:
406 if rev not in self.commits:
407 uuid, module, revnum = self.revsplit(rev)
407 uuid, module, revnum = self.revsplit(rev)
408 self.module = module
408 self.module = module
409 self.reparent(module)
409 self.reparent(module)
410 # We assume that:
410 # We assume that:
411 # - requests for revisions after "stop" come from the
411 # - requests for revisions after "stop" come from the
412 # revision graph backward traversal. Cache all of them
412 # revision graph backward traversal. Cache all of them
413 # down to stop, they will be used eventually.
413 # down to stop, they will be used eventually.
414 # - requests for revisions before "stop" come to get
414 # - requests for revisions before "stop" come to get
415 # isolated branches parents. Just fetch what is needed.
415 # isolated branches parents. Just fetch what is needed.
416 stop = self.lastrevs.get(module, 0)
416 stop = self.lastrevs.get(module, 0)
417 if revnum < stop:
417 if revnum < stop:
418 stop = revnum + 1
418 stop = revnum + 1
419 self._fetch_revisions(revnum, stop)
419 self._fetch_revisions(revnum, stop)
420 commit = self.commits[rev]
420 commit = self.commits[rev]
421 # caller caches the result, so free it here to release memory
421 # caller caches the result, so free it here to release memory
422 del self.commits[rev]
422 del self.commits[rev]
423 return commit
423 return commit
424
424
425 def gettags(self):
425 def gettags(self):
426 tags = {}
426 tags = {}
427 if self.tags is None:
427 if self.tags is None:
428 return tags
428 return tags
429
429
430 # svn tags are just a convention, project branches left in a
430 # svn tags are just a convention, project branches left in a
431 # 'tags' directory. There is no other relationship than
431 # 'tags' directory. There is no other relationship than
432 # ancestry, which is expensive to discover and makes them hard
432 # ancestry, which is expensive to discover and makes them hard
433 # to update incrementally. Worse, past revisions may be
433 # to update incrementally. Worse, past revisions may be
434 # referenced by tags far away in the future, requiring a deep
434 # referenced by tags far away in the future, requiring a deep
435 # history traversal on every calculation. Current code
435 # history traversal on every calculation. Current code
436 # performs a single backward traversal, tracking moves within
436 # performs a single backward traversal, tracking moves within
437 # the tags directory (tag renaming) and recording a new tag
437 # the tags directory (tag renaming) and recording a new tag
438 # everytime a project is copied from outside the tags
438 # everytime a project is copied from outside the tags
439 # directory. It also lists deleted tags, this behaviour may
439 # directory. It also lists deleted tags, this behaviour may
440 # change in the future.
440 # change in the future.
441 pendings = []
441 pendings = []
442 tagspath = self.tags
442 tagspath = self.tags
443 start = svn.ra.get_latest_revnum(self.ra)
443 start = svn.ra.get_latest_revnum(self.ra)
444 try:
444 try:
445 for entry in self._getlog([self.tags], start, self.startrev):
445 for entry in self._getlog([self.tags], start, self.startrev):
446 origpaths, revnum, author, date, message = entry
446 origpaths, revnum, author, date, message = entry
447 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
447 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
448 in origpaths.iteritems() if e.copyfrom_path]
448 in origpaths.iteritems() if e.copyfrom_path]
449 # Apply moves/copies from more specific to general
449 # Apply moves/copies from more specific to general
450 copies.sort(reverse=True)
450 copies.sort(reverse=True)
451
451
452 srctagspath = tagspath
452 srctagspath = tagspath
453 if copies and copies[-1][2] == tagspath:
453 if copies and copies[-1][2] == tagspath:
454 # Track tags directory moves
454 # Track tags directory moves
455 srctagspath = copies.pop()[0]
455 srctagspath = copies.pop()[0]
456
456
457 for source, sourcerev, dest in copies:
457 for source, sourcerev, dest in copies:
458 if not dest.startswith(tagspath + '/'):
458 if not dest.startswith(tagspath + '/'):
459 continue
459 continue
460 for tag in pendings:
460 for tag in pendings:
461 if tag[0].startswith(dest):
461 if tag[0].startswith(dest):
462 tagpath = source + tag[0][len(dest):]
462 tagpath = source + tag[0][len(dest):]
463 tag[:2] = [tagpath, sourcerev]
463 tag[:2] = [tagpath, sourcerev]
464 break
464 break
465 else:
465 else:
466 pendings.append([source, sourcerev, dest])
466 pendings.append([source, sourcerev, dest])
467
467
468 # Filter out tags with children coming from different
468 # Filter out tags with children coming from different
469 # parts of the repository like:
469 # parts of the repository like:
470 # /tags/tag.1 (from /trunk:10)
470 # /tags/tag.1 (from /trunk:10)
471 # /tags/tag.1/foo (from /branches/foo:12)
471 # /tags/tag.1/foo (from /branches/foo:12)
472 # Here/tags/tag.1 discarded as well as its children.
472 # Here/tags/tag.1 discarded as well as its children.
473 # It happens with tools like cvs2svn. Such tags cannot
473 # It happens with tools like cvs2svn. Such tags cannot
474 # be represented in mercurial.
474 # be represented in mercurial.
475 addeds = dict((p, e.copyfrom_path) for p,e
475 addeds = dict((p, e.copyfrom_path) for p,e
476 in origpaths.iteritems() if e.action == 'A')
476 in origpaths.iteritems() if e.action == 'A')
477 badroots = set()
477 badroots = set()
478 for destroot in addeds:
478 for destroot in addeds:
479 for source, sourcerev, dest in pendings:
479 for source, sourcerev, dest in pendings:
480 if (not dest.startswith(destroot + '/')
480 if (not dest.startswith(destroot + '/')
481 or source.startswith(addeds[destroot] + '/')):
481 or source.startswith(addeds[destroot] + '/')):
482 continue
482 continue
483 badroots.add(destroot)
483 badroots.add(destroot)
484 break
484 break
485
485
486 for badroot in badroots:
486 for badroot in badroots:
487 pendings = [p for p in pendings if p[2] != badroot
487 pendings = [p for p in pendings if p[2] != badroot
488 and not p[2].startswith(badroot + '/')]
488 and not p[2].startswith(badroot + '/')]
489
489
490 # Tell tag renamings from tag creations
490 # Tell tag renamings from tag creations
491 remainings = []
491 remainings = []
492 for source, sourcerev, dest in pendings:
492 for source, sourcerev, dest in pendings:
493 tagname = dest.split('/')[-1]
493 tagname = dest.split('/')[-1]
494 if source.startswith(srctagspath):
494 if source.startswith(srctagspath):
495 remainings.append([source, sourcerev, tagname])
495 remainings.append([source, sourcerev, tagname])
496 continue
496 continue
497 if tagname in tags:
497 if tagname in tags:
498 # Keep the latest tag value
498 # Keep the latest tag value
499 continue
499 continue
500 # From revision may be fake, get one with changes
500 # From revision may be fake, get one with changes
501 try:
501 try:
502 tagid = self.latest(source, sourcerev)
502 tagid = self.latest(source, sourcerev)
503 if tagid and tagname not in tags:
503 if tagid and tagname not in tags:
504 tags[tagname] = tagid
504 tags[tagname] = tagid
505 except SvnPathNotFound:
505 except SvnPathNotFound:
506 # It happens when we are following directories
506 # It happens when we are following directories
507 # we assumed were copied with their parents
507 # we assumed were copied with their parents
508 # but were really created in the tag
508 # but were really created in the tag
509 # directory.
509 # directory.
510 pass
510 pass
511 pendings = remainings
511 pendings = remainings
512 tagspath = srctagspath
512 tagspath = srctagspath
513
513
514 except SubversionException:
514 except SubversionException:
515 self.ui.note(_('no tags found at revision %d\n') % start)
515 self.ui.note(_('no tags found at revision %d\n') % start)
516 return tags
516 return tags
517
517
518 def converted(self, rev, destrev):
518 def converted(self, rev, destrev):
519 if not self.wc:
519 if not self.wc:
520 return
520 return
521 if self.convertfp is None:
521 if self.convertfp is None:
522 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
522 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
523 'a')
523 'a')
524 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
524 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
525 self.convertfp.flush()
525 self.convertfp.flush()
526
526
527 # -- helper functions --
527 # -- helper functions --
528
528
529 def revid(self, revnum, module=None):
529 def revid(self, revnum, module=None):
530 if not module:
530 if not module:
531 module = self.module
531 module = self.module
532 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
532 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
533 revnum)
533 revnum)
534
534
535 def revnum(self, rev):
535 def revnum(self, rev):
536 return int(rev.split('@')[-1])
536 return int(rev.split('@')[-1])
537
537
538 def revsplit(self, rev):
538 def revsplit(self, rev):
539 url, revnum = rev.encode(self.encoding).rsplit('@', 1)
539 url, revnum = rev.encode(self.encoding).rsplit('@', 1)
540 revnum = int(revnum)
540 revnum = int(revnum)
541 parts = url.split('/', 1)
541 parts = url.split('/', 1)
542 uuid = parts.pop(0)[4:]
542 uuid = parts.pop(0)[4:]
543 mod = ''
543 mod = ''
544 if parts:
544 if parts:
545 mod = '/' + parts[0]
545 mod = '/' + parts[0]
546 return uuid, mod, revnum
546 return uuid, mod, revnum
547
547
548 def latest(self, path, stop=0):
548 def latest(self, path, stop=0):
549 """Find the latest revid affecting path, up to stop. It may return
549 """Find the latest revid affecting path, up to stop. It may return
550 a revision in a different module, since a branch may be moved without
550 a revision in a different module, since a branch may be moved without
551 a change being reported. Return None if the computed module does not
551 a change being reported. Return None if the computed module does not
552 belong to the rootmodule subtree.
552 belong to the rootmodule subtree.
553 """
553 """
554 if not path.startswith(self.rootmodule):
554 if not path.startswith(self.rootmodule):
555 # Requests on foreign branches may be forbidden at server level
555 # Requests on foreign branches may be forbidden at server level
556 self.ui.debug(_('ignoring foreign branch %r\n') % path)
556 self.ui.debug(_('ignoring foreign branch %r\n') % path)
557 return None
557 return None
558
558
559 if not stop:
559 if not stop:
560 stop = svn.ra.get_latest_revnum(self.ra)
560 stop = svn.ra.get_latest_revnum(self.ra)
561 try:
561 try:
562 prevmodule = self.reparent('')
562 prevmodule = self.reparent('')
563 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
563 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
564 self.reparent(prevmodule)
564 self.reparent(prevmodule)
565 except SubversionException:
565 except SubversionException:
566 dirent = None
566 dirent = None
567 if not dirent:
567 if not dirent:
568 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
568 raise SvnPathNotFound(_('%s not found up to revision %d') % (path, stop))
569
569
570 # stat() gives us the previous revision on this line of
570 # stat() gives us the previous revision on this line of
571 # development, but it might be in *another module*. Fetch the
571 # development, but it might be in *another module*. Fetch the
572 # log and detect renames down to the latest revision.
572 # log and detect renames down to the latest revision.
573 stream = self._getlog([path], stop, dirent.created_rev)
573 stream = self._getlog([path], stop, dirent.created_rev)
574 try:
574 try:
575 for entry in stream:
575 for entry in stream:
576 paths, revnum, author, date, message = entry
576 paths, revnum, author, date, message = entry
577 if revnum <= dirent.created_rev:
577 if revnum <= dirent.created_rev:
578 break
578 break
579
579
580 for p in paths:
580 for p in paths:
581 if not path.startswith(p) or not paths[p].copyfrom_path:
581 if not path.startswith(p) or not paths[p].copyfrom_path:
582 continue
582 continue
583 newpath = paths[p].copyfrom_path + path[len(p):]
583 newpath = paths[p].copyfrom_path + path[len(p):]
584 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
584 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
585 (path, newpath, revnum))
585 (path, newpath, revnum))
586 path = newpath
586 path = newpath
587 break
587 break
588 finally:
588 finally:
589 stream.close()
589 stream.close()
590
590
591 if not path.startswith(self.rootmodule):
591 if not path.startswith(self.rootmodule):
592 self.ui.debug(_('ignoring foreign branch %r\n') % path)
592 self.ui.debug(_('ignoring foreign branch %r\n') % path)
593 return None
593 return None
594 return self.revid(dirent.created_rev, path)
594 return self.revid(dirent.created_rev, path)
595
595
596 def get_blacklist(self):
596 def get_blacklist(self):
597 """Avoid certain revision numbers.
597 """Avoid certain revision numbers.
598 It is not uncommon for two nearby revisions to cancel each other
598 It is not uncommon for two nearby revisions to cancel each other
599 out, e.g. 'I copied trunk into a subdirectory of itself instead
599 out, e.g. 'I copied trunk into a subdirectory of itself instead
600 of making a branch'. The converted repository is significantly
600 of making a branch'. The converted repository is significantly
601 smaller if we ignore such revisions."""
601 smaller if we ignore such revisions."""
602 self.blacklist = set()
602 self.blacklist = set()
603 blacklist = self.blacklist
603 blacklist = self.blacklist
604 for line in file("blacklist.txt", "r"):
604 for line in file("blacklist.txt", "r"):
605 if not line.startswith("#"):
605 if not line.startswith("#"):
606 try:
606 try:
607 svn_rev = int(line.strip())
607 svn_rev = int(line.strip())
608 blacklist.add(svn_rev)
608 blacklist.add(svn_rev)
609 except ValueError:
609 except ValueError:
610 pass # not an integer or a comment
610 pass # not an integer or a comment
611
611
612 def is_blacklisted(self, svn_rev):
612 def is_blacklisted(self, svn_rev):
613 return svn_rev in self.blacklist
613 return svn_rev in self.blacklist
614
614
615 def reparent(self, module):
615 def reparent(self, module):
616 """Reparent the svn transport and return the previous parent."""
616 """Reparent the svn transport and return the previous parent."""
617 if self.prevmodule == module:
617 if self.prevmodule == module:
618 return module
618 return module
619 svnurl = self.baseurl + urllib.quote(module)
619 svnurl = self.baseurl + urllib.quote(module)
620 prevmodule = self.prevmodule
620 prevmodule = self.prevmodule
621 if prevmodule is None:
621 if prevmodule is None:
622 prevmodule = ''
622 prevmodule = ''
623 self.ui.debug(_("reparent to %s\n") % svnurl)
623 self.ui.debug(_("reparent to %s\n") % svnurl)
624 svn.ra.reparent(self.ra, svnurl)
624 svn.ra.reparent(self.ra, svnurl)
625 self.prevmodule = module
625 self.prevmodule = module
626 return prevmodule
626 return prevmodule
627
627
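# expandpaths() turns the changed-paths map reported by svn log for one
# revision into a flat list of affected files plus a copy map, expanding
# deleted or copied directories into their individual file entries.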
628 def expandpaths(self, rev, paths, parents):
628 def expandpaths(self, rev, paths, parents):
629 entries = []
629 entries = []
630 # Map of entrypath, revision for finding source of deleted
630 # Map of entrypath, revision for finding source of deleted
631 # revisions.
631 # revisions.
632 copyfrom = {}
632 copyfrom = {}
633 copies = {}
633 copies = {}
634
634
635 new_module, revnum = self.revsplit(rev)[1:]
635 new_module, revnum = self.revsplit(rev)[1:]
636 if new_module != self.module:
636 if new_module != self.module:
637 self.module = new_module
637 self.module = new_module
638 self.reparent(self.module)
638 self.reparent(self.module)
639
639
640 for path, ent in paths:
640 for path, ent in paths:
641 entrypath = self.getrelpath(path)
641 entrypath = self.getrelpath(path)
642 entry = entrypath.decode(self.encoding)
642 entry = entrypath.decode(self.encoding)
643
643
644 kind = self._checkpath(entrypath, revnum)
644 kind = self._checkpath(entrypath, revnum)
645 if kind == svn.core.svn_node_file:
645 if kind == svn.core.svn_node_file:
646 entries.append(self.recode(entry))
646 entries.append(self.recode(entry))
647 if not ent.copyfrom_path or not parents:
647 if not ent.copyfrom_path or not parents:
648 continue
648 continue
649 # Copy sources not in parent revisions cannot be
649 # Copy sources not in parent revisions cannot be
650 # represented, ignore their origin for now
650 # represented, ignore their origin for now
651 pmodule, prevnum = self.revsplit(parents[0])[1:]
651 pmodule, prevnum = self.revsplit(parents[0])[1:]
652 if ent.copyfrom_rev < prevnum:
652 if ent.copyfrom_rev < prevnum:
653 continue
653 continue
654 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
654 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
655 if not copyfrom_path:
655 if not copyfrom_path:
656 continue
656 continue
657 self.ui.debug(_("copied to %s from %s@%s\n") %
657 self.ui.debug(_("copied to %s from %s@%s\n") %
658 (entrypath, copyfrom_path, ent.copyfrom_rev))
658 (entrypath, copyfrom_path, ent.copyfrom_rev))
659 copies[self.recode(entry)] = self.recode(copyfrom_path)
659 copies[self.recode(entry)] = self.recode(copyfrom_path)
660 elif kind == 0: # gone, but had better be a deleted *file*
660 elif kind == 0: # gone, but had better be a deleted *file*
661 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
661 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
662
662
663 # if a branch is created but entries are removed in
663 # if a branch is created but entries are removed in
664 # the same changeset, get the right fromrev
664 # the same changeset, get the right fromrev
665 # parents cannot be empty here, you cannot remove
665 # parents cannot be empty here, you cannot remove
666 # things from a root revision.
666 # things from a root revision.
667 uuid, old_module, fromrev = self.revsplit(parents[0])
667 uuid, old_module, fromrev = self.revsplit(parents[0])
668
668
669 basepath = old_module + "/" + self.getrelpath(path)
669 basepath = old_module + "/" + self.getrelpath(path)
670 entrypath = basepath
670 entrypath = basepath
671
671
672 def lookup_parts(p):
672 def lookup_parts(p):
673 rc = None
673 rc = None
674 parts = p.split("/")
674 parts = p.split("/")
675 for i in range(len(parts)):
675 for i in range(len(parts)):
676 part = "/".join(parts[:i])
676 part = "/".join(parts[:i])
677 info = part, copyfrom.get(part, None)
677 info = part, copyfrom.get(part, None)
678 if info[1] is not None:
678 if info[1] is not None:
679 self.ui.debug(_("found parent directory %s\n") % info[1])
679 self.ui.debug(_("found parent directory %s\n") % info[1])
680 rc = info
680 rc = info
681 return rc
681 return rc
682
682
683 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
683 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
684
684
685 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
685 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
686
686
687 # need to remove fragment from lookup_parts and
687 # need to remove fragment from lookup_parts and
688 # replace with copyfrom_path
688 # replace with copyfrom_path
689 if frompath is not None:
689 if frompath is not None:
690 self.ui.debug(_("munge-o-matic\n"))
690 self.ui.debug(_("munge-o-matic\n"))
691 self.ui.debug(entrypath + '\n')
691 self.ui.debug(entrypath + '\n')
692 self.ui.debug(entrypath[len(frompath):] + '\n')
692 self.ui.debug(entrypath[len(frompath):] + '\n')
693 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
693 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
694 fromrev = froment.copyfrom_rev
694 fromrev = froment.copyfrom_rev
695 self.ui.debug(_("info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
695 self.ui.debug(_("info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
696
696
697 # We can avoid the reparent calls if the module has
697 # We can avoid the reparent calls if the module has
698 # not changed, but it is probably not worth the pain.
698 # not changed, but it is probably not worth the pain.
699 prevmodule = self.reparent('')
699 prevmodule = self.reparent('')
700 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
700 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
701 self.reparent(prevmodule)
701 self.reparent(prevmodule)
702
702
703 if fromkind == svn.core.svn_node_file: # a deleted file
703 if fromkind == svn.core.svn_node_file: # a deleted file
704 entries.append(self.recode(entry))
704 entries.append(self.recode(entry))
705 elif fromkind == svn.core.svn_node_dir:
705 elif fromkind == svn.core.svn_node_dir:
706 # print "Deleted/moved non-file:", revnum, path, ent
706 # print "Deleted/moved non-file:", revnum, path, ent
707 # children = self._find_children(path, revnum - 1)
707 # children = self._find_children(path, revnum - 1)
708 # print ("find children %s@%d from %d action %s" %
708 # print ("find children %s@%d from %d action %s" %
709 # (path, revnum, ent.copyfrom_rev, ent.action))
709 # (path, revnum, ent.copyfrom_rev, ent.action))
710 # Sometimes this is tricky. For example: in
710 # Sometimes this is tricky. For example: in
711 # The Subversion Repository revision 6940 a dir
711 # The Subversion Repository revision 6940 a dir
712 # was copied and one of its files was deleted
712 # was copied and one of its files was deleted
713 # from the new location in the same commit. This
713 # from the new location in the same commit. This
714 # code can't deal with that yet.
714 # code can't deal with that yet.
715 if ent.action == 'C':
715 if ent.action == 'C':
716 children = self._find_children(path, fromrev)
716 children = self._find_children(path, fromrev)
717 else:
717 else:
718 oroot = entrypath.strip('/')
718 oroot = entrypath.strip('/')
719 nroot = path.strip('/')
719 nroot = path.strip('/')
720 children = self._find_children(oroot, fromrev)
720 children = self._find_children(oroot, fromrev)
721 children = [s.replace(oroot,nroot) for s in children]
721 children = [s.replace(oroot,nroot) for s in children]
722 # Mark all [files, not directories] as deleted.
722 # Mark all [files, not directories] as deleted.
723 for child in children:
723 for child in children:
724 # Can we move a child directory and its
724 # Can we move a child directory and its
725 # parent in the same commit? (probably can). Could
725 # parent in the same commit? (probably can). Could
726 # cause problems if instead of revnum -1,
726 # cause problems if instead of revnum -1,
727 # we have to look in (copyfrom_path, revnum - 1)
727 # we have to look in (copyfrom_path, revnum - 1)
728 entrypath = self.getrelpath("/" + child, module=old_module)
728 entrypath = self.getrelpath("/" + child, module=old_module)
729 if entrypath:
729 if entrypath:
730 entry = self.recode(entrypath.decode(self.encoding))
730 entry = self.recode(entrypath.decode(self.encoding))
731 if entry in copies:
731 if entry in copies:
732 # deleted file within a copy
732 # deleted file within a copy
733 del copies[entry]
733 del copies[entry]
734 else:
734 else:
735 entries.append(entry)
735 entries.append(entry)
736 else:
736 else:
737 self.ui.debug(_('unknown path in revision %d: %s\n') % \
737 self.ui.debug(_('unknown path in revision %d: %s\n') % \
738 (revnum, path))
738 (revnum, path))
739 elif kind == svn.core.svn_node_dir:
739 elif kind == svn.core.svn_node_dir:
740 # Should probably synthesize normal file entries
740 # Should probably synthesize normal file entries
741 # and handle as above to clean up copy/rename handling.
741 # and handle as above to clean up copy/rename handling.
742
742
743 # If the directory just had a prop change,
743 # If the directory just had a prop change,
744 # then we shouldn't need to look for its children.
744 # then we shouldn't need to look for its children.
745 if ent.action == 'M':
745 if ent.action == 'M':
746 continue
746 continue
747
747
748 # Also this could create duplicate entries. Not sure
748 # Also this could create duplicate entries. Not sure
749 # whether this will matter. Maybe should make entries a set.
749 # whether this will matter. Maybe should make entries a set.
750 # print "Changed directory", revnum, path, ent.action, \
750 # print "Changed directory", revnum, path, ent.action, \
751 # ent.copyfrom_path, ent.copyfrom_rev
751 # ent.copyfrom_path, ent.copyfrom_rev
752 # This will fail if a directory was copied
752 # This will fail if a directory was copied
753 # from another branch and then some of its files
753 # from another branch and then some of its files
754 # were deleted in the same transaction.
754 # were deleted in the same transaction.
755 children = sorted(self._find_children(path, revnum))
755 children = sorted(self._find_children(path, revnum))
756 for child in children:
756 for child in children:
757 # Can we move a child directory and its
757 # Can we move a child directory and its
758 # parent in the same commit? (probably can). Could
758 # parent in the same commit? (probably can). Could
759 # cause problems if instead of revnum -1,
759 # cause problems if instead of revnum -1,
760 # we have to look in (copyfrom_path, revnum - 1)
760 # we have to look in (copyfrom_path, revnum - 1)
761 entrypath = self.getrelpath("/" + child)
761 entrypath = self.getrelpath("/" + child)
762 # print child, self.module, entrypath
762 # print child, self.module, entrypath
763 if entrypath:
763 if entrypath:
764 # Need to filter out directories here...
764 # Need to filter out directories here...
765 kind = self._checkpath(entrypath, revnum)
765 kind = self._checkpath(entrypath, revnum)
766 if kind != svn.core.svn_node_dir:
766 if kind != svn.core.svn_node_dir:
767 entries.append(self.recode(entrypath))
767 entries.append(self.recode(entrypath))
768
768
769 # Copies here (must copy all from source). Probably not
769 # Copies here (must copy all from source). Probably not
770 # a real problem for us if the source does not exist
770 # a real problem for us if the source does not exist
771 if not ent.copyfrom_path or not parents:
771 if not ent.copyfrom_path or not parents:
772 continue
772 continue
773 # Copy sources not in parent revisions cannot be
773 # Copy sources not in parent revisions cannot be
774 # represented, ignore their origin for now
774 # represented, ignore their origin for now
775 pmodule, prevnum = self.revsplit(parents[0])[1:]
775 pmodule, prevnum = self.revsplit(parents[0])[1:]
776 if ent.copyfrom_rev < prevnum:
776 if ent.copyfrom_rev < prevnum:
777 continue
777 continue
778 copyfrompath = ent.copyfrom_path.decode(self.encoding)
778 copyfrompath = ent.copyfrom_path.decode(self.encoding)
779 copyfrompath = self.getrelpath(copyfrompath, pmodule)
779 copyfrompath = self.getrelpath(copyfrompath, pmodule)
780 if not copyfrompath:
780 if not copyfrompath:
781 continue
781 continue
782 copyfrom[path] = ent
782 copyfrom[path] = ent
783 self.ui.debug(_("mark %s came from %s:%d\n")
783 self.ui.debug(_("mark %s came from %s:%d\n")
784 % (path, copyfrompath, ent.copyfrom_rev))
784 % (path, copyfrompath, ent.copyfrom_rev))
785 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
785 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
786 children.sort()
786 children.sort()
787 for child in children:
787 for child in children:
788 entrypath = self.getrelpath("/" + child, pmodule)
788 entrypath = self.getrelpath("/" + child, pmodule)
789 if not entrypath:
789 if not entrypath:
790 continue
790 continue
791 entry = entrypath.decode(self.encoding)
791 entry = entrypath.decode(self.encoding)
792 copytopath = path + entry[len(copyfrompath):]
792 copytopath = path + entry[len(copyfrompath):]
793 copytopath = self.getrelpath(copytopath)
793 copytopath = self.getrelpath(copytopath)
794 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
794 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
795
795
796 return (list(set(entries)), copies)
796 return (list(set(entries)), copies)
797
797
798 def _fetch_revisions(self, from_revnum, to_revnum):
798 def _fetch_revisions(self, from_revnum, to_revnum):
799 if from_revnum < to_revnum:
799 if from_revnum < to_revnum:
800 from_revnum, to_revnum = to_revnum, from_revnum
800 from_revnum, to_revnum = to_revnum, from_revnum
801
801
802 self.child_cset = None
802 self.child_cset = None
803
803
804 def parselogentry(orig_paths, revnum, author, date, message):
804 def parselogentry(orig_paths, revnum, author, date, message):
805 """Return the parsed commit object or None, and True if
805 """Return the parsed commit object or None, and True if
806 the revision is a branch root.
806 the revision is a branch root.
807 """
807 """
808 self.ui.debug(_("parsing revision %d (%d changes)\n") %
808 self.ui.debug(_("parsing revision %d (%d changes)\n") %
809 (revnum, len(orig_paths)))
809 (revnum, len(orig_paths)))
810
810
811 branched = False
811 branched = False
812 rev = self.revid(revnum)
812 rev = self.revid(revnum)
813 # branch log might return entries for a parent we already have
813 # branch log might return entries for a parent we already have
814
814
815 if rev in self.commits or revnum < to_revnum:
815 if rev in self.commits or revnum < to_revnum:
816 return None, branched
816 return None, branched
817
817
818 parents = []
818 parents = []
819 # check whether this revision is the start of a branch or part
819 # check whether this revision is the start of a branch or part
820 # of a branch renaming
820 # of a branch renaming
821 orig_paths = sorted(orig_paths.iteritems())
821 orig_paths = sorted(orig_paths.iteritems())
822 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
822 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
823 if root_paths:
823 if root_paths:
824 path, ent = root_paths[-1]
824 path, ent = root_paths[-1]
825 if ent.copyfrom_path:
825 if ent.copyfrom_path:
826 branched = True
826 branched = True
827 newpath = ent.copyfrom_path + self.module[len(path):]
827 newpath = ent.copyfrom_path + self.module[len(path):]
828 # ent.copyfrom_rev may not be the actual last revision
828 # ent.copyfrom_rev may not be the actual last revision
829 previd = self.latest(newpath, ent.copyfrom_rev)
829 previd = self.latest(newpath, ent.copyfrom_rev)
830 if previd is not None:
830 if previd is not None:
831 prevmodule, prevnum = self.revsplit(previd)[1:]
831 prevmodule, prevnum = self.revsplit(previd)[1:]
832 if prevnum >= self.startrev:
832 if prevnum >= self.startrev:
833 parents = [previd]
833 parents = [previd]
834 self.ui.note(_('found parent of branch %s at %d: %s\n') %
834 self.ui.note(_('found parent of branch %s at %d: %s\n') %
835 (self.module, prevnum, prevmodule))
835 (self.module, prevnum, prevmodule))
836 else:
836 else:
837 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
837 self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
838
838
839 paths = []
839 paths = []
840 # filter out unrelated paths
840 # filter out unrelated paths
841 for path, ent in orig_paths:
841 for path, ent in orig_paths:
842 if self.getrelpath(path) is None:
842 if self.getrelpath(path) is None:
843 continue
843 continue
844 paths.append((path, ent))
844 paths.append((path, ent))
845
845
846 # Example SVN datetime. Includes microseconds.
846 # Example SVN datetime. Includes microseconds.
847 # ISO-8601 conformant
847 # ISO-8601 conformant
848 # '2007-01-04T17:35:00.902377Z'
848 # '2007-01-04T17:35:00.902377Z'
849 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
849 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
850
850
851 log = message and self.recode(message) or ''
851 log = message and self.recode(message) or ''
852 author = author and self.recode(author) or ''
852 author = author and self.recode(author) or ''
853 try:
853 try:
854 branch = self.module.split("/")[-1]
854 branch = self.module.split("/")[-1]
855 if branch == 'trunk':
855 if branch == 'trunk':
856 branch = ''
856 branch = ''
857 except IndexError:
857 except IndexError:
858 branch = None
858 branch = None
859
859
860 cset = commit(author=author,
860 cset = commit(author=author,
861 date=util.datestr(date),
861 date=util.datestr(date),
862 desc=log,
862 desc=log,
863 parents=parents,
863 parents=parents,
864 branch=branch,
864 branch=branch,
865 rev=rev.encode('utf-8'))
865 rev=rev.encode('utf-8'))
866
866
867 self.commits[rev] = cset
867 self.commits[rev] = cset
868 # The parents list is *shared* among self.paths and the
868 # The parents list is *shared* among self.paths and the
869 # commit object. Both will be updated below.
869 # commit object. Both will be updated below.
870 self.paths[rev] = (paths, cset.parents)
870 self.paths[rev] = (paths, cset.parents)
871 if self.child_cset and not self.child_cset.parents:
871 if self.child_cset and not self.child_cset.parents:
872 self.child_cset.parents[:] = [rev]
872 self.child_cset.parents[:] = [rev]
873 self.child_cset = cset
873 self.child_cset = cset
874 return cset, branched
874 return cset, branched
875
875
876 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
876 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
877 (self.module, from_revnum, to_revnum))
877 (self.module, from_revnum, to_revnum))
878
878
879 try:
879 try:
880 firstcset = None
880 firstcset = None
881 lastonbranch = False
881 lastonbranch = False
882 stream = self._getlog([self.module], from_revnum, to_revnum)
882 stream = self._getlog([self.module], from_revnum, to_revnum)
883 try:
883 try:
884 for entry in stream:
884 for entry in stream:
885 paths, revnum, author, date, message = entry
885 paths, revnum, author, date, message = entry
886 if revnum < self.startrev:
886 if revnum < self.startrev:
887 lastonbranch = True
887 lastonbranch = True
888 break
888 break
889 if self.is_blacklisted(revnum):
889 if self.is_blacklisted(revnum):
890 self.ui.note(_('skipping blacklisted revision %d\n')
890 self.ui.note(_('skipping blacklisted revision %d\n')
891 % revnum)
891 % revnum)
892 continue
892 continue
893 if not paths:
893 if not paths:
894 self.ui.debug(_('revision %d has no entries\n') % revnum)
894 self.ui.debug(_('revision %d has no entries\n') % revnum)
895 continue
895 continue
896 cset, lastonbranch = parselogentry(paths, revnum, author,
896 cset, lastonbranch = parselogentry(paths, revnum, author,
897 date, message)
897 date, message)
898 if cset:
898 if cset:
899 firstcset = cset
899 firstcset = cset
900 if lastonbranch:
900 if lastonbranch:
901 break
901 break
902 finally:
902 finally:
903 stream.close()
903 stream.close()
904
904
905 if not lastonbranch and firstcset and not firstcset.parents:
905 if not lastonbranch and firstcset and not firstcset.parents:
906 # The first revision of the sequence (the last fetched one)
906 # The first revision of the sequence (the last fetched one)
907 # has invalid parents if not a branch root. Find the parent
907 # has invalid parents if not a branch root. Find the parent
908 # revision now, if any.
908 # revision now, if any.
909 try:
909 try:
910 firstrevnum = self.revnum(firstcset.rev)
910 firstrevnum = self.revnum(firstcset.rev)
911 if firstrevnum > 1:
911 if firstrevnum > 1:
912 latest = self.latest(self.module, firstrevnum - 1)
912 latest = self.latest(self.module, firstrevnum - 1)
913 if latest:
913 if latest:
914 firstcset.parents.append(latest)
914 firstcset.parents.append(latest)
915 except SvnPathNotFound:
915 except SvnPathNotFound:
916 pass
916 pass
917 except SubversionException, (inst, num):
917 except SubversionException, (inst, num):
918 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
918 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
919 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
919 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
920 raise
920 raise
921
921
922 def _getfile(self, file, rev):
922 def _getfile(self, file, rev):
923 # TODO: ra.get_file transmits the whole file instead of diffs.
923 # TODO: ra.get_file transmits the whole file instead of diffs.
924 mode = ''
924 mode = ''
925 try:
925 try:
926 new_module, revnum = self.revsplit(rev)[1:]
926 new_module, revnum = self.revsplit(rev)[1:]
927 if self.module != new_module:
927 if self.module != new_module:
928 self.module = new_module
928 self.module = new_module
929 self.reparent(self.module)
929 self.reparent(self.module)
930 io = StringIO()
930 io = StringIO()
931 info = svn.ra.get_file(self.ra, file, revnum, io)
931 info = svn.ra.get_file(self.ra, file, revnum, io)
932 data = io.getvalue()
932 data = io.getvalue()
933 # ra.get_file() seems to keep a reference on the input buffer
933 # ra.get_file() seems to keep a reference on the input buffer
934 # preventing collection. Release it explicitly.
934 # preventing collection. Release it explicitly.
935 io.close()
935 io.close()
936 if isinstance(info, list):
936 if isinstance(info, list):
937 info = info[-1]
937 info = info[-1]
938 mode = ("svn:executable" in info) and 'x' or ''
938 mode = ("svn:executable" in info) and 'x' or ''
939 mode = ("svn:special" in info) and 'l' or mode
939 mode = ("svn:special" in info) and 'l' or mode
940 except SubversionException, e:
940 except SubversionException, e:
941 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
941 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
942 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
942 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
943 if e.apr_err in notfound: # File not found
943 if e.apr_err in notfound: # File not found
944 raise IOError()
944 raise IOError()
945 raise
945 raise
946 if mode == 'l':
946 if mode == 'l':
947 link_prefix = "link "
947 link_prefix = "link "
948 if data.startswith(link_prefix):
948 if data.startswith(link_prefix):
949 data = data[len(link_prefix):]
949 data = data[len(link_prefix):]
950 return data, mode
950 return data, mode
951
951
952 def _find_children(self, path, revnum):
952 def _find_children(self, path, revnum):
953 path = path.strip('/')
953 path = path.strip('/')
954 pool = Pool()
954 pool = Pool()
955 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
955 rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
956 return ['%s/%s' % (path, x) for x in
956 return ['%s/%s' % (path, x) for x in
957 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
957 svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
958
958
959 def getrelpath(self, path, module=None):
959 def getrelpath(self, path, module=None):
960 if module is None:
960 if module is None:
961 module = self.module
961 module = self.module
962 # Given the repository url of this wc, say
962 # Given the repository url of this wc, say
963 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
963 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
964 # extract the "entry" portion (a relative path) from what
964 # extract the "entry" portion (a relative path) from what
965 # svn log --xml says, ie
965 # svn log --xml says, ie
966 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
966 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
967 # that is to say "tests/PloneTestCase.py"
967 # that is to say "tests/PloneTestCase.py"
968 if path.startswith(module):
968 if path.startswith(module):
969 relative = path.rstrip('/')[len(module):]
969 relative = path.rstrip('/')[len(module):]
970 if relative.startswith('/'):
970 if relative.startswith('/'):
971 return relative[1:]
971 return relative[1:]
972 elif relative == '':
972 elif relative == '':
973 return relative
973 return relative
974
974
975 # The path is outside our tracked tree...
975 # The path is outside our tracked tree...
976 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
976 self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
977 return None
977 return None
978
978
979 def _checkpath(self, path, revnum):
979 def _checkpath(self, path, revnum):
980 # ra.check_path does not like leading slashes very much, it leads
980 # ra.check_path does not like leading slashes very much, it leads
981 # to PROPFIND subversion errors
981 # to PROPFIND subversion errors
982 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
982 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
983
983
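# _getlog() does not read the log in-process: it serializes its arguments
# and pipes them to a child "hg debugsvnlog" process, then wraps the child's
# stdout in a log entry stream (presumably to keep the long-running,
# leak-prone Subversion log calls out of the converter process).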
984 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
984 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
985 strict_node_history=False):
985 strict_node_history=False):
986 # Normalize path names, svn >= 1.5 only wants paths relative to
986 # Normalize path names, svn >= 1.5 only wants paths relative to
987 # supplied URL
987 # supplied URL
988 relpaths = []
988 relpaths = []
989 for p in paths:
989 for p in paths:
990 if not p.startswith('/'):
990 if not p.startswith('/'):
991 p = self.module + '/' + p
991 p = self.module + '/' + p
992 relpaths.append(p.strip('/'))
992 relpaths.append(p.strip('/'))
993 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
993 args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
994 strict_node_history]
994 strict_node_history]
995 arg = encodeargs(args)
995 arg = encodeargs(args)
996 hgexe = util.hgexecutable()
996 hgexe = util.hgexecutable()
997 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
997 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
998 stdin, stdout = util.popen2(cmd)
998 stdin, stdout = util.popen2(cmd)
999 stdin.write(arg)
999 stdin.write(arg)
1000 stdin.close()
1000 stdin.close()
1001 return logstream(stdout)
1001 return logstream(stdout)
1002
1002
1003 pre_revprop_change = '''#!/bin/sh
1003 pre_revprop_change = '''#!/bin/sh
1004
1004
1005 REPOS="$1"
1005 REPOS="$1"
1006 REV="$2"
1006 REV="$2"
1007 USER="$3"
1007 USER="$3"
1008 PROPNAME="$4"
1008 PROPNAME="$4"
1009 ACTION="$5"
1009 ACTION="$5"
1010
1010
1011 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1011 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1012 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1012 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1013 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1013 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1014
1014
1015 echo "Changing prohibited revision property" >&2
1015 echo "Changing prohibited revision property" >&2
1016 exit 1
1016 exit 1
1017 '''
1017 '''
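# The shell script above is installed as the pre-revprop-change hook of
# repositories created by svn_sink (see its constructor below), so that the
# hg:convert-rev and hg:convert-branch revision properties can be set and
# svn:log edited, while all other revision property changes stay forbidden.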
1018
1018
1019 class svn_sink(converter_sink, commandline):
1019 class svn_sink(converter_sink, commandline):
1020 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1020 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1021
1021
1022 def prerun(self):
1022 def prerun(self):
1023 if self.wc:
1023 if self.wc:
1024 os.chdir(self.wc)
1024 os.chdir(self.wc)
1025
1025
1026 def postrun(self):
1026 def postrun(self):
1027 if self.wc:
1027 if self.wc:
1028 os.chdir(self.cwd)
1028 os.chdir(self.cwd)
1029
1029
1030 def join(self, name):
1030 def join(self, name):
1031 return os.path.join(self.wc, '.svn', name)
1031 return os.path.join(self.wc, '.svn', name)
1032
1032
1033 def revmapfile(self):
1033 def revmapfile(self):
1034 return self.join('hg-shamap')
1034 return self.join('hg-shamap')
1035
1035
1036 def authorfile(self):
1036 def authorfile(self):
1037 return self.join('hg-authormap')
1037 return self.join('hg-authormap')
1038
1038
1039 def __init__(self, ui, path):
1039 def __init__(self, ui, path):
1040 converter_sink.__init__(self, ui, path)
1040 converter_sink.__init__(self, ui, path)
1041 commandline.__init__(self, ui, 'svn')
1041 commandline.__init__(self, ui, 'svn')
1042 self.delete = []
1042 self.delete = []
1043 self.setexec = []
1043 self.setexec = []
1044 self.delexec = []
1044 self.delexec = []
1045 self.copies = []
1045 self.copies = []
1046 self.wc = None
1046 self.wc = None
1047 self.cwd = os.getcwd()
1047 self.cwd = os.getcwd()
1048
1048
1049 path = os.path.realpath(path)
1049 path = os.path.realpath(path)
1050
1050
1051 created = False
1051 created = False
1052 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1052 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1053 self.wc = path
1053 self.wc = path
1054 self.run0('update')
1054 self.run0('update')
1055 else:
1055 else:
1056 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1056 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1057
1057
1058 if os.path.isdir(os.path.dirname(path)):
1058 if os.path.isdir(os.path.dirname(path)):
1059 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1059 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1060 ui.status(_('initializing svn repo %r\n') %
1060 ui.status(_('initializing svn repo %r\n') %
1061 os.path.basename(path))
1061 os.path.basename(path))
1062 commandline(ui, 'svnadmin').run0('create', path)
1062 commandline(ui, 'svnadmin').run0('create', path)
1063 created = path
1063 created = path
1064 path = util.normpath(path)
1064 path = util.normpath(path)
1065 if not path.startswith('/'):
1065 if not path.startswith('/'):
1066 path = '/' + path
1066 path = '/' + path
1067 path = 'file://' + path
1067 path = 'file://' + path
1068
1068
1069 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
1069 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
1070 self.run0('checkout', path, wcpath)
1070 self.run0('checkout', path, wcpath)
1071
1071
1072 self.wc = wcpath
1072 self.wc = wcpath
1073 self.opener = util.opener(self.wc)
1073 self.opener = util.opener(self.wc)
1074 self.wopener = util.opener(self.wc)
1074 self.wopener = util.opener(self.wc)
1075 self.childmap = mapfile(ui, self.join('hg-childmap'))
1075 self.childmap = mapfile(ui, self.join('hg-childmap'))
1076 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1076 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
1077
1077
1078 if created:
1078 if created:
1079 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1079 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1080 fp = open(hook, 'w')
1080 fp = open(hook, 'w')
1081 fp.write(pre_revprop_change)
1081 fp.write(pre_revprop_change)
1082 fp.close()
1082 fp.close()
1083 util.set_flags(hook, False, True)
1083 util.set_flags(hook, False, True)
1084
1084
1085 xport = transport.SvnRaTransport(url=geturl(path))
1085 xport = transport.SvnRaTransport(url=geturl(path))
1086 self.uuid = svn.ra.get_uuid(xport.ra)
1086 self.uuid = svn.ra.get_uuid(xport.ra)
1087
1087
1088 def wjoin(self, *names):
1088 def wjoin(self, *names):
1089 return os.path.join(self.wc, *names)
1089 return os.path.join(self.wc, *names)
1090
1090
1091 def putfile(self, filename, flags, data):
1091 def putfile(self, filename, flags, data):
1092 if 'l' in flags:
1092 if 'l' in flags:
1093 self.wopener.symlink(data, filename)
1093 self.wopener.symlink(data, filename)
1094 else:
1094 else:
1095 try:
1095 try:
1096 if os.path.islink(self.wjoin(filename)):
1096 if os.path.islink(self.wjoin(filename)):
1097 os.unlink(filename)
1097 os.unlink(filename)
1098 except OSError:
1098 except OSError:
1099 pass
1099 pass
1100 self.wopener(filename, 'w').write(data)
1100 self.wopener(filename, 'w').write(data)
1101
1101
1102 if self.is_exec:
1102 if self.is_exec:
1103 was_exec = self.is_exec(self.wjoin(filename))
1103 was_exec = self.is_exec(self.wjoin(filename))
1104 else:
1104 else:
1105 # On filesystems not supporting execute-bit, there is no way
1105 # On filesystems not supporting execute-bit, there is no way
1106 # to know if it is set except by asking Subversion. Setting it
1106 # to know if it is set except by asking Subversion. Setting it
1107 # systematically is just as expensive and much simpler.
1107 # systematically is just as expensive and much simpler.
1108 was_exec = 'x' not in flags
1108 was_exec = 'x' not in flags
1109
1109
1110 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1110 util.set_flags(self.wjoin(filename), False, 'x' in flags)
1111 if was_exec:
1111 if was_exec:
1112 if 'x' not in flags:
1112 if 'x' not in flags:
1113 self.delexec.append(filename)
1113 self.delexec.append(filename)
1114 else:
1114 else:
1115 if 'x' in flags:
1115 if 'x' in flags:
1116 self.setexec.append(filename)
1116 self.setexec.append(filename)
1117
1117
1118 def _copyfile(self, source, dest):
1118 def _copyfile(self, source, dest):
1119 # SVN's copy command pukes if the destination file exists, but
1119 # SVN's copy command pukes if the destination file exists, but
1120 # our copyfile method expects to record a copy that has
1120 # our copyfile method expects to record a copy that has
1121 # already occurred. Cross the semantic gap.
1121 # already occurred. Cross the semantic gap.
1122 wdest = self.wjoin(dest)
1122 wdest = self.wjoin(dest)
1123 exists = os.path.exists(wdest)
1123 exists = os.path.exists(wdest)
1124 if exists:
1124 if exists:
1125 fd, tempname = tempfile.mkstemp(
1125 fd, tempname = tempfile.mkstemp(
1126 prefix='hg-copy-', dir=os.path.dirname(wdest))
1126 prefix='hg-copy-', dir=os.path.dirname(wdest))
1127 os.close(fd)
1127 os.close(fd)
1128 os.unlink(tempname)
1128 os.unlink(tempname)
1129 os.rename(wdest, tempname)
1129 os.rename(wdest, tempname)
1130 try:
1130 try:
1131 self.run0('copy', source, dest)
1131 self.run0('copy', source, dest)
1132 finally:
1132 finally:
1133 if exists:
1133 if exists:
1134 try:
1134 try:
1135 os.unlink(wdest)
1135 os.unlink(wdest)
1136 except OSError:
1136 except OSError:
1137 pass
1137 pass
1138 os.rename(tempname, wdest)
1138 os.rename(tempname, wdest)
1139
1139
1140 def dirs_of(self, files):
1140 def dirs_of(self, files):
1141 dirs = set()
1141 dirs = set()
1142 for f in files:
1142 for f in files:
1143 if os.path.isdir(self.wjoin(f)):
1143 if os.path.isdir(self.wjoin(f)):
1144 dirs.add(f)
1144 dirs.add(f)
1145 for i in strutil.rfindall(f, '/'):
1145 for i in strutil.rfindall(f, '/'):
1146 dirs.add(f[:i])
1146 dirs.add(f[:i])
1147 return dirs
1147 return dirs
1148
1148
1149 def add_dirs(self, files):
1149 def add_dirs(self, files):
1150 add_dirs = [d for d in sorted(self.dirs_of(files))
1150 add_dirs = [d for d in sorted(self.dirs_of(files))
1151 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1151 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
1152 if add_dirs:
1152 if add_dirs:
1153 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1153 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1154 return add_dirs
1154 return add_dirs
1155
1155
1156 def add_files(self, files):
1156 def add_files(self, files):
1157 if files:
1157 if files:
1158 self.xargs(files, 'add', quiet=True)
1158 self.xargs(files, 'add', quiet=True)
1159 return files
1159 return files
1160
1160
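# tidy_dirs() deletes working-copy directories that are left holding nothing
# but their .svn administrative area once the files of a changeset have been
# removed.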
1161 def tidy_dirs(self, names):
1161 def tidy_dirs(self, names):
1162 deleted = []
1162 deleted = []
1163 for d in sorted(self.dirs_of(names), reverse=True):
1163 for d in sorted(self.dirs_of(names), reverse=True):
1164 wd = self.wjoin(d)
1164 wd = self.wjoin(d)
1165 if os.listdir(wd) == ['.svn']:
1165 if os.listdir(wd) == ['.svn']:
1166 self.run0('delete', d)
1166 self.run0('delete', d)
1167 deleted.append(d)
1167 deleted.append(d)
1168 return deleted
1168 return deleted
1169
1169
1170 def addchild(self, parent, child):
1170 def addchild(self, parent, child):
1171 self.childmap[parent] = child
1171 self.childmap[parent] = child
1172
1172
1173 def revid(self, rev):
1173 def revid(self, rev):
1174 return u"svn:%s@%s" % (self.uuid, rev)
1174 return u"svn:%s@%s" % (self.uuid, rev)
1175
1175
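# putcommit() stages one converted changeset in the svn working copy
# (writing, copying and deleting files, adjusting svn:executable), runs
# "svn commit", and records the hg:convert-* revision properties on the
# new revision.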
1176 def putcommit(self, files, copies, parents, commit, source, revmap):
1176 def putcommit(self, files, copies, parents, commit, source, revmap):
1177 # Apply changes to working copy
1177 # Apply changes to working copy
1178 for f, v in files:
1178 for f, v in files:
1179 try:
1179 try:
1180 data = source.getfile(f, v)
1180 data = source.getfile(f, v)
1181 except IOError:
1181 except IOError:
1182 self.delete.append(f)
1182 self.delete.append(f)
1183 else:
1183 else:
1184 e = source.getmode(f, v)
1184 e = source.getmode(f, v)
1185 self.putfile(f, e, data)
1185 self.putfile(f, e, data)
1186 if f in copies:
1186 if f in copies:
1187 self.copies.append([copies[f], f])
1187 self.copies.append([copies[f], f])
1188 files = [f[0] for f in files]
1188 files = [f[0] for f in files]
1189
1189
1190 for parent in parents:
1190 for parent in parents:
1191 try:
1191 try:
1192 return self.revid(self.childmap[parent])
1192 return self.revid(self.childmap[parent])
1193 except KeyError:
1193 except KeyError:
1194 pass
1194 pass
1195 entries = set(self.delete)
1195 entries = set(self.delete)
1196 files = frozenset(files)
1196 files = frozenset(files)
1197 entries.update(self.add_dirs(files.difference(entries)))
1197 entries.update(self.add_dirs(files.difference(entries)))
1198 if self.copies:
1198 if self.copies:
1199 for s, d in self.copies:
1199 for s, d in self.copies:
1200 self._copyfile(s, d)
1200 self._copyfile(s, d)
1201 self.copies = []
1201 self.copies = []
1202 if self.delete:
1202 if self.delete:
1203 self.xargs(self.delete, 'delete')
1203 self.xargs(self.delete, 'delete')
1204 self.delete = []
1204 self.delete = []
1205 entries.update(self.add_files(files.difference(entries)))
1205 entries.update(self.add_files(files.difference(entries)))
1206 entries.update(self.tidy_dirs(entries))
1206 entries.update(self.tidy_dirs(entries))
1207 if self.delexec:
1207 if self.delexec:
1208 self.xargs(self.delexec, 'propdel', 'svn:executable')
1208 self.xargs(self.delexec, 'propdel', 'svn:executable')
1209 self.delexec = []
1209 self.delexec = []
1210 if self.setexec:
1210 if self.setexec:
1211 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1211 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1212 self.setexec = []
1212 self.setexec = []
1213
1213
1214 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1214 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1215 fp = os.fdopen(fd, 'w')
1215 fp = os.fdopen(fd, 'w')
1216 fp.write(commit.desc)
1216 fp.write(commit.desc)
1217 fp.close()
1217 fp.close()
1218 try:
1218 try:
1219 output = self.run0('commit',
1219 output = self.run0('commit',
1220 username=util.shortuser(commit.author),
1220 username=util.shortuser(commit.author),
1221 file=messagefile,
1221 file=messagefile,
1222 encoding='utf-8')
1222 encoding='utf-8')
1223 try:
1223 try:
1224 rev = self.commit_re.search(output).group(1)
1224 rev = self.commit_re.search(output).group(1)
1225 except AttributeError:
1225 except AttributeError:
1226 self.ui.warn(_('unexpected svn output:\n'))
1226 self.ui.warn(_('unexpected svn output:\n'))
1227 self.ui.warn(output)
1227 self.ui.warn(output)
1228 raise util.Abort(_('unable to cope with svn output'))
1228 raise util.Abort(_('unable to cope with svn output'))
1229 if commit.rev:
1229 if commit.rev:
1230 self.run('propset', 'hg:convert-rev', commit.rev,
1230 self.run('propset', 'hg:convert-rev', commit.rev,
1231 revprop=True, revision=rev)
1231 revprop=True, revision=rev)
1232 if commit.branch and commit.branch != 'default':
1232 if commit.branch and commit.branch != 'default':
1233 self.run('propset', 'hg:convert-branch', commit.branch,
1233 self.run('propset', 'hg:convert-branch', commit.branch,
1234 revprop=True, revision=rev)
1234 revprop=True, revision=rev)
1235 for parent in parents:
1235 for parent in parents:
1236 self.addchild(parent, rev)
1236 self.addchild(parent, rev)
1237 return self.revid(rev)
1237 return self.revid(rev)
1238 finally:
1238 finally:
1239 os.unlink(messagefile)
1239 os.unlink(messagefile)
1240
1240
1241 def puttags(self, tags):
1241 def puttags(self, tags):
1242 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
1242 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,128 +1,128 @@
1 # -*- coding: utf-8 -*-
1 # -*- coding: utf-8 -*-
2
2
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
3 # Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
4 # This is a stripped-down version of the original bzr-svn transport.py,
4 # This is a stripped-down version of the original bzr-svn transport.py,
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
5 # Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
6
6
7 # This program is free software; you can redistribute it and/or modify
7 # This program is free software; you can redistribute it and/or modify
8 # it under the terms of the GNU General Public License as published by
8 # it under the terms of the GNU General Public License as published by
9 # the Free Software Foundation; either version 2 of the License, or
9 # the Free Software Foundation; either version 2 of the License, or
10 # (at your option) any later version.
10 # (at your option) any later version.
11
11
12 # This program is distributed in the hope that it will be useful,
12 # This program is distributed in the hope that it will be useful,
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # but WITHOUT ANY WARRANTY; without even the implied warranty of
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 # GNU General Public License for more details.
15 # GNU General Public License for more details.
16
16
17 # You should have received a copy of the GNU General Public License
17 # You should have received a copy of the GNU General Public License
18 # along with this program; if not, write to the Free Software
18 # along with this program; if not, write to the Free Software
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
20
20
21 from svn.core import SubversionException, Pool
21 from svn.core import SubversionException, Pool
22 import svn.ra
22 import svn.ra
23 import svn.client
23 import svn.client
24 import svn.core
24 import svn.core
25
25
26 # Some older versions of the Python bindings need to be
26 # Some older versions of the Python bindings need to be
27 # explicitly initialized. But what we want to do probably
27 # explicitly initialized. But what we want to do probably
28 # won't work worth a darn against those libraries anyway!
28 # won't work worth a darn against those libraries anyway!
29 svn.ra.initialize()
29 svn.ra.initialize()
30
30
31 svn_config = svn.core.svn_config_get_config(None)
31 svn_config = svn.core.svn_config_get_config(None)
32
32
33
33
34 def _create_auth_baton(pool):
34 def _create_auth_baton(pool):
35 """Create a Subversion authentication baton. """
35 """Create a Subversion authentication baton. """
36 import svn.client
36 import svn.client
37 # Give the client context baton a suite of authentication
37 # Give the client context baton a suite of authentication
38 # providers.h
38 # providers.
38 # providers.
39 providers = [
40 svn.client.get_simple_provider(pool),
40 svn.client.get_simple_provider(pool),
41 svn.client.get_username_provider(pool),
41 svn.client.get_username_provider(pool),
42 svn.client.get_ssl_client_cert_file_provider(pool),
42 svn.client.get_ssl_client_cert_file_provider(pool),
43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
43 svn.client.get_ssl_client_cert_pw_file_provider(pool),
44 svn.client.get_ssl_server_trust_file_provider(pool),
44 svn.client.get_ssl_server_trust_file_provider(pool),
45 ]
45 ]
46 # Platform-dependant authentication methods
46 # Platform-dependent authentication methods
46 # Platform-dependent authentication methods
47 getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
48 None)
48 None)
49 if getprovider:
49 if getprovider:
50 # Available in svn >= 1.6
50 # Available in svn >= 1.6
51 for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
51 for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
52 for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
52 for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
53 p = getprovider(name, type, pool)
53 p = getprovider(name, type, pool)
54 if p:
54 if p:
55 providers.append(p)
55 providers.append(p)
56 else:
56 else:
57 if hasattr(svn.client, 'get_windows_simple_provider'):
57 if hasattr(svn.client, 'get_windows_simple_provider'):
58 providers.append(svn.client.get_windows_simple_provider(pool))
58 providers.append(svn.client.get_windows_simple_provider(pool))
59
59
60 return svn.core.svn_auth_open(providers, pool)
60 return svn.core.svn_auth_open(providers, pool)
61
61
62 class NotBranchError(SubversionException):
62 class NotBranchError(SubversionException):
63 pass
63 pass
64
64
65 class SvnRaTransport(object):
65 class SvnRaTransport(object):
66 """
66 """
67 Open an ra connection to a Subversion repository.
67 Open an ra connection to a Subversion repository.
68 """
68 """
69 def __init__(self, url="", ra=None):
69 def __init__(self, url="", ra=None):
70 self.pool = Pool()
70 self.pool = Pool()
71 self.svn_url = url
71 self.svn_url = url
72 self.username = ''
72 self.username = ''
73 self.password = ''
73 self.password = ''
74
74
75 # Only Subversion 1.4 has reparent()
75 # Only Subversion 1.4 and newer have reparent()
75 # Only Subversion 1.4 and newer have reparent()
76 if ra is None or not hasattr(svn.ra, 'reparent'):
77 self.client = svn.client.create_context(self.pool)
77 self.client = svn.client.create_context(self.pool)
78 ab = _create_auth_baton(self.pool)
78 ab = _create_auth_baton(self.pool)
79 if False:
79 if False:
80 svn.core.svn_auth_set_parameter(
80 svn.core.svn_auth_set_parameter(
81 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
81 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
82 svn.core.svn_auth_set_parameter(
82 svn.core.svn_auth_set_parameter(
83 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
83 ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
84 self.client.auth_baton = ab
84 self.client.auth_baton = ab
85 self.client.config = svn_config
85 self.client.config = svn_config
86 try:
86 try:
87 self.ra = svn.client.open_ra_session(
87 self.ra = svn.client.open_ra_session(
88 self.svn_url.encode('utf8'),
88 self.svn_url.encode('utf8'),
89 self.client, self.pool)
89 self.client, self.pool)
90 except SubversionException, (inst, num):
90 except SubversionException, (inst, num):
91 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
91 if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
92 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
92 svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
93 svn.core.SVN_ERR_BAD_URL):
93 svn.core.SVN_ERR_BAD_URL):
94 raise NotBranchError(url)
94 raise NotBranchError(url)
95 raise
95 raise
96 else:
96 else:
97 self.ra = ra
97 self.ra = ra
98 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
98 svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
99
99
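# Reporter wraps the (reporter, report_baton) pair returned by
# svn.ra.do_update() so callers can drive the update report through methods
# instead of the raw reporter2_invoke_* functions.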
100 class Reporter:
100 class Reporter(object):
101 def __init__(self, (reporter, report_baton)):
101 def __init__(self, (reporter, report_baton)):
102 self._reporter = reporter
102 self._reporter = reporter
103 self._baton = report_baton
103 self._baton = report_baton
104
104
105 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
105 def set_path(self, path, revnum, start_empty, lock_token, pool=None):
106 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
106 svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
107 path, revnum, start_empty, lock_token, pool)
107 path, revnum, start_empty, lock_token, pool)
108
108
109 def delete_path(self, path, pool=None):
109 def delete_path(self, path, pool=None):
110 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
110 svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
111 path, pool)
111 path, pool)
112
112
113 def link_path(self, path, url, revision, start_empty, lock_token,
113 def link_path(self, path, url, revision, start_empty, lock_token,
114 pool=None):
114 pool=None):
115 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
115 svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
116 path, url, revision, start_empty, lock_token,
116 path, url, revision, start_empty, lock_token,
117 pool)
117 pool)
118
118
119 def finish_report(self, pool=None):
119 def finish_report(self, pool=None):
120 svn.ra.reporter2_invoke_finish_report(self._reporter,
120 svn.ra.reporter2_invoke_finish_report(self._reporter,
121 self._baton, pool)
121 self._baton, pool)
122
122
123 def abort_report(self, pool=None):
123 def abort_report(self, pool=None):
124 svn.ra.reporter2_invoke_abort_report(self._reporter,
124 svn.ra.reporter2_invoke_abort_report(self._reporter,
125 self._baton, pool)
125 self._baton, pool)
126
126
127 def do_update(self, revnum, path, *args, **kwargs):
127 def do_update(self, revnum, path, *args, **kwargs):
128 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
128 return self.Reporter(svn.ra.do_update(self.ra, revnum, path, *args, **kwargs))
@@ -1,283 +1,283 @@
1 # GnuPG signing extension for Mercurial
1 # GnuPG signing extension for Mercurial
2 #
2 #
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
3 # Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 import os, tempfile, binascii
8 import os, tempfile, binascii
9 from mercurial import util, commands, match
9 from mercurial import util, commands, match
10 from mercurial import node as hgnode
10 from mercurial import node as hgnode
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 class gpg:
13 class gpg(object):
14 def __init__(self, path, key=None):
14 def __init__(self, path, key=None):
15 self.path = path
15 self.path = path
16 self.key = (key and " --local-user \"%s\"" % key) or ""
16 self.key = (key and " --local-user \"%s\"" % key) or ""
17
17
18 def sign(self, data):
18 def sign(self, data):
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
19 gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
20 return util.filter(data, gpgcmd)
20 return util.filter(data, gpgcmd)
21
21
22 def verify(self, data, sig):
22 def verify(self, data, sig):
23 """ returns of the good and bad signatures"""
23 """ returns of the good and bad signatures"""
24 sigfile = datafile = None
24 sigfile = datafile = None
25 try:
25 try:
26 # create temporary files
26 # create temporary files
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
27 fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
28 fp = os.fdopen(fd, 'wb')
28 fp = os.fdopen(fd, 'wb')
29 fp.write(sig)
29 fp.write(sig)
30 fp.close()
30 fp.close()
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
31 fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
32 fp = os.fdopen(fd, 'wb')
32 fp = os.fdopen(fd, 'wb')
33 fp.write(data)
33 fp.write(data)
34 fp.close()
34 fp.close()
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
35 gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
36 "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
37 ret = util.filter("", gpgcmd)
37 ret = util.filter("", gpgcmd)
38 finally:
38 finally:
39 for f in (sigfile, datafile):
39 for f in (sigfile, datafile):
40 try:
40 try:
41 if f: os.unlink(f)
41 if f: os.unlink(f)
42 except: pass
42 except: pass
43 keys = []
43 keys = []
44 key, fingerprint = None, None
44 key, fingerprint = None, None
45 err = ""
45 err = ""
46 for l in ret.splitlines():
46 for l in ret.splitlines():
47 # see DETAILS in the gnupg documentation
47 # see DETAILS in the gnupg documentation
48 # filter the logger output
48 # filter the logger output
49 if not l.startswith("[GNUPG:]"):
49 if not l.startswith("[GNUPG:]"):
50 continue
50 continue
51 l = l[9:]
51 l = l[9:]
52 if l.startswith("ERRSIG"):
52 if l.startswith("ERRSIG"):
53 err = _("error while verifying signature")
53 err = _("error while verifying signature")
54 break
54 break
55 elif l.startswith("VALIDSIG"):
55 elif l.startswith("VALIDSIG"):
56 # fingerprint of the primary key
56 # fingerprint of the primary key
57 fingerprint = l.split()[10]
57 fingerprint = l.split()[10]
58 elif (l.startswith("GOODSIG") or
58 elif (l.startswith("GOODSIG") or
59 l.startswith("EXPSIG") or
59 l.startswith("EXPSIG") or
60 l.startswith("EXPKEYSIG") or
60 l.startswith("EXPKEYSIG") or
61 l.startswith("BADSIG")):
61 l.startswith("BADSIG")):
62 if key is not None:
62 if key is not None:
63 keys.append(key + [fingerprint])
63 keys.append(key + [fingerprint])
64 key = l.split(" ", 2)
64 key = l.split(" ", 2)
65 fingerprint = None
65 fingerprint = None
66 if err:
66 if err:
67 return err, []
67 return err, []
68 if key is not None:
68 if key is not None:
69 keys.append(key + [fingerprint])
69 keys.append(key + [fingerprint])
70 return err, keys
70 return err, keys
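For reference, verify() hands back an (err, keys) pair where each entry in keys carries the status word, key id, user id and primary-key fingerprint gathered from the GnuPG status output. A small illustrative sketch of consuming that shape (the keys value below is made up; no real gpg call happens):

    err, keys = "", [
        ["GOODSIG", "0123456789ABCDEF", "Jane Doe <jane@example.org>", "FPR0123"],
    ]
    if err:
        print("verification failed: %s" % err)
    for status, keyid, userid, fingerprint in keys:
        if status == "BADSIG":
            print("bad signature from %s" % userid)
        else:
            print("signed by %s (key %s)" % (userid, keyid))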
71
71
72 def newgpg(ui, **opts):
72 def newgpg(ui, **opts):
73 """create a new gpg instance"""
73 """create a new gpg instance"""
74 gpgpath = ui.config("gpg", "cmd", "gpg")
74 gpgpath = ui.config("gpg", "cmd", "gpg")
75 gpgkey = opts.get('key')
75 gpgkey = opts.get('key')
76 if not gpgkey:
76 if not gpgkey:
77 gpgkey = ui.config("gpg", "key", None)
77 gpgkey = ui.config("gpg", "key", None)
78 return gpg(gpgpath, gpgkey)
78 return gpg(gpgpath, gpgkey)
79
79
80 def sigwalk(repo):
80 def sigwalk(repo):
81 """
81 """
82 walk over every sigs, yields a couple
82 walk over every signature, yielding a pair
82 walk over every signature, yielding a pair
83 ((node, version, sig), (filename, linenumber))
84 """
84 """
85 def parsefile(fileiter, context):
85 def parsefile(fileiter, context):
86 ln = 1
86 ln = 1
87 for l in fileiter:
87 for l in fileiter:
88 if not l:
88 if not l:
89 continue
89 continue
90 yield (l.split(" ", 2), (context, ln))
90 yield (l.split(" ", 2), (context, ln))
91 ln += 1
91 ln += 1
92
92
93 # read the heads
93 # read the heads
94 fl = repo.file(".hgsigs")
94 fl = repo.file(".hgsigs")
95 for r in reversed(fl.heads()):
95 for r in reversed(fl.heads()):
96 fn = ".hgsigs|%s" % hgnode.short(r)
96 fn = ".hgsigs|%s" % hgnode.short(r)
97 for item in parsefile(fl.read(r).splitlines(), fn):
97 for item in parsefile(fl.read(r).splitlines(), fn):
98 yield item
98 yield item
99 try:
99 try:
100 # read local signatures
100 # read local signatures
101 fn = "localsigs"
101 fn = "localsigs"
102 for item in parsefile(repo.opener(fn), fn):
102 for item in parsefile(repo.opener(fn), fn):
103 yield item
103 yield item
104 except IOError:
104 except IOError:
105 pass
105 pass
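A hedged usage sketch of sigwalk(): each yielded item pairs the signature fields with where they were found, which is exactly how sigs() and check() below consume it (repo is assumed to be an already-open localrepository object):

    for (node, version, sig), (filename, linenum) in sigwalk(repo):
        print("%s:%d signs %s (version %s)" % (filename, linenum, node, version))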
106
106
107 def getkeys(ui, repo, mygpg, sigdata, context):
107 def getkeys(ui, repo, mygpg, sigdata, context):
108 """get the keys who signed a data"""
108 """get the keys who signed a data"""
109 fn, ln = context
109 fn, ln = context
110 node, version, sig = sigdata
110 node, version, sig = sigdata
111 prefix = "%s:%d" % (fn, ln)
111 prefix = "%s:%d" % (fn, ln)
112 node = hgnode.bin(node)
112 node = hgnode.bin(node)
113
113
114 data = node2txt(repo, node, version)
114 data = node2txt(repo, node, version)
115 sig = binascii.a2b_base64(sig)
115 sig = binascii.a2b_base64(sig)
116 err, keys = mygpg.verify(data, sig)
116 err, keys = mygpg.verify(data, sig)
117 if err:
117 if err:
118 ui.warn("%s:%d %s\n" % (fn, ln , err))
118 ui.warn("%s:%d %s\n" % (fn, ln , err))
119 return None
119 return None
120
120
121 validkeys = []
121 validkeys = []
122 # warn for expired key and/or sigs
122 # warn for expired key and/or sigs
123 for key in keys:
123 for key in keys:
124 if key[0] == "BADSIG":
124 if key[0] == "BADSIG":
125 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
125 ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
126 continue
126 continue
127 if key[0] == "EXPSIG":
127 if key[0] == "EXPSIG":
128 ui.write(_("%s Note: Signature has expired"
128 ui.write(_("%s Note: Signature has expired"
129 " (signed by: \"%s\")\n") % (prefix, key[2]))
129 " (signed by: \"%s\")\n") % (prefix, key[2]))
130 elif key[0] == "EXPKEYSIG":
130 elif key[0] == "EXPKEYSIG":
131 ui.write(_("%s Note: This key has expired"
131 ui.write(_("%s Note: This key has expired"
132 " (signed by: \"%s\")\n") % (prefix, key[2]))
132 " (signed by: \"%s\")\n") % (prefix, key[2]))
133 validkeys.append((key[1], key[2], key[3]))
133 validkeys.append((key[1], key[2], key[3]))
134 return validkeys
134 return validkeys
135
135
136 def sigs(ui, repo):
136 def sigs(ui, repo):
137 """list signed changesets"""
137 """list signed changesets"""
138 mygpg = newgpg(ui)
138 mygpg = newgpg(ui)
139 revs = {}
139 revs = {}
140
140
141 for data, context in sigwalk(repo):
141 for data, context in sigwalk(repo):
142 node, version, sig = data
142 node, version, sig = data
143 fn, ln = context
143 fn, ln = context
144 try:
144 try:
145 n = repo.lookup(node)
145 n = repo.lookup(node)
146 except KeyError:
146 except KeyError:
147 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
147 ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
148 continue
148 continue
149 r = repo.changelog.rev(n)
149 r = repo.changelog.rev(n)
150 keys = getkeys(ui, repo, mygpg, data, context)
150 keys = getkeys(ui, repo, mygpg, data, context)
151 if not keys:
151 if not keys:
152 continue
152 continue
153 revs.setdefault(r, [])
153 revs.setdefault(r, [])
154 revs[r].extend(keys)
154 revs[r].extend(keys)
155 for rev in sorted(revs, reverse=True):
155 for rev in sorted(revs, reverse=True):
156 for k in revs[rev]:
156 for k in revs[rev]:
157 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
157 r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
158 ui.write("%-30s %s\n" % (keystr(ui, k), r))
158 ui.write("%-30s %s\n" % (keystr(ui, k), r))
159
159
160 def check(ui, repo, rev):
160 def check(ui, repo, rev):
161 """verify all the signatures there may be for a particular revision"""
161 """verify all the signatures there may be for a particular revision"""
162 mygpg = newgpg(ui)
162 mygpg = newgpg(ui)
163 rev = repo.lookup(rev)
163 rev = repo.lookup(rev)
164 hexrev = hgnode.hex(rev)
164 hexrev = hgnode.hex(rev)
165 keys = []
165 keys = []
166
166
167 for data, context in sigwalk(repo):
167 for data, context in sigwalk(repo):
168 node, version, sig = data
168 node, version, sig = data
169 if node == hexrev:
169 if node == hexrev:
170 k = getkeys(ui, repo, mygpg, data, context)
170 k = getkeys(ui, repo, mygpg, data, context)
171 if k:
171 if k:
172 keys.extend(k)
172 keys.extend(k)
173
173
174 if not keys:
174 if not keys:
175 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
175 ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
176 return
176 return
177
177
178 # print summary
178 # print summary
179 ui.write("%s is signed by:\n" % hgnode.short(rev))
179 ui.write("%s is signed by:\n" % hgnode.short(rev))
180 for key in keys:
180 for key in keys:
181 ui.write(" %s\n" % keystr(ui, key))
181 ui.write(" %s\n" % keystr(ui, key))
182
182
183 def keystr(ui, key):
183 def keystr(ui, key):
184 """associate a string to a key (username, comment)"""
184 """associate a string to a key (username, comment)"""
185 keyid, user, fingerprint = key
185 keyid, user, fingerprint = key
186 comment = ui.config("gpg", fingerprint, None)
186 comment = ui.config("gpg", fingerprint, None)
187 if comment:
187 if comment:
188 return "%s (%s)" % (user, comment)
188 return "%s (%s)" % (user, comment)
189 else:
189 else:
190 return user
190 return user
191
191
192 def sign(ui, repo, *revs, **opts):
192 def sign(ui, repo, *revs, **opts):
193 """add a signature for the current or given revision
193 """add a signature for the current or given revision
194
194
195 If no revision is given, the parent of the working directory is used,
195 If no revision is given, the parent of the working directory is used,
196 or tip if no revision is checked out.
196 or tip if no revision is checked out.
197
197
198 See 'hg help dates' for a list of formats valid for -d/--date.
198 See 'hg help dates' for a list of formats valid for -d/--date.
199 """
199 """
200
200
201 mygpg = newgpg(ui, **opts)
201 mygpg = newgpg(ui, **opts)
202 sigver = "0"
202 sigver = "0"
203 sigmessage = ""
203 sigmessage = ""
204
204
205 date = opts.get('date')
205 date = opts.get('date')
206 if date:
206 if date:
207 opts['date'] = util.parsedate(date)
207 opts['date'] = util.parsedate(date)
208
208
209 if revs:
209 if revs:
210 nodes = [repo.lookup(n) for n in revs]
210 nodes = [repo.lookup(n) for n in revs]
211 else:
211 else:
212 nodes = [node for node in repo.dirstate.parents()
212 nodes = [node for node in repo.dirstate.parents()
213 if node != hgnode.nullid]
213 if node != hgnode.nullid]
214 if len(nodes) > 1:
214 if len(nodes) > 1:
215 raise util.Abort(_('uncommitted merge - please provide a '
215 raise util.Abort(_('uncommitted merge - please provide a '
216 'specific revision'))
216 'specific revision'))
217 if not nodes:
217 if not nodes:
218 nodes = [repo.changelog.tip()]
218 nodes = [repo.changelog.tip()]
219
219
220 for n in nodes:
220 for n in nodes:
221 hexnode = hgnode.hex(n)
221 hexnode = hgnode.hex(n)
222 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
222 ui.write("Signing %d:%s\n" % (repo.changelog.rev(n),
223 hgnode.short(n)))
223 hgnode.short(n)))
224 # build data
224 # build data
225 data = node2txt(repo, n, sigver)
225 data = node2txt(repo, n, sigver)
226 sig = mygpg.sign(data)
226 sig = mygpg.sign(data)
227 if not sig:
227 if not sig:
228 raise util.Abort(_("Error while signing"))
228 raise util.Abort(_("Error while signing"))
229 sig = binascii.b2a_base64(sig)
229 sig = binascii.b2a_base64(sig)
230 sig = sig.replace("\n", "")
230 sig = sig.replace("\n", "")
231 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
231 sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)
232
232
233 # write it
233 # write it
234 if opts['local']:
234 if opts['local']:
235 repo.opener("localsigs", "ab").write(sigmessage)
235 repo.opener("localsigs", "ab").write(sigmessage)
236 return
236 return
237
237
238 for x in repo.status(unknown=True)[:5]:
238 for x in repo.status(unknown=True)[:5]:
239 if ".hgsigs" in x and not opts["force"]:
239 if ".hgsigs" in x and not opts["force"]:
240 raise util.Abort(_("working copy of .hgsigs is changed "
240 raise util.Abort(_("working copy of .hgsigs is changed "
241 "(please commit .hgsigs manually "
241 "(please commit .hgsigs manually "
242 "or use --force)"))
242 "or use --force)"))
243
243
244 repo.wfile(".hgsigs", "ab").write(sigmessage)
244 repo.wfile(".hgsigs", "ab").write(sigmessage)
245
245
246 if '.hgsigs' not in repo.dirstate:
246 if '.hgsigs' not in repo.dirstate:
247 repo.add([".hgsigs"])
247 repo.add([".hgsigs"])
248
248
249 if opts["no_commit"]:
249 if opts["no_commit"]:
250 return
250 return
251
251
252 message = opts['message']
252 message = opts['message']
253 if not message:
253 if not message:
254 message = "\n".join([_("Added signature for changeset %s")
254 message = "\n".join([_("Added signature for changeset %s")
255 % hgnode.short(n)
255 % hgnode.short(n)
256 for n in nodes])
256 for n in nodes])
257 try:
257 try:
258 m = match.exact(['.hgsigs'])
258 m = match.exact(['.hgsigs'])
259 repo.commit(message, opts['user'], opts['date'], match=m)
259 repo.commit(message, opts['user'], opts['date'], match=m)
260 except ValueError, inst:
260 except ValueError, inst:
261 raise util.Abort(str(inst))
261 raise util.Abort(str(inst))
262
262
263 def node2txt(repo, node, ver):
263 def node2txt(repo, node, ver):
264 """map a manifest into some text"""
264 """map a manifest into some text"""
265 if ver == "0":
265 if ver == "0":
266 return "%s\n" % hgnode.hex(node)
266 return "%s\n" % hgnode.hex(node)
267 else:
267 else:
268 raise util.Abort(_("unknown signature version"))
268 raise util.Abort(_("unknown signature version"))
269
269
270 cmdtable = {
270 cmdtable = {
271 "sign":
271 "sign":
272 (sign,
272 (sign,
273 [('l', 'local', None, _('make the signature local')),
273 [('l', 'local', None, _('make the signature local')),
274 ('f', 'force', None, _('sign even if the sigfile is modified')),
274 ('f', 'force', None, _('sign even if the sigfile is modified')),
275 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
275 ('', 'no-commit', None, _('do not commit the sigfile after signing')),
276 ('k', 'key', '', _('the key id to sign with')),
276 ('k', 'key', '', _('the key id to sign with')),
277 ('m', 'message', '', _('commit message')),
277 ('m', 'message', '', _('commit message')),
278 ] + commands.commitopts2,
278 ] + commands.commitopts2,
279 _('hg sign [OPTION]... [REVISION]...')),
279 _('hg sign [OPTION]... [REVISION]...')),
280 "sigcheck": (check, [], _('hg sigcheck REVISION')),
280 "sigcheck": (check, [], _('hg sigcheck REVISION')),
281 "sigs": (sigs, [], _('hg sigs')),
281 "sigs": (sigs, [], _('hg sigs')),
282 }
282 }
283
283
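To summarize the record format used by the extension above: sign() appends one line per signature to .hgsigs (or to localsigs with --local), holding the full hex node, the signature-scheme version ("0"), and the detached signature base64-encoded with newlines stripped. A minimal sketch of assembling such a line (the node and signature bytes below are placeholders):

    import binascii

    hexnode = "0123456789abcdef0123456789abcdef01234567"      # placeholder node
    sigver = "0"
    rawsig = "output of gpg --detach-sign on node2txt(...)"    # placeholder bytes
    sig = binascii.b2a_base64(rawsig).replace("\n", "")
    line = "%s %s %s\n" % (hexnode, sigver, sig)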
@@ -1,246 +1,246
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
1 # Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
2 # Published under the GNU GPL
2 # Published under the GNU GPL
3
3
4 """CIA notification
4 """CIA notification
5
5
6 This is meant to be run as a changegroup or incoming hook.
6 This is meant to be run as a changegroup or incoming hook.
7 To configure it, set the following options in your hgrc:
7 To configure it, set the following options in your hgrc:
8
8
9 [cia]
9 [cia]
10 # your registered CIA user name
10 # your registered CIA user name
11 user = foo
11 user = foo
12 # the name of the project in CIA
12 # the name of the project in CIA
13 project = foo
13 project = foo
14 # the module (subproject) (optional)
14 # the module (subproject) (optional)
15 #module = foo
15 #module = foo
16 # Append a diffstat to the log message (optional)
16 # Append a diffstat to the log message (optional)
17 #diffstat = False
17 #diffstat = False
18 # Template to use for log messages (optional)
18 # Template to use for log messages (optional)
19 #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat}
19 #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat}
20 # Style to use (optional)
20 # Style to use (optional)
21 #style = foo
21 #style = foo
22 # The URL of the CIA notification service (optional)
22 # The URL of the CIA notification service (optional)
23 # You can use mailto: URLs to send by email, eg
23 # You can use mailto: URLs to send by email, eg
24 # mailto:cia@cia.vc
24 # mailto:cia@cia.vc
25 # Make sure to set email.from if you do this.
25 # Make sure to set email.from if you do this.
26 #url = http://cia.vc/
26 #url = http://cia.vc/
27 # print message instead of sending it (optional)
27 # print message instead of sending it (optional)
28 #test = False
28 #test = False
29
29
30 [hooks]
30 [hooks]
31 # one of these:
31 # one of these:
32 changegroup.cia = python:hgcia.hook
32 changegroup.cia = python:hgcia.hook
33 #incoming.cia = python:hgcia.hook
33 #incoming.cia = python:hgcia.hook
34
34
35 [web]
35 [web]
36 # If you want hyperlinks (optional)
36 # If you want hyperlinks (optional)
37 baseurl = http://server/path/to/repo
37 baseurl = http://server/path/to/repo
38 """
38 """
39
39
40 from mercurial.i18n import _
40 from mercurial.i18n import _
41 from mercurial.node import *
41 from mercurial.node import *
42 from mercurial import cmdutil, patch, templater, util, mail
42 from mercurial import cmdutil, patch, templater, util, mail
43 import email.Parser
43 import email.Parser
44
44
45 import xmlrpclib
45 import xmlrpclib
46 from xml.sax import saxutils
46 from xml.sax import saxutils
47
47
48 socket_timeout = 30 # seconds
48 socket_timeout = 30 # seconds
49 try:
49 try:
50 # set a timeout for the socket so you don't have to wait so looooong
50 # set a timeout for the socket so you don't have to wait so looooong
51 # when cia.vc is having problems. requires python >= 2.3:
51 # when cia.vc is having problems. requires python >= 2.3:
52 import socket
52 import socket
53 socket.setdefaulttimeout(socket_timeout)
53 socket.setdefaulttimeout(socket_timeout)
54 except:
54 except:
55 pass
55 pass
56
56
57 HGCIA_VERSION = '0.1'
57 HGCIA_VERSION = '0.1'
58 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
58 HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
59
59
60
60
61 class ciamsg(object):
61 class ciamsg(object):
62 """ A CIA message """
62 """ A CIA message """
63 def __init__(self, cia, ctx):
63 def __init__(self, cia, ctx):
64 self.cia = cia
64 self.cia = cia
65 self.ctx = ctx
65 self.ctx = ctx
66 self.url = self.cia.url
66 self.url = self.cia.url
67
67
68 def fileelem(self, path, uri, action):
68 def fileelem(self, path, uri, action):
69 if uri:
69 if uri:
70 uri = ' uri=%s' % saxutils.quoteattr(uri)
70 uri = ' uri=%s' % saxutils.quoteattr(uri)
71 return '<file%s action=%s>%s</file>' % (
71 return '<file%s action=%s>%s</file>' % (
72 uri, saxutils.quoteattr(action), saxutils.escape(path))
72 uri, saxutils.quoteattr(action), saxutils.escape(path))
73
73
74 def fileelems(self):
74 def fileelems(self):
75 n = self.ctx.node()
75 n = self.ctx.node()
76 f = self.cia.repo.status(self.ctx.parents()[0].node(), n)
76 f = self.cia.repo.status(self.ctx.parents()[0].node(), n)
77 url = self.url or ''
77 url = self.url or ''
78 elems = []
78 elems = []
79 for path in f[0]:
79 for path in f[0]:
80 uri = '%s/diff/%s/%s' % (url, short(n), path)
80 uri = '%s/diff/%s/%s' % (url, short(n), path)
81 elems.append(self.fileelem(path, url and uri, 'modify'))
81 elems.append(self.fileelem(path, url and uri, 'modify'))
82 for path in f[1]:
82 for path in f[1]:
83 # TODO: copy/rename ?
83 # TODO: copy/rename ?
84 uri = '%s/file/%s/%s' % (url, short(n), path)
84 uri = '%s/file/%s/%s' % (url, short(n), path)
85 elems.append(self.fileelem(path, url and uri, 'add'))
85 elems.append(self.fileelem(path, url and uri, 'add'))
86 for path in f[2]:
86 for path in f[2]:
87 elems.append(self.fileelem(path, '', 'remove'))
87 elems.append(self.fileelem(path, '', 'remove'))
88
88
89 return '\n'.join(elems)
89 return '\n'.join(elems)
90
90
91 def sourceelem(self, project, module=None, branch=None):
91 def sourceelem(self, project, module=None, branch=None):
92 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
92 msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
93 if module:
93 if module:
94 msg.append('<module>%s</module>' % saxutils.escape(module))
94 msg.append('<module>%s</module>' % saxutils.escape(module))
95 if branch:
95 if branch:
96 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
96 msg.append('<branch>%s</branch>' % saxutils.escape(branch))
97 msg.append('</source>')
97 msg.append('</source>')
98
98
99 return '\n'.join(msg)
99 return '\n'.join(msg)
100
100
101 def diffstat(self):
101 def diffstat(self):
102 class patchbuf:
102 class patchbuf(object):
103 def __init__(self):
103 def __init__(self):
104 self.lines = []
104 self.lines = []
105 # diffstat is stupid
105 # diffstat is stupid
106 self.name = 'cia'
106 self.name = 'cia'
107 def write(self, data):
107 def write(self, data):
108 self.lines.append(data)
108 self.lines.append(data)
109 def close(self):
109 def close(self):
110 pass
110 pass
111
111
112 n = self.ctx.node()
112 n = self.ctx.node()
113 pbuf = patchbuf()
113 pbuf = patchbuf()
114 patch.export(self.cia.repo, [n], fp=pbuf)
114 patch.export(self.cia.repo, [n], fp=pbuf)
115 return patch.diffstat(pbuf.lines) or ''
115 return patch.diffstat(pbuf.lines) or ''
116
116
117 def logmsg(self):
117 def logmsg(self):
118 diffstat = self.cia.diffstat and self.diffstat() or ''
118 diffstat = self.cia.diffstat and self.diffstat() or ''
119 self.cia.ui.pushbuffer()
119 self.cia.ui.pushbuffer()
120 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
120 self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
121 url=self.cia.url, diffstat=diffstat)
121 url=self.cia.url, diffstat=diffstat)
122 return self.cia.ui.popbuffer()
122 return self.cia.ui.popbuffer()
123
123
124 def xml(self):
124 def xml(self):
125 n = short(self.ctx.node())
125 n = short(self.ctx.node())
126 src = self.sourceelem(self.cia.project, module=self.cia.module,
126 src = self.sourceelem(self.cia.project, module=self.cia.module,
127 branch=self.ctx.branch())
127 branch=self.ctx.branch())
128 # unix timestamp
128 # unix timestamp
129 dt = self.ctx.date()
129 dt = self.ctx.date()
130 timestamp = dt[0]
130 timestamp = dt[0]
131
131
132 author = saxutils.escape(self.ctx.user())
132 author = saxutils.escape(self.ctx.user())
133 rev = '%d:%s' % (self.ctx.rev(), n)
133 rev = '%d:%s' % (self.ctx.rev(), n)
134 log = saxutils.escape(self.logmsg())
134 log = saxutils.escape(self.logmsg())
135
135
136 url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url),
136 url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url),
137 n) or ''
137 n) or ''
138
138
139 msg = """
139 msg = """
140 <message>
140 <message>
141 <generator>
141 <generator>
142 <name>Mercurial (hgcia)</name>
142 <name>Mercurial (hgcia)</name>
143 <version>%s</version>
143 <version>%s</version>
144 <url>%s</url>
144 <url>%s</url>
145 <user>%s</user>
145 <user>%s</user>
146 </generator>
146 </generator>
147 %s
147 %s
148 <body>
148 <body>
149 <commit>
149 <commit>
150 <author>%s</author>
150 <author>%s</author>
151 <version>%s</version>
151 <version>%s</version>
152 <log>%s</log>
152 <log>%s</log>
153 %s
153 %s
154 <files>%s</files>
154 <files>%s</files>
155 </commit>
155 </commit>
156 </body>
156 </body>
157 <timestamp>%d</timestamp>
157 <timestamp>%d</timestamp>
158 </message>
158 </message>
159 """ % \
159 """ % \
160 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
160 (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
161 saxutils.escape(self.cia.user), src, author, rev, log, url,
161 saxutils.escape(self.cia.user), src, author, rev, log, url,
162 self.fileelems(), timestamp)
162 self.fileelems(), timestamp)
163
163
164 return msg
164 return msg
165
165
166
166
167 class hgcia(object):
167 class hgcia(object):
168 """ CIA notification class """
168 """ CIA notification class """
169
169
170 deftemplate = '{desc}'
170 deftemplate = '{desc}'
171 dstemplate = '{desc}\n-- \n{diffstat}'
171 dstemplate = '{desc}\n-- \n{diffstat}'
172
172
173 def __init__(self, ui, repo):
173 def __init__(self, ui, repo):
174 self.ui = ui
174 self.ui = ui
175 self.repo = repo
175 self.repo = repo
176
176
177 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
177 self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
178 self.user = self.ui.config('cia', 'user')
178 self.user = self.ui.config('cia', 'user')
179 self.project = self.ui.config('cia', 'project')
179 self.project = self.ui.config('cia', 'project')
180 self.module = self.ui.config('cia', 'module')
180 self.module = self.ui.config('cia', 'module')
181 self.diffstat = self.ui.configbool('cia', 'diffstat')
181 self.diffstat = self.ui.configbool('cia', 'diffstat')
182 self.emailfrom = self.ui.config('email', 'from')
182 self.emailfrom = self.ui.config('email', 'from')
183 self.dryrun = self.ui.configbool('cia', 'test')
183 self.dryrun = self.ui.configbool('cia', 'test')
184 self.url = self.ui.config('web', 'baseurl')
184 self.url = self.ui.config('web', 'baseurl')
185
185
186 style = self.ui.config('cia', 'style')
186 style = self.ui.config('cia', 'style')
187 template = self.ui.config('cia', 'template')
187 template = self.ui.config('cia', 'template')
188 if not template:
188 if not template:
189 template = self.diffstat and self.dstemplate or self.deftemplate
189 template = self.diffstat and self.dstemplate or self.deftemplate
190 template = templater.parsestring(template, quoted=False)
190 template = templater.parsestring(template, quoted=False)
191 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
191 t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
192 style, False)
192 style, False)
193 t.use_template(template)
193 t.use_template(template)
194 self.templater = t
194 self.templater = t
195
195
196 def sendrpc(self, msg):
196 def sendrpc(self, msg):
197 srv = xmlrpclib.Server(self.ciaurl)
197 srv = xmlrpclib.Server(self.ciaurl)
198 srv.hub.deliver(msg)
198 srv.hub.deliver(msg)
199
199
200 def sendemail(self, address, data):
200 def sendemail(self, address, data):
201 p = email.Parser.Parser()
201 p = email.Parser.Parser()
202 msg = p.parsestr(data)
202 msg = p.parsestr(data)
203 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
203 msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
204 msg['To'] = address
204 msg['To'] = address
205 msg['From'] = self.emailfrom
205 msg['From'] = self.emailfrom
206 msg['Subject'] = 'DeliverXML'
206 msg['Subject'] = 'DeliverXML'
207 msg['Content-type'] = 'text/xml'
207 msg['Content-type'] = 'text/xml'
208 msgtext = msg.as_string(0)
208 msgtext = msg.as_string(0)
209
209
210 self.ui.status(_('hgcia: sending update to %s\n') % address)
210 self.ui.status(_('hgcia: sending update to %s\n') % address)
211 mail.sendmail(self.ui, util.email(self.emailfrom),
211 mail.sendmail(self.ui, util.email(self.emailfrom),
212 [address], msgtext)
212 [address], msgtext)
213
213
214
214
215 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
215 def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
216 """ send CIA notification """
216 """ send CIA notification """
217 def sendmsg(cia, ctx):
217 def sendmsg(cia, ctx):
218 msg = ciamsg(cia, ctx).xml()
218 msg = ciamsg(cia, ctx).xml()
219 if cia.dryrun:
219 if cia.dryrun:
220 ui.write(msg)
220 ui.write(msg)
221 elif cia.ciaurl.startswith('mailto:'):
221 elif cia.ciaurl.startswith('mailto:'):
222 if not cia.emailfrom:
222 if not cia.emailfrom:
223 raise util.Abort(_('email.from must be defined when '
223 raise util.Abort(_('email.from must be defined when '
224 'sending by email'))
224 'sending by email'))
225 cia.sendemail(cia.ciaurl[7:], msg)
225 cia.sendemail(cia.ciaurl[7:], msg)
226 else:
226 else:
227 cia.sendrpc(msg)
227 cia.sendrpc(msg)
228
228
229 n = bin(node)
229 n = bin(node)
230 cia = hgcia(ui, repo)
230 cia = hgcia(ui, repo)
231 if not cia.user:
231 if not cia.user:
232 ui.debug(_('cia: no user specified'))
232 ui.debug(_('cia: no user specified'))
233 return
233 return
234 if not cia.project:
234 if not cia.project:
235 ui.debug(_('cia: no project specified'))
235 ui.debug(_('cia: no project specified'))
236 return
236 return
237 if hooktype == 'changegroup':
237 if hooktype == 'changegroup':
238 start = repo.changelog.rev(n)
238 start = repo.changelog.rev(n)
239 end = len(repo.changelog)
239 end = len(repo.changelog)
240 for rev in xrange(start, end):
240 for rev in xrange(start, end):
241 n = repo.changelog.node(rev)
241 n = repo.changelog.node(rev)
242 ctx = repo.changectx(n)
242 ctx = repo.changectx(n)
243 sendmsg(cia, ctx)
243 sendmsg(cia, ctx)
244 else:
244 else:
245 ctx = repo.changectx(n)
245 ctx = repo.changectx(n)
246 sendmsg(cia, ctx)
246 sendmsg(cia, ctx)
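The module docstring above shows the hgrc-driven setup; as a hedged sketch, hook() can also be exercised by hand with cia.test enabled so the XML message is printed rather than delivered. The import path and config values here are assumptions, not something this diff spells out:

    from mercurial import ui as uimod, hg
    from hgext import hgcia          # assumed import path for this extension

    myui = uimod.ui()
    myui.setconfig('cia', 'user', 'foo')      # registered CIA user name
    myui.setconfig('cia', 'project', 'foo')   # CIA project name
    myui.setconfig('cia', 'test', '1')        # dry run: print instead of sending
    repo = hg.repository(myui, '.')
    hgcia.hook(myui, repo, 'incoming', node=repo['tip'].hex())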
@@ -1,110 +1,110
1 # __init__.py - inotify-based status acceleration for Linux
1 # __init__.py - inotify-based status acceleration for Linux
2 #
2 #
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
3 # Copyright 2006, 2007, 2008 Bryan O'Sullivan <bos@serpentine.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007, 2008 Brendan Cully <brendan@kublai.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 '''inotify-based status acceleration for Linux systems
9 '''inotify-based status acceleration for Linux systems
10 '''
10 '''
11
11
12 # todo: socket permissions
12 # todo: socket permissions
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import cmdutil, util
15 from mercurial import cmdutil, util
16 import server
16 import server
17 from weakref import proxy
17 from weakref import proxy
18 from client import client, QueryFailed
18 from client import client, QueryFailed
19
19
20 def serve(ui, repo, **opts):
20 def serve(ui, repo, **opts):
21 '''start an inotify server for this repository'''
21 '''start an inotify server for this repository'''
22 timeout = opts.get('timeout')
22 timeout = opts.get('timeout')
23 if timeout:
23 if timeout:
24 timeout = float(timeout) * 1e3
24 timeout = float(timeout) * 1e3
25
25
26 class service:
26 class service(object):
27 def init(self):
27 def init(self):
28 try:
28 try:
29 self.master = server.master(ui, repo, timeout)
29 self.master = server.master(ui, repo, timeout)
30 except server.AlreadyStartedException, inst:
30 except server.AlreadyStartedException, inst:
31 raise util.Abort(str(inst))
31 raise util.Abort(str(inst))
32
32
33 def run(self):
33 def run(self):
34 try:
34 try:
35 self.master.run()
35 self.master.run()
36 finally:
36 finally:
37 self.master.shutdown()
37 self.master.shutdown()
38
38
39 service = service()
39 service = service()
40 cmdutil.service(opts, initfn=service.init, runfn=service.run)
40 cmdutil.service(opts, initfn=service.init, runfn=service.run)
41
41
42 def debuginotify(ui, repo, **opts):
42 def debuginotify(ui, repo, **opts):
43 '''debugging information for inotify extension
43 '''debugging information for inotify extension
44
44
45 Prints the list of directories being watched by the inotify server.
45 Prints the list of directories being watched by the inotify server.
46 '''
46 '''
47 cli = client(ui, repo)
47 cli = client(ui, repo)
48 response = cli.debugquery()
48 response = cli.debugquery()
49
49
50 ui.write(_('directories being watched:\n'))
50 ui.write(_('directories being watched:\n'))
51 for path in response:
51 for path in response:
52 ui.write((' %s/\n') % path)
52 ui.write((' %s/\n') % path)
53
53
54 def reposetup(ui, repo):
54 def reposetup(ui, repo):
55 if not hasattr(repo, 'dirstate'):
55 if not hasattr(repo, 'dirstate'):
56 return
56 return
57
57
58 # XXX: weakref until hg stops relying on __del__
58 # XXX: weakref until hg stops relying on __del__
59 repo = proxy(repo)
59 repo = proxy(repo)
60
60
61 class inotifydirstate(repo.dirstate.__class__):
61 class inotifydirstate(repo.dirstate.__class__):
62
62
63 # We'll set this to false after an unsuccessful attempt so that
63 # We'll set this to false after an unsuccessful attempt so that
64 # next calls of status() within the same instance don't try again
64 # next calls of status() within the same instance don't try again
65 # to start an inotify server if it won't start.
65 # to start an inotify server if it won't start.
66 _inotifyon = True
66 _inotifyon = True
67
67
68 def status(self, match, ignored, clean, unknown=True):
68 def status(self, match, ignored, clean, unknown=True):
69 files = match.files()
69 files = match.files()
70 if '.' in files:
70 if '.' in files:
71 files = []
71 files = []
72 if self._inotifyon and not ignored:
72 if self._inotifyon and not ignored:
73 cli = client(ui, repo)
73 cli = client(ui, repo)
74 try:
74 try:
75 result = cli.statusquery(files, match, False,
75 result = cli.statusquery(files, match, False,
76 clean, unknown)
76 clean, unknown)
77 except QueryFailed, instr:
77 except QueryFailed, instr:
78 ui.debug(str(instr))
78 ui.debug(str(instr))
79 # don't retry within the same hg instance
79 # don't retry within the same hg instance
80 inotifydirstate._inotifyon = False
80 inotifydirstate._inotifyon = False
81 pass
81 pass
82 else:
82 else:
83 if ui.config('inotify', 'debug'):
83 if ui.config('inotify', 'debug'):
84 r2 = super(inotifydirstate, self).status(
84 r2 = super(inotifydirstate, self).status(
85 match, False, clean, unknown)
85 match, False, clean, unknown)
86 for c,a,b in zip('LMARDUIC', result, r2):
86 for c,a,b in zip('LMARDUIC', result, r2):
87 for f in a:
87 for f in a:
88 if f not in b:
88 if f not in b:
89 ui.warn('*** inotify: %s +%s\n' % (c, f))
89 ui.warn('*** inotify: %s +%s\n' % (c, f))
90 for f in b:
90 for f in b:
91 if f not in a:
91 if f not in a:
92 ui.warn('*** inotify: %s -%s\n' % (c, f))
92 ui.warn('*** inotify: %s -%s\n' % (c, f))
93 result = r2
93 result = r2
94 return result
94 return result
95 return super(inotifydirstate, self).status(
95 return super(inotifydirstate, self).status(
96 match, ignored, clean, unknown)
96 match, ignored, clean, unknown)
97
97
98 repo.dirstate.__class__ = inotifydirstate
98 repo.dirstate.__class__ = inotifydirstate
99
99
100 cmdtable = {
100 cmdtable = {
101 'debuginotify':
101 'debuginotify':
102 (debuginotify, [], ('hg debuginotify')),
102 (debuginotify, [], ('hg debuginotify')),
103 '^inserve':
103 '^inserve':
104 (serve,
104 (serve,
105 [('d', 'daemon', None, _('run server in background')),
105 [('d', 'daemon', None, _('run server in background')),
106 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
106 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
107 ('t', 'idle-timeout', '', _('minutes to sit idle before exiting')),
107 ('t', 'idle-timeout', '', _('minutes to sit idle before exiting')),
108 ('', 'pid-file', '', _('name of file to write process ID to'))],
108 ('', 'pid-file', '', _('name of file to write process ID to'))],
109 _('hg inserve [OPT]...')),
109 _('hg inserve [OPT]...')),
110 }
110 }
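reposetup() above relies on swapping the dirstate's class at runtime so status() can be intercepted, falling back to the original implementation when the inotify server cannot be reached. A self-contained toy sketch of that pattern (plain classes, not Mercurial objects):

    class Base(object):
        def status(self):
            return "slow path"

    obj = Base()

    class Accelerated(obj.__class__):
        def status(self):
            # where inotifydirstate would ask the inotify server; pretend the
            # query failed and fall back to the original implementation
            fast_available = False
            if fast_available:
                return "fast path"
            return super(Accelerated, self).status()

    obj.__class__ = Accelerated
    print(obj.status())    # -> "slow path", served by the original class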
@@ -1,2630 +1,2630
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''patch management and development
8 '''patch management and development
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use "hg help command" for more details):
17 Common tasks (use "hg help command" for more details):
18
18
19 prepare repository to work with patches qinit
19 prepare repository to work with patches qinit
20 create new patch qnew
20 create new patch qnew
21 import existing patch qimport
21 import existing patch qimport
22
22
23 print patch series qseries
23 print patch series qseries
24 print applied patches qapplied
24 print applied patches qapplied
25 print name of top applied patch qtop
25 print name of top applied patch qtop
26
26
27 add known patch to applied stack qpush
27 add known patch to applied stack qpush
28 remove patch from applied stack qpop
28 remove patch from applied stack qpop
29 refresh contents of top applied patch qrefresh
29 refresh contents of top applied patch qrefresh
30 '''
30 '''
31
31
32 from mercurial.i18n import _
32 from mercurial.i18n import _
33 from mercurial.node import bin, hex, short, nullid, nullrev
33 from mercurial.node import bin, hex, short, nullid, nullrev
34 from mercurial.lock import release
34 from mercurial.lock import release
35 from mercurial import commands, cmdutil, hg, patch, util
35 from mercurial import commands, cmdutil, hg, patch, util
36 from mercurial import repair, extensions, url, error
36 from mercurial import repair, extensions, url, error
37 import os, sys, re, errno
37 import os, sys, re, errno
38
38
39 commands.norepo += " qclone"
39 commands.norepo += " qclone"
40
40
41 # Patch names look like unix-file names.
41 # Patch names look like unix-file names.
42 # They must be joinable with the queue directory and result in the patch path.
42 # They must be joinable with the queue directory and result in the patch path.
43 normname = util.normpath
43 normname = util.normpath
44
44
45 class statusentry:
45 class statusentry(object):
46 def __init__(self, rev, name=None):
46 def __init__(self, rev, name=None):
47 if not name:
47 if not name:
48 fields = rev.split(':', 1)
48 fields = rev.split(':', 1)
49 if len(fields) == 2:
49 if len(fields) == 2:
50 self.rev, self.name = fields
50 self.rev, self.name = fields
51 else:
51 else:
52 self.rev, self.name = None, None
52 self.rev, self.name = None, None
53 else:
53 else:
54 self.rev, self.name = rev, name
54 self.rev, self.name = rev, name
55
55
56 def __str__(self):
56 def __str__(self):
57 return self.rev + ':' + self.name
57 return self.rev + ':' + self.name
58
58
59 class patchheader(object):
59 class patchheader(object):
60 def __init__(self, pf):
60 def __init__(self, pf):
61 def eatdiff(lines):
61 def eatdiff(lines):
62 while lines:
62 while lines:
63 l = lines[-1]
63 l = lines[-1]
64 if (l.startswith("diff -") or
64 if (l.startswith("diff -") or
65 l.startswith("Index:") or
65 l.startswith("Index:") or
66 l.startswith("===========")):
66 l.startswith("===========")):
67 del lines[-1]
67 del lines[-1]
68 else:
68 else:
69 break
69 break
70 def eatempty(lines):
70 def eatempty(lines):
71 while lines:
71 while lines:
72 l = lines[-1]
72 l = lines[-1]
73 if re.match('\s*$', l):
73 if re.match('\s*$', l):
74 del lines[-1]
74 del lines[-1]
75 else:
75 else:
76 break
76 break
77
77
78 message = []
78 message = []
79 comments = []
79 comments = []
80 user = None
80 user = None
81 date = None
81 date = None
82 format = None
82 format = None
83 subject = None
83 subject = None
84 diffstart = 0
84 diffstart = 0
85
85
86 for line in file(pf):
86 for line in file(pf):
87 line = line.rstrip()
87 line = line.rstrip()
88 if line.startswith('diff --git'):
88 if line.startswith('diff --git'):
89 diffstart = 2
89 diffstart = 2
90 break
90 break
91 if diffstart:
91 if diffstart:
92 if line.startswith('+++ '):
92 if line.startswith('+++ '):
93 diffstart = 2
93 diffstart = 2
94 break
94 break
95 if line.startswith("--- "):
95 if line.startswith("--- "):
96 diffstart = 1
96 diffstart = 1
97 continue
97 continue
98 elif format == "hgpatch":
98 elif format == "hgpatch":
99 # parse values when importing the result of an hg export
99 # parse values when importing the result of an hg export
100 if line.startswith("# User "):
100 if line.startswith("# User "):
101 user = line[7:]
101 user = line[7:]
102 elif line.startswith("# Date "):
102 elif line.startswith("# Date "):
103 date = line[7:]
103 date = line[7:]
104 elif not line.startswith("# ") and line:
104 elif not line.startswith("# ") and line:
105 message.append(line)
105 message.append(line)
106 format = None
106 format = None
107 elif line == '# HG changeset patch':
107 elif line == '# HG changeset patch':
108 format = "hgpatch"
108 format = "hgpatch"
109 elif (format != "tagdone" and (line.startswith("Subject: ") or
109 elif (format != "tagdone" and (line.startswith("Subject: ") or
110 line.startswith("subject: "))):
110 line.startswith("subject: "))):
111 subject = line[9:]
111 subject = line[9:]
112 format = "tag"
112 format = "tag"
113 elif (format != "tagdone" and (line.startswith("From: ") or
113 elif (format != "tagdone" and (line.startswith("From: ") or
114 line.startswith("from: "))):
114 line.startswith("from: "))):
115 user = line[6:]
115 user = line[6:]
116 format = "tag"
116 format = "tag"
117 elif format == "tag" and line == "":
117 elif format == "tag" and line == "":
118 # when looking for tags (subject: from: etc) they
118 # when looking for tags (subject: from: etc) they
119 # end once you find a blank line in the source
119 # end once you find a blank line in the source
120 format = "tagdone"
120 format = "tagdone"
121 elif message or line:
121 elif message or line:
122 message.append(line)
122 message.append(line)
123 comments.append(line)
123 comments.append(line)
124
124
125 eatdiff(message)
125 eatdiff(message)
126 eatdiff(comments)
126 eatdiff(comments)
127 eatempty(message)
127 eatempty(message)
128 eatempty(comments)
128 eatempty(comments)
129
129
130 # make sure message isn't empty
130 # make sure message isn't empty
131 if format and format.startswith("tag") and subject:
131 if format and format.startswith("tag") and subject:
132 message.insert(0, "")
132 message.insert(0, "")
133 message.insert(0, subject)
133 message.insert(0, subject)
134
134
135 self.message = message
135 self.message = message
136 self.comments = comments
136 self.comments = comments
137 self.user = user
137 self.user = user
138 self.date = date
138 self.date = date
139 self.haspatch = diffstart > 1
139 self.haspatch = diffstart > 1
140
140
141 def setuser(self, user):
141 def setuser(self, user):
142 if not self.updateheader(['From: ', '# User '], user):
142 if not self.updateheader(['From: ', '# User '], user):
143 try:
143 try:
144 patchheaderat = self.comments.index('# HG changeset patch')
144 patchheaderat = self.comments.index('# HG changeset patch')
145 self.comments.insert(patchheaderat + 1,'# User ' + user)
145 self.comments.insert(patchheaderat + 1,'# User ' + user)
146 except ValueError:
146 except ValueError:
147 self.comments = ['From: ' + user, ''] + self.comments
147 self.comments = ['From: ' + user, ''] + self.comments
148 self.user = user
148 self.user = user
149
149
150 def setdate(self, date):
150 def setdate(self, date):
151 if self.updateheader(['# Date '], date):
151 if self.updateheader(['# Date '], date):
152 self.date = date
152 self.date = date
153
153
154 def setmessage(self, message):
154 def setmessage(self, message):
155 if self.comments:
155 if self.comments:
156 self._delmsg()
156 self._delmsg()
157 self.message = [message]
157 self.message = [message]
158 self.comments += self.message
158 self.comments += self.message
159
159
160 def updateheader(self, prefixes, new):
160 def updateheader(self, prefixes, new):
161 '''Update all references to a field in the patch header.
161 '''Update all references to a field in the patch header.
162 Return whether the field is present.'''
162 Return whether the field is present.'''
163 res = False
163 res = False
164 for prefix in prefixes:
164 for prefix in prefixes:
165 for i in xrange(len(self.comments)):
165 for i in xrange(len(self.comments)):
166 if self.comments[i].startswith(prefix):
166 if self.comments[i].startswith(prefix):
167 self.comments[i] = prefix + new
167 self.comments[i] = prefix + new
168 res = True
168 res = True
169 break
169 break
170 return res
170 return res
171
171
172 def __str__(self):
172 def __str__(self):
173 if not self.comments:
173 if not self.comments:
174 return ''
174 return ''
175 return '\n'.join(self.comments) + '\n\n'
175 return '\n'.join(self.comments) + '\n\n'
176
176
177 def _delmsg(self):
177 def _delmsg(self):
178 '''Remove existing message, keeping the rest of the comments fields.
178 '''Remove existing message, keeping the rest of the comments fields.
179 If comments contains 'subject: ', message will prepend
179 If comments contains 'subject: ', message will prepend
180 the field and a blank line.'''
180 the field and a blank line.'''
181 if self.message:
181 if self.message:
182 subj = 'subject: ' + self.message[0].lower()
182 subj = 'subject: ' + self.message[0].lower()
183 for i in xrange(len(self.comments)):
183 for i in xrange(len(self.comments)):
184 if subj == self.comments[i].lower():
184 if subj == self.comments[i].lower():
185 del self.comments[i]
185 del self.comments[i]
186 self.message = self.message[2:]
186 self.message = self.message[2:]
187 break
187 break
188 ci = 0
188 ci = 0
189 for mi in self.message:
189 for mi in self.message:
190 while mi != self.comments[ci]:
190 while mi != self.comments[ci]:
191 ci += 1
191 ci += 1
192 del self.comments[ci]
192 del self.comments[ci]
193
193
194 class queue:
194 class queue(object):
195 def __init__(self, ui, path, patchdir=None):
195 def __init__(self, ui, path, patchdir=None):
196 self.basepath = path
196 self.basepath = path
197 self.path = patchdir or os.path.join(path, "patches")
197 self.path = patchdir or os.path.join(path, "patches")
198 self.opener = util.opener(self.path)
198 self.opener = util.opener(self.path)
199 self.ui = ui
199 self.ui = ui
200 self.applied_dirty = 0
200 self.applied_dirty = 0
201 self.series_dirty = 0
201 self.series_dirty = 0
202 self.series_path = "series"
202 self.series_path = "series"
203 self.status_path = "status"
203 self.status_path = "status"
204 self.guards_path = "guards"
204 self.guards_path = "guards"
205 self.active_guards = None
205 self.active_guards = None
206 self.guards_dirty = False
206 self.guards_dirty = False
207 self._diffopts = None
207 self._diffopts = None
208
208
209 @util.propertycache
209 @util.propertycache
210 def applied(self):
210 def applied(self):
211 if os.path.exists(self.join(self.status_path)):
211 if os.path.exists(self.join(self.status_path)):
212 lines = self.opener(self.status_path).read().splitlines()
212 lines = self.opener(self.status_path).read().splitlines()
213 return [statusentry(l) for l in lines]
213 return [statusentry(l) for l in lines]
214 return []
214 return []
215
215
216 @util.propertycache
216 @util.propertycache
217 def full_series(self):
217 def full_series(self):
218 if os.path.exists(self.join(self.series_path)):
218 if os.path.exists(self.join(self.series_path)):
219 return self.opener(self.series_path).read().splitlines()
219 return self.opener(self.series_path).read().splitlines()
220 return []
220 return []
221
221
222 @util.propertycache
222 @util.propertycache
223 def series(self):
223 def series(self):
224 self.parse_series()
224 self.parse_series()
225 return self.series
225 return self.series
226
226
227 @util.propertycache
227 @util.propertycache
228 def series_guards(self):
228 def series_guards(self):
229 self.parse_series()
229 self.parse_series()
230 return self.series_guards
230 return self.series_guards
231
231
232 def invalidate(self):
232 def invalidate(self):
233 for a in 'applied full_series series series_guards'.split():
233 for a in 'applied full_series series series_guards'.split():
234 if a in self.__dict__:
234 if a in self.__dict__:
235 delattr(self, a)
235 delattr(self, a)
236 self.applied_dirty = 0
236 self.applied_dirty = 0
237 self.series_dirty = 0
237 self.series_dirty = 0
238 self.guards_dirty = False
238 self.guards_dirty = False
239 self.active_guards = None
239 self.active_guards = None
240
240
241 def diffopts(self):
241 def diffopts(self):
242 if self._diffopts is None:
242 if self._diffopts is None:
243 self._diffopts = patch.diffopts(self.ui)
243 self._diffopts = patch.diffopts(self.ui)
244 return self._diffopts
244 return self._diffopts
245
245
246 def join(self, *p):
246 def join(self, *p):
247 return os.path.join(self.path, *p)
247 return os.path.join(self.path, *p)
248
248
249 def find_series(self, patch):
249 def find_series(self, patch):
250 pre = re.compile("(\s*)([^#]+)")
250 pre = re.compile("(\s*)([^#]+)")
251 index = 0
251 index = 0
252 for l in self.full_series:
252 for l in self.full_series:
253 m = pre.match(l)
253 m = pre.match(l)
254 if m:
254 if m:
255 s = m.group(2)
255 s = m.group(2)
256 s = s.rstrip()
256 s = s.rstrip()
257 if s == patch:
257 if s == patch:
258 return index
258 return index
259 index += 1
259 index += 1
260 return None
260 return None
261
261
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
262 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
263
263
264 def parse_series(self):
264 def parse_series(self):
265 self.series = []
265 self.series = []
266 self.series_guards = []
266 self.series_guards = []
267 for l in self.full_series:
267 for l in self.full_series:
268 h = l.find('#')
268 h = l.find('#')
269 if h == -1:
269 if h == -1:
270 patch = l
270 patch = l
271 comment = ''
271 comment = ''
272 elif h == 0:
272 elif h == 0:
273 continue
273 continue
274 else:
274 else:
275 patch = l[:h]
275 patch = l[:h]
276 comment = l[h:]
276 comment = l[h:]
277 patch = patch.strip()
277 patch = patch.strip()
278 if patch:
278 if patch:
279 if patch in self.series:
279 if patch in self.series:
280 raise util.Abort(_('%s appears more than once in %s') %
280 raise util.Abort(_('%s appears more than once in %s') %
281 (patch, self.join(self.series_path)))
281 (patch, self.join(self.series_path)))
282 self.series.append(patch)
282 self.series.append(patch)
283 self.series_guards.append(self.guard_re.findall(comment))
283 self.series_guards.append(self.guard_re.findall(comment))
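Guards trail a patch name in the series file as "#+name" or "#-name" comments; the guard_re class attribute above extracts them. A tiny illustration (assuming the queue class is in scope):

    comment = ' #+stable #-experimental'
    queue.guard_re.findall(comment)    # -> ['+stable', '-experimental']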
284
284
285 def check_guard(self, guard):
285 def check_guard(self, guard):
286 if not guard:
286 if not guard:
287 return _('guard cannot be an empty string')
287 return _('guard cannot be an empty string')
288 bad_chars = '# \t\r\n\f'
288 bad_chars = '# \t\r\n\f'
289 first = guard[0]
289 first = guard[0]
290 if first in '-+':
290 if first in '-+':
291 return (_('guard %r starts with invalid character: %r') %
291 return (_('guard %r starts with invalid character: %r') %
292 (guard, first))
292 (guard, first))
293 for c in bad_chars:
293 for c in bad_chars:
294 if c in guard:
294 if c in guard:
295 return _('invalid character in guard %r: %r') % (guard, c)
295 return _('invalid character in guard %r: %r') % (guard, c)
296
296
297 def set_active(self, guards):
297 def set_active(self, guards):
298 for guard in guards:
298 for guard in guards:
299 bad = self.check_guard(guard)
299 bad = self.check_guard(guard)
300 if bad:
300 if bad:
301 raise util.Abort(bad)
301 raise util.Abort(bad)
302 guards = sorted(set(guards))
302 guards = sorted(set(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
303 self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
304 self.active_guards = guards
304 self.active_guards = guards
305 self.guards_dirty = True
305 self.guards_dirty = True
306
306
307 def active(self):
307 def active(self):
308 if self.active_guards is None:
308 if self.active_guards is None:
309 self.active_guards = []
309 self.active_guards = []
310 try:
310 try:
311 guards = self.opener(self.guards_path).read().split()
311 guards = self.opener(self.guards_path).read().split()
312 except IOError, err:
312 except IOError, err:
313 if err.errno != errno.ENOENT: raise
313 if err.errno != errno.ENOENT: raise
314 guards = []
314 guards = []
315 for i, guard in enumerate(guards):
315 for i, guard in enumerate(guards):
316 bad = self.check_guard(guard)
316 bad = self.check_guard(guard)
317 if bad:
317 if bad:
318 self.ui.warn('%s:%d: %s\n' %
318 self.ui.warn('%s:%d: %s\n' %
319 (self.join(self.guards_path), i + 1, bad))
319 (self.join(self.guards_path), i + 1, bad))
320 else:
320 else:
321 self.active_guards.append(guard)
321 self.active_guards.append(guard)
322 return self.active_guards
322 return self.active_guards
323
323
324 def set_guards(self, idx, guards):
324 def set_guards(self, idx, guards):
325 for g in guards:
325 for g in guards:
326 if len(g) < 2:
326 if len(g) < 2:
327 raise util.Abort(_('guard %r too short') % g)
327 raise util.Abort(_('guard %r too short') % g)
328 if g[0] not in '-+':
328 if g[0] not in '-+':
329 raise util.Abort(_('guard %r starts with invalid char') % g)
329 raise util.Abort(_('guard %r starts with invalid char') % g)
330 bad = self.check_guard(g[1:])
330 bad = self.check_guard(g[1:])
331 if bad:
331 if bad:
332 raise util.Abort(bad)
332 raise util.Abort(bad)
333 drop = self.guard_re.sub('', self.full_series[idx])
333 drop = self.guard_re.sub('', self.full_series[idx])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
334 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
335 self.parse_series()
335 self.parse_series()
336 self.series_dirty = True
336 self.series_dirty = True
337
337
338 def pushable(self, idx):
338 def pushable(self, idx):
339 if isinstance(idx, str):
339 if isinstance(idx, str):
340 idx = self.series.index(idx)
340 idx = self.series.index(idx)
341 patchguards = self.series_guards[idx]
341 patchguards = self.series_guards[idx]
342 if not patchguards:
342 if not patchguards:
343 return True, None
343 return True, None
344 guards = self.active()
344 guards = self.active()
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
345 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
346 if exactneg:
346 if exactneg:
347 return False, exactneg[0]
347 return False, exactneg[0]
348 pos = [g for g in patchguards if g[0] == '+']
348 pos = [g for g in patchguards if g[0] == '+']
349 exactpos = [g for g in pos if g[1:] in guards]
349 exactpos = [g for g in pos if g[1:] in guards]
350 if pos:
350 if pos:
351 if exactpos:
351 if exactpos:
352 return True, exactpos[0]
352 return True, exactpos[0]
353 return False, pos
353 return False, pos
354 return True, ''
354 return True, ''
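Distilled from pushable() above, the decision rule is: any active negative guard vetoes the patch, and if positive guards exist, at least one of them must be active. A hedged, standalone sketch of just that rule (not the method itself, which also reports why):

    def is_pushable(patchguards, active):
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in active]
        if exactneg:
            return False
        pos = [g for g in patchguards if g[0] == '+']
        if pos:
            return bool([g for g in pos if g[1:] in active])
        return True

    # is_pushable(['+stable'], ['stable'])        -> True
    # is_pushable(['+stable'], ['experimental'])  -> False
    # is_pushable(['-broken'], ['broken'])        -> False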
355
355
356 def explain_pushable(self, idx, all_patches=False):
356 def explain_pushable(self, idx, all_patches=False):
357 write = all_patches and self.ui.write or self.ui.warn
357 write = all_patches and self.ui.write or self.ui.warn
358 if all_patches or self.ui.verbose:
358 if all_patches or self.ui.verbose:
359 if isinstance(idx, str):
359 if isinstance(idx, str):
360 idx = self.series.index(idx)
360 idx = self.series.index(idx)
361 pushable, why = self.pushable(idx)
361 pushable, why = self.pushable(idx)
362 if all_patches and pushable:
362 if all_patches and pushable:
363 if why is None:
363 if why is None:
364 write(_('allowing %s - no guards in effect\n') %
364 write(_('allowing %s - no guards in effect\n') %
365 self.series[idx])
365 self.series[idx])
366 else:
366 else:
367 if not why:
367 if not why:
368 write(_('allowing %s - no matching negative guards\n') %
368 write(_('allowing %s - no matching negative guards\n') %
369 self.series[idx])
369 self.series[idx])
370 else:
370 else:
371 write(_('allowing %s - guarded by %r\n') %
371 write(_('allowing %s - guarded by %r\n') %
372 (self.series[idx], why))
372 (self.series[idx], why))
373 if not pushable:
373 if not pushable:
374 if why:
374 if why:
375 write(_('skipping %s - guarded by %r\n') %
375 write(_('skipping %s - guarded by %r\n') %
376 (self.series[idx], why))
376 (self.series[idx], why))
377 else:
377 else:
378 write(_('skipping %s - no matching guards\n') %
378 write(_('skipping %s - no matching guards\n') %
379 self.series[idx])
379 self.series[idx])
380
380
381 def save_dirty(self):
381 def save_dirty(self):
382 def write_list(items, path):
382 def write_list(items, path):
383 fp = self.opener(path, 'w')
383 fp = self.opener(path, 'w')
384 for i in items:
384 for i in items:
385 fp.write("%s\n" % i)
385 fp.write("%s\n" % i)
386 fp.close()
386 fp.close()
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
387 if self.applied_dirty: write_list(map(str, self.applied), self.status_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
388 if self.series_dirty: write_list(self.full_series, self.series_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
389 if self.guards_dirty: write_list(self.active_guards, self.guards_path)
390
390
391 def removeundo(self, repo):
391 def removeundo(self, repo):
392 undo = repo.sjoin('undo')
392 undo = repo.sjoin('undo')
393 if not os.path.exists(undo):
393 if not os.path.exists(undo):
394 return
394 return
395 try:
395 try:
396 os.unlink(undo)
396 os.unlink(undo)
397 except OSError, inst:
397 except OSError, inst:
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
398 self.ui.warn(_('error removing undo: %s\n') % str(inst))
399
399
400 def printdiff(self, repo, node1, node2=None, files=None,
400 def printdiff(self, repo, node1, node2=None, files=None,
401 fp=None, changes=None, opts={}):
401 fp=None, changes=None, opts={}):
402 m = cmdutil.match(repo, files, opts)
402 m = cmdutil.match(repo, files, opts)
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
403 chunks = patch.diff(repo, node1, node2, m, changes, self.diffopts())
404 write = fp is None and repo.ui.write or fp.write
404 write = fp is None and repo.ui.write or fp.write
405 for chunk in chunks:
405 for chunk in chunks:
406 write(chunk)
406 write(chunk)
407
407
408 def mergeone(self, repo, mergeq, head, patch, rev):
408 def mergeone(self, repo, mergeq, head, patch, rev):
409 # first try just applying the patch
409 # first try just applying the patch
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
410 (err, n) = self.apply(repo, [ patch ], update_status=False,
411 strict=True, merge=rev)
411 strict=True, merge=rev)
412
412
413 if err == 0:
413 if err == 0:
414 return (err, n)
414 return (err, n)
415
415
416 if n is None:
416 if n is None:
417 raise util.Abort(_("apply failed for patch %s") % patch)
417 raise util.Abort(_("apply failed for patch %s") % patch)
418
418
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
419 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
420
420
421 # apply failed, strip away that rev and merge.
421 # apply failed, strip away that rev and merge.
422 hg.clean(repo, head)
422 hg.clean(repo, head)
423 self.strip(repo, n, update=False, backup='strip')
423 self.strip(repo, n, update=False, backup='strip')
424
424
425 ctx = repo[rev]
425 ctx = repo[rev]
426 ret = hg.merge(repo, rev)
426 ret = hg.merge(repo, rev)
427 if ret:
427 if ret:
428 raise util.Abort(_("update returned %d") % ret)
428 raise util.Abort(_("update returned %d") % ret)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
429 n = repo.commit(ctx.description(), ctx.user(), force=True)
430 if n is None:
430 if n is None:
431 raise util.Abort(_("repo commit failed"))
431 raise util.Abort(_("repo commit failed"))
432 try:
432 try:
433 ph = patchheader(mergeq.join(patch))
433 ph = patchheader(mergeq.join(patch))
434 except:
434 except:
435 raise util.Abort(_("unable to read %s") % patch)
435 raise util.Abort(_("unable to read %s") % patch)
436
436
437 patchf = self.opener(patch, "w")
437 patchf = self.opener(patch, "w")
438 comments = str(ph)
438 comments = str(ph)
439 if comments:
439 if comments:
440 patchf.write(comments)
440 patchf.write(comments)
441 self.printdiff(repo, head, n, fp=patchf)
441 self.printdiff(repo, head, n, fp=patchf)
442 patchf.close()
442 patchf.close()
443 self.removeundo(repo)
443 self.removeundo(repo)
444 return (0, n)
444 return (0, n)
445
445
446 def qparents(self, repo, rev=None):
446 def qparents(self, repo, rev=None):
447 if rev is None:
447 if rev is None:
448 (p1, p2) = repo.dirstate.parents()
448 (p1, p2) = repo.dirstate.parents()
449 if p2 == nullid:
449 if p2 == nullid:
450 return p1
450 return p1
451 if len(self.applied) == 0:
451 if len(self.applied) == 0:
452 return None
452 return None
453 return bin(self.applied[-1].rev)
453 return bin(self.applied[-1].rev)
454 pp = repo.changelog.parents(rev)
454 pp = repo.changelog.parents(rev)
455 if pp[1] != nullid:
455 if pp[1] != nullid:
456 arevs = [ x.rev for x in self.applied ]
456 arevs = [ x.rev for x in self.applied ]
457 p0 = hex(pp[0])
457 p0 = hex(pp[0])
458 p1 = hex(pp[1])
458 p1 = hex(pp[1])
459 if p0 in arevs:
459 if p0 in arevs:
460 return pp[0]
460 return pp[0]
461 if p1 in arevs:
461 if p1 in arevs:
462 return pp[1]
462 return pp[1]
463 return pp[0]
463 return pp[0]
464
464
465 def mergepatch(self, repo, mergeq, series):
465 def mergepatch(self, repo, mergeq, series):
466 if len(self.applied) == 0:
466 if len(self.applied) == 0:
467 # each of the patches merged in will have two parents. This
467 # each of the patches merged in will have two parents. This
468 # can confuse the qrefresh, qdiff, and strip code because it
468 # can confuse the qrefresh, qdiff, and strip code because it
469 # needs to know which parent is actually in the patch queue.
469 # needs to know which parent is actually in the patch queue.
470 # so, we insert a merge marker with only one parent. This way
470 # so, we insert a merge marker with only one parent. This way
471 # the first patch in the queue is never a merge patch
471 # the first patch in the queue is never a merge patch
472 #
472 #
473 pname = ".hg.patches.merge.marker"
473 pname = ".hg.patches.merge.marker"
474 n = repo.commit('[mq]: merge marker', force=True)
474 n = repo.commit('[mq]: merge marker', force=True)
475 self.removeundo(repo)
475 self.removeundo(repo)
476 self.applied.append(statusentry(hex(n), pname))
476 self.applied.append(statusentry(hex(n), pname))
477 self.applied_dirty = 1
477 self.applied_dirty = 1
478
478
479 head = self.qparents(repo)
479 head = self.qparents(repo)
480
480
481 for patch in series:
481 for patch in series:
482 patch = mergeq.lookup(patch, strict=True)
482 patch = mergeq.lookup(patch, strict=True)
483 if not patch:
483 if not patch:
484 self.ui.warn(_("patch %s does not exist\n") % patch)
484 self.ui.warn(_("patch %s does not exist\n") % patch)
485 return (1, None)
485 return (1, None)
486 pushable, reason = self.pushable(patch)
486 pushable, reason = self.pushable(patch)
487 if not pushable:
487 if not pushable:
488 self.explain_pushable(patch, all_patches=True)
488 self.explain_pushable(patch, all_patches=True)
489 continue
489 continue
490 info = mergeq.isapplied(patch)
490 info = mergeq.isapplied(patch)
491 if not info:
491 if not info:
492 self.ui.warn(_("patch %s is not applied\n") % patch)
492 self.ui.warn(_("patch %s is not applied\n") % patch)
493 return (1, None)
493 return (1, None)
494 rev = bin(info[1])
494 rev = bin(info[1])
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
495 (err, head) = self.mergeone(repo, mergeq, head, patch, rev)
496 if head:
496 if head:
497 self.applied.append(statusentry(hex(head), patch))
497 self.applied.append(statusentry(hex(head), patch))
498 self.applied_dirty = 1
498 self.applied_dirty = 1
499 if err:
499 if err:
500 return (err, head)
500 return (err, head)
501 self.save_dirty()
501 self.save_dirty()
502 return (0, head)
502 return (0, head)
503
503
504 def patch(self, repo, patchfile):
504 def patch(self, repo, patchfile):
505 '''Apply patchfile to the working directory.
505 '''Apply patchfile to the working directory.
506 patchfile: name of patch file'''
506 patchfile: name of patch file'''
507 files = {}
507 files = {}
508 try:
508 try:
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
509 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
510 files=files)
510 files=files)
511 except Exception, inst:
511 except Exception, inst:
512 self.ui.note(str(inst) + '\n')
512 self.ui.note(str(inst) + '\n')
513 if not self.ui.verbose:
513 if not self.ui.verbose:
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
514 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
515 return (False, files, False)
515 return (False, files, False)
516
516
517 return (True, files, fuzz)
517 return (True, files, fuzz)
518
518
519 def apply(self, repo, series, list=False, update_status=True,
519 def apply(self, repo, series, list=False, update_status=True,
520 strict=False, patchdir=None, merge=None, all_files={}):
520 strict=False, patchdir=None, merge=None, all_files={}):
521 wlock = lock = tr = None
521 wlock = lock = tr = None
522 try:
522 try:
523 wlock = repo.wlock()
523 wlock = repo.wlock()
524 lock = repo.lock()
524 lock = repo.lock()
525 tr = repo.transaction()
525 tr = repo.transaction()
526 try:
526 try:
527 ret = self._apply(repo, series, list, update_status,
527 ret = self._apply(repo, series, list, update_status,
528 strict, patchdir, merge, all_files=all_files)
528 strict, patchdir, merge, all_files=all_files)
529 tr.close()
529 tr.close()
530 self.save_dirty()
530 self.save_dirty()
531 return ret
531 return ret
532 except:
532 except:
533 try:
533 try:
534 tr.abort()
534 tr.abort()
535 finally:
535 finally:
536 repo.invalidate()
536 repo.invalidate()
537 repo.dirstate.invalidate()
537 repo.dirstate.invalidate()
538 raise
538 raise
539 finally:
539 finally:
540 del tr
540 del tr
541 release(lock, wlock)
541 release(lock, wlock)
542 self.removeundo(repo)
542 self.removeundo(repo)
543
543
544 def _apply(self, repo, series, list=False, update_status=True,
544 def _apply(self, repo, series, list=False, update_status=True,
545 strict=False, patchdir=None, merge=None, all_files={}):
545 strict=False, patchdir=None, merge=None, all_files={}):
546 # TODO unify with commands.py
546 # TODO unify with commands.py
547 if not patchdir:
547 if not patchdir:
548 patchdir = self.path
548 patchdir = self.path
549 err = 0
549 err = 0
550 n = None
550 n = None
551 for patchname in series:
551 for patchname in series:
552 pushable, reason = self.pushable(patchname)
552 pushable, reason = self.pushable(patchname)
553 if not pushable:
553 if not pushable:
554 self.explain_pushable(patchname, all_patches=True)
554 self.explain_pushable(patchname, all_patches=True)
555 continue
555 continue
556 self.ui.warn(_("applying %s\n") % patchname)
556 self.ui.warn(_("applying %s\n") % patchname)
557 pf = os.path.join(patchdir, patchname)
557 pf = os.path.join(patchdir, patchname)
558
558
559 try:
559 try:
560 ph = patchheader(self.join(patchname))
560 ph = patchheader(self.join(patchname))
561 except:
561 except:
562 self.ui.warn(_("Unable to read %s\n") % patchname)
562 self.ui.warn(_("Unable to read %s\n") % patchname)
563 err = 1
563 err = 1
564 break
564 break
565
565
566 message = ph.message
566 message = ph.message
567 if not message:
567 if not message:
568 message = _("imported patch %s\n") % patchname
568 message = _("imported patch %s\n") % patchname
569 else:
569 else:
570 if list:
570 if list:
571 message.append(_("\nimported patch %s") % patchname)
571 message.append(_("\nimported patch %s") % patchname)
572 message = '\n'.join(message)
572 message = '\n'.join(message)
573
573
574 if ph.haspatch:
574 if ph.haspatch:
575 (patcherr, files, fuzz) = self.patch(repo, pf)
575 (patcherr, files, fuzz) = self.patch(repo, pf)
576 all_files.update(files)
576 all_files.update(files)
577 patcherr = not patcherr
577 patcherr = not patcherr
578 else:
578 else:
579 self.ui.warn(_("patch %s is empty\n") % patchname)
579 self.ui.warn(_("patch %s is empty\n") % patchname)
580 patcherr, files, fuzz = 0, [], 0
580 patcherr, files, fuzz = 0, [], 0
581
581
582 if merge and files:
582 if merge and files:
583 # Mark as removed/merged and update dirstate parent info
583 # Mark as removed/merged and update dirstate parent info
584 removed = []
584 removed = []
585 merged = []
585 merged = []
586 for f in files:
586 for f in files:
587 if os.path.exists(repo.wjoin(f)):
587 if os.path.exists(repo.wjoin(f)):
588 merged.append(f)
588 merged.append(f)
589 else:
589 else:
590 removed.append(f)
590 removed.append(f)
591 for f in removed:
591 for f in removed:
592 repo.dirstate.remove(f)
592 repo.dirstate.remove(f)
593 for f in merged:
593 for f in merged:
594 repo.dirstate.merge(f)
594 repo.dirstate.merge(f)
595 p1, p2 = repo.dirstate.parents()
595 p1, p2 = repo.dirstate.parents()
596 repo.dirstate.setparents(p1, merge)
596 repo.dirstate.setparents(p1, merge)
597
597
598 files = patch.updatedir(self.ui, repo, files)
598 files = patch.updatedir(self.ui, repo, files)
599 match = cmdutil.matchfiles(repo, files or [])
599 match = cmdutil.matchfiles(repo, files or [])
600 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
600 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
601
601
602 if n is None:
602 if n is None:
603 raise util.Abort(_("repo commit failed"))
603 raise util.Abort(_("repo commit failed"))
604
604
605 if update_status:
605 if update_status:
606 self.applied.append(statusentry(hex(n), patchname))
606 self.applied.append(statusentry(hex(n), patchname))
607
607
608 if patcherr:
608 if patcherr:
609 self.ui.warn(_("patch failed, rejects left in working dir\n"))
609 self.ui.warn(_("patch failed, rejects left in working dir\n"))
610 err = 1
610 err = 1
611 break
611 break
612
612
613 if fuzz and strict:
613 if fuzz and strict:
614 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
614 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
615 err = 1
615 err = 1
616 break
616 break
617 return (err, n)
617 return (err, n)
618
618
619 def _clean_series(self, patches):
619 def _clean_series(self, patches):
620 for i in sorted([self.find_series(p) for p in patches], reverse=True):
620 for i in sorted([self.find_series(p) for p in patches], reverse=True):
621 del self.full_series[i]
621 del self.full_series[i]
622 self.parse_series()
622 self.parse_series()
623 self.series_dirty = 1
623 self.series_dirty = 1
624
624
625 def finish(self, repo, revs):
625 def finish(self, repo, revs):
626 firstrev = repo[self.applied[0].rev].rev()
626 firstrev = repo[self.applied[0].rev].rev()
627 appliedbase = 0
627 appliedbase = 0
628 patches = []
628 patches = []
629 for rev in sorted(revs):
629 for rev in sorted(revs):
630 if rev < firstrev:
630 if rev < firstrev:
631 raise util.Abort(_('revision %d is not managed') % rev)
631 raise util.Abort(_('revision %d is not managed') % rev)
632 base = bin(self.applied[appliedbase].rev)
632 base = bin(self.applied[appliedbase].rev)
633 node = repo.changelog.node(rev)
633 node = repo.changelog.node(rev)
634 if node != base:
634 if node != base:
635 raise util.Abort(_('cannot delete revision %d above '
635 raise util.Abort(_('cannot delete revision %d above '
636 'applied patches') % rev)
636 'applied patches') % rev)
637 patches.append(self.applied[appliedbase].name)
637 patches.append(self.applied[appliedbase].name)
638 appliedbase += 1
638 appliedbase += 1
639
639
640 r = self.qrepo()
640 r = self.qrepo()
641 if r:
641 if r:
642 r.remove(patches, True)
642 r.remove(patches, True)
643 else:
643 else:
644 for p in patches:
644 for p in patches:
645 os.unlink(self.join(p))
645 os.unlink(self.join(p))
646
646
647 del self.applied[:appliedbase]
647 del self.applied[:appliedbase]
648 self.applied_dirty = 1
648 self.applied_dirty = 1
649 self._clean_series(patches)
649 self._clean_series(patches)
650
650
651 def delete(self, repo, patches, opts):
651 def delete(self, repo, patches, opts):
652 if not patches and not opts.get('rev'):
652 if not patches and not opts.get('rev'):
653 raise util.Abort(_('qdelete requires at least one revision or '
653 raise util.Abort(_('qdelete requires at least one revision or '
654 'patch name'))
654 'patch name'))
655
655
656 realpatches = []
656 realpatches = []
657 for patch in patches:
657 for patch in patches:
658 patch = self.lookup(patch, strict=True)
658 patch = self.lookup(patch, strict=True)
659 info = self.isapplied(patch)
659 info = self.isapplied(patch)
660 if info:
660 if info:
661 raise util.Abort(_("cannot delete applied patch %s") % patch)
661 raise util.Abort(_("cannot delete applied patch %s") % patch)
662 if patch not in self.series:
662 if patch not in self.series:
663 raise util.Abort(_("patch %s not in series file") % patch)
663 raise util.Abort(_("patch %s not in series file") % patch)
664 realpatches.append(patch)
664 realpatches.append(patch)
665
665
666 appliedbase = 0
666 appliedbase = 0
667 if opts.get('rev'):
667 if opts.get('rev'):
668 if not self.applied:
668 if not self.applied:
669 raise util.Abort(_('no patches applied'))
669 raise util.Abort(_('no patches applied'))
670 revs = cmdutil.revrange(repo, opts['rev'])
670 revs = cmdutil.revrange(repo, opts['rev'])
671 if len(revs) > 1 and revs[0] > revs[1]:
671 if len(revs) > 1 and revs[0] > revs[1]:
672 revs.reverse()
672 revs.reverse()
673 for rev in revs:
673 for rev in revs:
674 if appliedbase >= len(self.applied):
674 if appliedbase >= len(self.applied):
675 raise util.Abort(_("revision %d is not managed") % rev)
675 raise util.Abort(_("revision %d is not managed") % rev)
676
676
677 base = bin(self.applied[appliedbase].rev)
677 base = bin(self.applied[appliedbase].rev)
678 node = repo.changelog.node(rev)
678 node = repo.changelog.node(rev)
679 if node != base:
679 if node != base:
680 raise util.Abort(_("cannot delete revision %d above "
680 raise util.Abort(_("cannot delete revision %d above "
681 "applied patches") % rev)
681 "applied patches") % rev)
682 realpatches.append(self.applied[appliedbase].name)
682 realpatches.append(self.applied[appliedbase].name)
683 appliedbase += 1
683 appliedbase += 1
684
684
685 if not opts.get('keep'):
685 if not opts.get('keep'):
686 r = self.qrepo()
686 r = self.qrepo()
687 if r:
687 if r:
688 r.remove(realpatches, True)
688 r.remove(realpatches, True)
689 else:
689 else:
690 for p in realpatches:
690 for p in realpatches:
691 os.unlink(self.join(p))
691 os.unlink(self.join(p))
692
692
693 if appliedbase:
693 if appliedbase:
694 del self.applied[:appliedbase]
694 del self.applied[:appliedbase]
695 self.applied_dirty = 1
695 self.applied_dirty = 1
696 self._clean_series(realpatches)
696 self._clean_series(realpatches)
697
697
698 def check_toppatch(self, repo):
698 def check_toppatch(self, repo):
699 if len(self.applied) > 0:
699 if len(self.applied) > 0:
700 top = bin(self.applied[-1].rev)
700 top = bin(self.applied[-1].rev)
701 pp = repo.dirstate.parents()
701 pp = repo.dirstate.parents()
702 if top not in pp:
702 if top not in pp:
703 raise util.Abort(_("working directory revision is not qtip"))
703 raise util.Abort(_("working directory revision is not qtip"))
704 return top
704 return top
705 return None
705 return None
706 def check_localchanges(self, repo, force=False, refresh=True):
706 def check_localchanges(self, repo, force=False, refresh=True):
707 m, a, r, d = repo.status()[:4]
707 m, a, r, d = repo.status()[:4]
708 if m or a or r or d:
708 if m or a or r or d:
709 if not force:
709 if not force:
710 if refresh:
710 if refresh:
711 raise util.Abort(_("local changes found, refresh first"))
711 raise util.Abort(_("local changes found, refresh first"))
712 else:
712 else:
713 raise util.Abort(_("local changes found"))
713 raise util.Abort(_("local changes found"))
714 return m, a, r, d
714 return m, a, r, d
715
715
716 _reserved = ('series', 'status', 'guards')
716 _reserved = ('series', 'status', 'guards')
717 def check_reserved_name(self, name):
717 def check_reserved_name(self, name):
718 if (name in self._reserved or name.startswith('.hg')
718 if (name in self._reserved or name.startswith('.hg')
719 or name.startswith('.mq')):
719 or name.startswith('.mq')):
720 raise util.Abort(_('"%s" cannot be used as the name of a patch')
720 raise util.Abort(_('"%s" cannot be used as the name of a patch')
721 % name)
721 % name)
722
722
723 def new(self, repo, patchfn, *pats, **opts):
723 def new(self, repo, patchfn, *pats, **opts):
724 """options:
724 """options:
725 msg: a string or a no-argument function returning a string
725 msg: a string or a no-argument function returning a string
726 """
726 """
727 msg = opts.get('msg')
727 msg = opts.get('msg')
728 force = opts.get('force')
728 force = opts.get('force')
729 user = opts.get('user')
729 user = opts.get('user')
730 date = opts.get('date')
730 date = opts.get('date')
731 if date:
731 if date:
732 date = util.parsedate(date)
732 date = util.parsedate(date)
733 self.check_reserved_name(patchfn)
733 self.check_reserved_name(patchfn)
734 if os.path.exists(self.join(patchfn)):
734 if os.path.exists(self.join(patchfn)):
735 raise util.Abort(_('patch "%s" already exists') % patchfn)
735 raise util.Abort(_('patch "%s" already exists') % patchfn)
736 if opts.get('include') or opts.get('exclude') or pats:
736 if opts.get('include') or opts.get('exclude') or pats:
737 match = cmdutil.match(repo, pats, opts)
737 match = cmdutil.match(repo, pats, opts)
738 # detect missing files in pats
738 # detect missing files in pats
739 def badfn(f, msg):
739 def badfn(f, msg):
740 raise util.Abort('%s: %s' % (f, msg))
740 raise util.Abort('%s: %s' % (f, msg))
741 match.bad = badfn
741 match.bad = badfn
742 m, a, r, d = repo.status(match=match)[:4]
742 m, a, r, d = repo.status(match=match)[:4]
743 else:
743 else:
744 m, a, r, d = self.check_localchanges(repo, force)
744 m, a, r, d = self.check_localchanges(repo, force)
745 match = cmdutil.matchfiles(repo, m + a + r)
745 match = cmdutil.matchfiles(repo, m + a + r)
746 commitfiles = m + a + r
746 commitfiles = m + a + r
747 self.check_toppatch(repo)
747 self.check_toppatch(repo)
748 insert = self.full_series_end()
748 insert = self.full_series_end()
749 wlock = repo.wlock()
749 wlock = repo.wlock()
750 try:
750 try:
751 # if patch file write fails, abort early
751 # if patch file write fails, abort early
752 p = self.opener(patchfn, "w")
752 p = self.opener(patchfn, "w")
753 try:
753 try:
754 if date:
754 if date:
755 p.write("# HG changeset patch\n")
755 p.write("# HG changeset patch\n")
756 if user:
756 if user:
757 p.write("# User " + user + "\n")
757 p.write("# User " + user + "\n")
758 p.write("# Date %d %d\n\n" % date)
758 p.write("# Date %d %d\n\n" % date)
759 elif user:
759 elif user:
760 p.write("From: " + user + "\n\n")
760 p.write("From: " + user + "\n\n")
761
761
762 if hasattr(msg, '__call__'):
762 if hasattr(msg, '__call__'):
763 msg = msg()
763 msg = msg()
764 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
764 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
765 n = repo.commit(commitmsg, user, date, match=match, force=True)
765 n = repo.commit(commitmsg, user, date, match=match, force=True)
766 if n is None:
766 if n is None:
767 raise util.Abort(_("repo commit failed"))
767 raise util.Abort(_("repo commit failed"))
768 try:
768 try:
769 self.full_series[insert:insert] = [patchfn]
769 self.full_series[insert:insert] = [patchfn]
770 self.applied.append(statusentry(hex(n), patchfn))
770 self.applied.append(statusentry(hex(n), patchfn))
771 self.parse_series()
771 self.parse_series()
772 self.series_dirty = 1
772 self.series_dirty = 1
773 self.applied_dirty = 1
773 self.applied_dirty = 1
774 if msg:
774 if msg:
775 msg = msg + "\n\n"
775 msg = msg + "\n\n"
776 p.write(msg)
776 p.write(msg)
777 if commitfiles:
777 if commitfiles:
778 diffopts = self.diffopts()
778 diffopts = self.diffopts()
779 if opts.get('git'): diffopts.git = True
779 if opts.get('git'): diffopts.git = True
780 parent = self.qparents(repo, n)
780 parent = self.qparents(repo, n)
781 chunks = patch.diff(repo, node1=parent, node2=n,
781 chunks = patch.diff(repo, node1=parent, node2=n,
782 match=match, opts=diffopts)
782 match=match, opts=diffopts)
783 for chunk in chunks:
783 for chunk in chunks:
784 p.write(chunk)
784 p.write(chunk)
785 p.close()
785 p.close()
786 wlock.release()
786 wlock.release()
787 wlock = None
787 wlock = None
788 r = self.qrepo()
788 r = self.qrepo()
789 if r: r.add([patchfn])
789 if r: r.add([patchfn])
790 except:
790 except:
791 repo.rollback()
791 repo.rollback()
792 raise
792 raise
793 except Exception:
793 except Exception:
794 patchpath = self.join(patchfn)
794 patchpath = self.join(patchfn)
795 try:
795 try:
796 os.unlink(patchpath)
796 os.unlink(patchpath)
797 except:
797 except:
798 self.ui.warn(_('error unlinking %s\n') % patchpath)
798 self.ui.warn(_('error unlinking %s\n') % patchpath)
799 raise
799 raise
800 self.removeundo(repo)
800 self.removeundo(repo)
801 finally:
801 finally:
802 release(wlock)
802 release(wlock)
803
803
804 def strip(self, repo, rev, update=True, backup="all", force=None):
804 def strip(self, repo, rev, update=True, backup="all", force=None):
805 wlock = lock = None
805 wlock = lock = None
806 try:
806 try:
807 wlock = repo.wlock()
807 wlock = repo.wlock()
808 lock = repo.lock()
808 lock = repo.lock()
809
809
810 if update:
810 if update:
811 self.check_localchanges(repo, force=force, refresh=False)
811 self.check_localchanges(repo, force=force, refresh=False)
812 urev = self.qparents(repo, rev)
812 urev = self.qparents(repo, rev)
813 hg.clean(repo, urev)
813 hg.clean(repo, urev)
814 repo.dirstate.write()
814 repo.dirstate.write()
815
815
816 self.removeundo(repo)
816 self.removeundo(repo)
817 repair.strip(self.ui, repo, rev, backup)
817 repair.strip(self.ui, repo, rev, backup)
818 # strip may have unbundled a set of backed up revisions after
818 # strip may have unbundled a set of backed up revisions after
819 # the actual strip
819 # the actual strip
820 self.removeundo(repo)
820 self.removeundo(repo)
821 finally:
821 finally:
822 release(lock, wlock)
822 release(lock, wlock)
823
823
824 def isapplied(self, patch):
824 def isapplied(self, patch):
825 """returns (index, rev, patch)"""
825 """returns (index, rev, patch)"""
826 for i, a in enumerate(self.applied):
826 for i, a in enumerate(self.applied):
827 if a.name == patch:
827 if a.name == patch:
828 return (i, a.rev, a.name)
828 return (i, a.rev, a.name)
829 return None
829 return None
830
830
831 # if the exact patch name does not exist, we try a few
831 # if the exact patch name does not exist, we try a few
832 # variations. If strict is passed, we try only #1
832 # variations. If strict is passed, we try only #1
833 #
833 #
834 # 1) a number to indicate an offset in the series file
834 # 1) a number to indicate an offset in the series file
835 # 2) a unique substring of the patch name was given
835 # 2) a unique substring of the patch name was given
836 # 3) patchname[-+]num to indicate an offset in the series file
836 # 3) patchname[-+]num to indicate an offset in the series file
837 def lookup(self, patch, strict=False):
837 def lookup(self, patch, strict=False):
838 patch = patch and str(patch)
838 patch = patch and str(patch)
839
839
840 def partial_name(s):
840 def partial_name(s):
841 if s in self.series:
841 if s in self.series:
842 return s
842 return s
843 matches = [x for x in self.series if s in x]
843 matches = [x for x in self.series if s in x]
844 if len(matches) > 1:
844 if len(matches) > 1:
845 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
845 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
846 for m in matches:
846 for m in matches:
847 self.ui.warn(' %s\n' % m)
847 self.ui.warn(' %s\n' % m)
848 return None
848 return None
849 if matches:
849 if matches:
850 return matches[0]
850 return matches[0]
851 if len(self.series) > 0 and len(self.applied) > 0:
851 if len(self.series) > 0 and len(self.applied) > 0:
852 if s == 'qtip':
852 if s == 'qtip':
853 return self.series[self.series_end(True)-1]
853 return self.series[self.series_end(True)-1]
854 if s == 'qbase':
854 if s == 'qbase':
855 return self.series[0]
855 return self.series[0]
856 return None
856 return None
857
857
858 if patch is None:
858 if patch is None:
859 return None
859 return None
860 if patch in self.series:
860 if patch in self.series:
861 return patch
861 return patch
862
862
863 if not os.path.isfile(self.join(patch)):
863 if not os.path.isfile(self.join(patch)):
864 try:
864 try:
865 sno = int(patch)
865 sno = int(patch)
866 except(ValueError, OverflowError):
866 except(ValueError, OverflowError):
867 pass
867 pass
868 else:
868 else:
869 if -len(self.series) <= sno < len(self.series):
869 if -len(self.series) <= sno < len(self.series):
870 return self.series[sno]
870 return self.series[sno]
871
871
872 if not strict:
872 if not strict:
873 res = partial_name(patch)
873 res = partial_name(patch)
874 if res:
874 if res:
875 return res
875 return res
876 minus = patch.rfind('-')
876 minus = patch.rfind('-')
877 if minus >= 0:
877 if minus >= 0:
878 res = partial_name(patch[:minus])
878 res = partial_name(patch[:minus])
879 if res:
879 if res:
880 i = self.series.index(res)
880 i = self.series.index(res)
881 try:
881 try:
882 off = int(patch[minus+1:] or 1)
882 off = int(patch[minus+1:] or 1)
883 except(ValueError, OverflowError):
883 except(ValueError, OverflowError):
884 pass
884 pass
885 else:
885 else:
886 if i - off >= 0:
886 if i - off >= 0:
887 return self.series[i - off]
887 return self.series[i - off]
888 plus = patch.rfind('+')
888 plus = patch.rfind('+')
889 if plus >= 0:
889 if plus >= 0:
890 res = partial_name(patch[:plus])
890 res = partial_name(patch[:plus])
891 if res:
891 if res:
892 i = self.series.index(res)
892 i = self.series.index(res)
893 try:
893 try:
894 off = int(patch[plus+1:] or 1)
894 off = int(patch[plus+1:] or 1)
895 except(ValueError, OverflowError):
895 except(ValueError, OverflowError):
896 pass
896 pass
897 else:
897 else:
898 if i + off < len(self.series):
898 if i + off < len(self.series):
899 return self.series[i + off]
899 return self.series[i + off]
900 raise util.Abort(_("patch %s not in series") % patch)
900 raise util.Abort(_("patch %s not in series") % patch)
901
901
902 def push(self, repo, patch=None, force=False, list=False,
902 def push(self, repo, patch=None, force=False, list=False,
903 mergeq=None, all=False):
903 mergeq=None, all=False):
904 wlock = repo.wlock()
904 wlock = repo.wlock()
905 if repo.dirstate.parents()[0] not in repo.heads():
905 if repo.dirstate.parents()[0] not in repo.heads():
906 self.ui.status(_("(working directory not at a head)\n"))
906 self.ui.status(_("(working directory not at a head)\n"))
907
907
908 if not self.series:
908 if not self.series:
909 self.ui.warn(_('no patches in series\n'))
909 self.ui.warn(_('no patches in series\n'))
910 return 0
910 return 0
911
911
912 try:
912 try:
913 patch = self.lookup(patch)
913 patch = self.lookup(patch)
914 # Suppose our series file is: A B C and the current 'top'
914 # Suppose our series file is: A B C and the current 'top'
915 # patch is B. qpush C should be performed (moving forward)
915 # patch is B. qpush C should be performed (moving forward)
916 # qpush B is a NOP (no change) qpush A is an error (can't
916 # qpush B is a NOP (no change) qpush A is an error (can't
917 # go backwards with qpush)
917 # go backwards with qpush)
918 if patch:
918 if patch:
919 info = self.isapplied(patch)
919 info = self.isapplied(patch)
920 if info:
920 if info:
921 if info[0] < len(self.applied) - 1:
921 if info[0] < len(self.applied) - 1:
922 raise util.Abort(
922 raise util.Abort(
923 _("cannot push to a previous patch: %s") % patch)
923 _("cannot push to a previous patch: %s") % patch)
924 self.ui.warn(
924 self.ui.warn(
925 _('qpush: %s is already at the top\n') % patch)
925 _('qpush: %s is already at the top\n') % patch)
926 return
926 return
927 pushable, reason = self.pushable(patch)
927 pushable, reason = self.pushable(patch)
928 if not pushable:
928 if not pushable:
929 if reason:
929 if reason:
930 reason = _('guarded by %r') % reason
930 reason = _('guarded by %r') % reason
931 else:
931 else:
932 reason = _('no matching guards')
932 reason = _('no matching guards')
933 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
933 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
934 return 1
934 return 1
935 elif all:
935 elif all:
936 patch = self.series[-1]
936 patch = self.series[-1]
937 if self.isapplied(patch):
937 if self.isapplied(patch):
938 self.ui.warn(_('all patches are currently applied\n'))
938 self.ui.warn(_('all patches are currently applied\n'))
939 return 0
939 return 0
940
940
941 # Following the above example, starting at 'top' of B:
941 # Following the above example, starting at 'top' of B:
942 # qpush should be performed (pushes C), but a subsequent
942 # qpush should be performed (pushes C), but a subsequent
943 # qpush without an argument is an error (nothing to
943 # qpush without an argument is an error (nothing to
944 # apply). This allows a loop of "...while hg qpush..." to
944 # apply). This allows a loop of "...while hg qpush..." to
945 # work as it detects an error when done
945 # work as it detects an error when done
946 start = self.series_end()
946 start = self.series_end()
947 if start == len(self.series):
947 if start == len(self.series):
948 self.ui.warn(_('patch series already fully applied\n'))
948 self.ui.warn(_('patch series already fully applied\n'))
949 return 1
949 return 1
950 if not force:
950 if not force:
951 self.check_localchanges(repo)
951 self.check_localchanges(repo)
952
952
953 self.applied_dirty = 1
953 self.applied_dirty = 1
954 if start > 0:
954 if start > 0:
955 self.check_toppatch(repo)
955 self.check_toppatch(repo)
956 if not patch:
956 if not patch:
957 patch = self.series[start]
957 patch = self.series[start]
958 end = start + 1
958 end = start + 1
959 else:
959 else:
960 end = self.series.index(patch, start) + 1
960 end = self.series.index(patch, start) + 1
961 s = self.series[start:end]
961 s = self.series[start:end]
962 all_files = {}
962 all_files = {}
963 try:
963 try:
964 if mergeq:
964 if mergeq:
965 ret = self.mergepatch(repo, mergeq, s)
965 ret = self.mergepatch(repo, mergeq, s)
966 else:
966 else:
967 ret = self.apply(repo, s, list, all_files=all_files)
967 ret = self.apply(repo, s, list, all_files=all_files)
968 except:
968 except:
969 self.ui.warn(_('cleaning up working directory...'))
969 self.ui.warn(_('cleaning up working directory...'))
970 node = repo.dirstate.parents()[0]
970 node = repo.dirstate.parents()[0]
971 hg.revert(repo, node, None)
971 hg.revert(repo, node, None)
972 unknown = repo.status(unknown=True)[4]
972 unknown = repo.status(unknown=True)[4]
973 # only remove unknown files that we know we touched or
973 # only remove unknown files that we know we touched or
974 # created while patching
974 # created while patching
975 for f in unknown:
975 for f in unknown:
976 if f in all_files:
976 if f in all_files:
977 util.unlink(repo.wjoin(f))
977 util.unlink(repo.wjoin(f))
978 self.ui.warn(_('done\n'))
978 self.ui.warn(_('done\n'))
979 raise
979 raise
980 top = self.applied[-1].name
980 top = self.applied[-1].name
981 if ret[0]:
981 if ret[0]:
982 self.ui.write(_("errors during apply, please fix and "
982 self.ui.write(_("errors during apply, please fix and "
983 "refresh %s\n") % top)
983 "refresh %s\n") % top)
984 else:
984 else:
985 self.ui.write(_("now at: %s\n") % top)
985 self.ui.write(_("now at: %s\n") % top)
986 return ret[0]
986 return ret[0]
987 finally:
987 finally:
988 wlock.release()
988 wlock.release()
989
989
990 def pop(self, repo, patch=None, force=False, update=True, all=False):
990 def pop(self, repo, patch=None, force=False, update=True, all=False):
991 def getfile(f, rev, flags):
991 def getfile(f, rev, flags):
992 t = repo.file(f).read(rev)
992 t = repo.file(f).read(rev)
993 repo.wwrite(f, t, flags)
993 repo.wwrite(f, t, flags)
994
994
995 wlock = repo.wlock()
995 wlock = repo.wlock()
996 try:
996 try:
997 if patch:
997 if patch:
998 # index, rev, patch
998 # index, rev, patch
999 info = self.isapplied(patch)
999 info = self.isapplied(patch)
1000 if not info:
1000 if not info:
1001 patch = self.lookup(patch)
1001 patch = self.lookup(patch)
1002 info = self.isapplied(patch)
1002 info = self.isapplied(patch)
1003 if not info:
1003 if not info:
1004 raise util.Abort(_("patch %s is not applied") % patch)
1004 raise util.Abort(_("patch %s is not applied") % patch)
1005
1005
1006 if len(self.applied) == 0:
1006 if len(self.applied) == 0:
1007 # Allow qpop -a to work repeatedly,
1007 # Allow qpop -a to work repeatedly,
1008 # but not qpop without an argument
1008 # but not qpop without an argument
1009 self.ui.warn(_("no patches applied\n"))
1009 self.ui.warn(_("no patches applied\n"))
1010 return not all
1010 return not all
1011
1011
1012 if all:
1012 if all:
1013 start = 0
1013 start = 0
1014 elif patch:
1014 elif patch:
1015 start = info[0] + 1
1015 start = info[0] + 1
1016 else:
1016 else:
1017 start = len(self.applied) - 1
1017 start = len(self.applied) - 1
1018
1018
1019 if start >= len(self.applied):
1019 if start >= len(self.applied):
1020 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1020 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1021 return
1021 return
1022
1022
1023 if not update:
1023 if not update:
1024 parents = repo.dirstate.parents()
1024 parents = repo.dirstate.parents()
1025 rr = [ bin(x.rev) for x in self.applied ]
1025 rr = [ bin(x.rev) for x in self.applied ]
1026 for p in parents:
1026 for p in parents:
1027 if p in rr:
1027 if p in rr:
1028 self.ui.warn(_("qpop: forcing dirstate update\n"))
1028 self.ui.warn(_("qpop: forcing dirstate update\n"))
1029 update = True
1029 update = True
1030 else:
1030 else:
1031 parents = [p.hex() for p in repo[None].parents()]
1031 parents = [p.hex() for p in repo[None].parents()]
1032 needupdate = False
1032 needupdate = False
1033 for entry in self.applied[start:]:
1033 for entry in self.applied[start:]:
1034 if entry.rev in parents:
1034 if entry.rev in parents:
1035 needupdate = True
1035 needupdate = True
1036 break
1036 break
1037 update = needupdate
1037 update = needupdate
1038
1038
1039 if not force and update:
1039 if not force and update:
1040 self.check_localchanges(repo)
1040 self.check_localchanges(repo)
1041
1041
1042 self.applied_dirty = 1
1042 self.applied_dirty = 1
1043 end = len(self.applied)
1043 end = len(self.applied)
1044 rev = bin(self.applied[start].rev)
1044 rev = bin(self.applied[start].rev)
1045 if update:
1045 if update:
1046 top = self.check_toppatch(repo)
1046 top = self.check_toppatch(repo)
1047
1047
1048 try:
1048 try:
1049 heads = repo.changelog.heads(rev)
1049 heads = repo.changelog.heads(rev)
1050 except error.LookupError:
1050 except error.LookupError:
1051 node = short(rev)
1051 node = short(rev)
1052 raise util.Abort(_('trying to pop unknown node %s') % node)
1052 raise util.Abort(_('trying to pop unknown node %s') % node)
1053
1053
1054 if heads != [bin(self.applied[-1].rev)]:
1054 if heads != [bin(self.applied[-1].rev)]:
1055 raise util.Abort(_("popping would remove a revision not "
1055 raise util.Abort(_("popping would remove a revision not "
1056 "managed by this patch queue"))
1056 "managed by this patch queue"))
1057
1057
1058 # we know there are no local changes, so we can make a simplified
1058 # we know there are no local changes, so we can make a simplified
1059 # form of hg.update.
1059 # form of hg.update.
1060 if update:
1060 if update:
1061 qp = self.qparents(repo, rev)
1061 qp = self.qparents(repo, rev)
1062 changes = repo.changelog.read(qp)
1062 changes = repo.changelog.read(qp)
1063 mmap = repo.manifest.read(changes[0])
1063 mmap = repo.manifest.read(changes[0])
1064 m, a, r, d = repo.status(qp, top)[:4]
1064 m, a, r, d = repo.status(qp, top)[:4]
1065 if d:
1065 if d:
1066 raise util.Abort(_("deletions found between repo revs"))
1066 raise util.Abort(_("deletions found between repo revs"))
1067 for f in m:
1067 for f in m:
1068 getfile(f, mmap[f], mmap.flags(f))
1068 getfile(f, mmap[f], mmap.flags(f))
1069 for f in r:
1069 for f in r:
1070 getfile(f, mmap[f], mmap.flags(f))
1070 getfile(f, mmap[f], mmap.flags(f))
1071 for f in m + r:
1071 for f in m + r:
1072 repo.dirstate.normal(f)
1072 repo.dirstate.normal(f)
1073 for f in a:
1073 for f in a:
1074 try:
1074 try:
1075 os.unlink(repo.wjoin(f))
1075 os.unlink(repo.wjoin(f))
1076 except OSError, e:
1076 except OSError, e:
1077 if e.errno != errno.ENOENT:
1077 if e.errno != errno.ENOENT:
1078 raise
1078 raise
1079 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
1079 try: os.removedirs(os.path.dirname(repo.wjoin(f)))
1080 except: pass
1080 except: pass
1081 repo.dirstate.forget(f)
1081 repo.dirstate.forget(f)
1082 repo.dirstate.setparents(qp, nullid)
1082 repo.dirstate.setparents(qp, nullid)
1083 del self.applied[start:end]
1083 del self.applied[start:end]
1084 self.strip(repo, rev, update=False, backup='strip')
1084 self.strip(repo, rev, update=False, backup='strip')
1085 if len(self.applied):
1085 if len(self.applied):
1086 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1086 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1087 else:
1087 else:
1088 self.ui.write(_("patch queue now empty\n"))
1088 self.ui.write(_("patch queue now empty\n"))
1089 finally:
1089 finally:
1090 wlock.release()
1090 wlock.release()
1091
1091
1092 def diff(self, repo, pats, opts):
1092 def diff(self, repo, pats, opts):
1093 top = self.check_toppatch(repo)
1093 top = self.check_toppatch(repo)
1094 if not top:
1094 if not top:
1095 self.ui.write(_("no patches applied\n"))
1095 self.ui.write(_("no patches applied\n"))
1096 return
1096 return
1097 qp = self.qparents(repo, top)
1097 qp = self.qparents(repo, top)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1098 self._diffopts = patch.diffopts(self.ui, opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1099 self.printdiff(repo, qp, files=pats, opts=opts)
1100
1100
1101 def refresh(self, repo, pats=None, **opts):
1101 def refresh(self, repo, pats=None, **opts):
1102 if len(self.applied) == 0:
1102 if len(self.applied) == 0:
1103 self.ui.write(_("no patches applied\n"))
1103 self.ui.write(_("no patches applied\n"))
1104 return 1
1104 return 1
1105 msg = opts.get('msg', '').rstrip()
1105 msg = opts.get('msg', '').rstrip()
1106 newuser = opts.get('user')
1106 newuser = opts.get('user')
1107 newdate = opts.get('date')
1107 newdate = opts.get('date')
1108 if newdate:
1108 if newdate:
1109 newdate = '%d %d' % util.parsedate(newdate)
1109 newdate = '%d %d' % util.parsedate(newdate)
1110 wlock = repo.wlock()
1110 wlock = repo.wlock()
1111 try:
1111 try:
1112 self.check_toppatch(repo)
1112 self.check_toppatch(repo)
1113 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1113 (top, patchfn) = (self.applied[-1].rev, self.applied[-1].name)
1114 top = bin(top)
1114 top = bin(top)
1115 if repo.changelog.heads(top) != [top]:
1115 if repo.changelog.heads(top) != [top]:
1116 raise util.Abort(_("cannot refresh a revision with children"))
1116 raise util.Abort(_("cannot refresh a revision with children"))
1117 cparents = repo.changelog.parents(top)
1117 cparents = repo.changelog.parents(top)
1118 patchparent = self.qparents(repo, top)
1118 patchparent = self.qparents(repo, top)
1119 ph = patchheader(self.join(patchfn))
1119 ph = patchheader(self.join(patchfn))
1120
1120
1121 patchf = self.opener(patchfn, 'r')
1121 patchf = self.opener(patchfn, 'r')
1122
1122
1123 # if the patch was a git patch, refresh it as a git patch
1123 # if the patch was a git patch, refresh it as a git patch
1124 for line in patchf:
1124 for line in patchf:
1125 if line.startswith('diff --git'):
1125 if line.startswith('diff --git'):
1126 self.diffopts().git = True
1126 self.diffopts().git = True
1127 break
1127 break
1128
1128
1129 if msg:
1129 if msg:
1130 ph.setmessage(msg)
1130 ph.setmessage(msg)
1131 if newuser:
1131 if newuser:
1132 ph.setuser(newuser)
1132 ph.setuser(newuser)
1133 if newdate:
1133 if newdate:
1134 ph.setdate(newdate)
1134 ph.setdate(newdate)
1135
1135
1136 # only commit new patch when write is complete
1136 # only commit new patch when write is complete
1137 patchf = self.opener(patchfn, 'w', atomictemp=True)
1137 patchf = self.opener(patchfn, 'w', atomictemp=True)
1138
1138
1139 patchf.seek(0)
1139 patchf.seek(0)
1140 patchf.truncate()
1140 patchf.truncate()
1141
1141
1142 comments = str(ph)
1142 comments = str(ph)
1143 if comments:
1143 if comments:
1144 patchf.write(comments)
1144 patchf.write(comments)
1145
1145
1146 if opts.get('git'):
1146 if opts.get('git'):
1147 self.diffopts().git = True
1147 self.diffopts().git = True
1148 tip = repo.changelog.tip()
1148 tip = repo.changelog.tip()
1149 if top == tip:
1149 if top == tip:
1150 # if the top of our patch queue is also the tip, there is an
1150 # if the top of our patch queue is also the tip, there is an
1151 # optimization here. We update the dirstate in place and strip
1151 # optimization here. We update the dirstate in place and strip
1152 # off the tip commit. Then just commit the current directory
1152 # off the tip commit. Then just commit the current directory
1153 # tree. We can also send repo.commit the list of files
1153 # tree. We can also send repo.commit the list of files
1154 # changed to speed up the diff
1154 # changed to speed up the diff
1155 #
1155 #
1156 # in short mode, we only diff the files included in the
1156 # in short mode, we only diff the files included in the
1157 # patch already plus specified files
1157 # patch already plus specified files
1158 #
1158 #
1159 # this should really read:
1159 # this should really read:
1160 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1160 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1161 # but we do it backwards to take advantage of manifest/chlog
1161 # but we do it backwards to take advantage of manifest/chlog
1162 # caching against the next repo.status call
1162 # caching against the next repo.status call
1163 #
1163 #
1164 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1164 mm, aa, dd, aa2 = repo.status(patchparent, tip)[:4]
1165 changes = repo.changelog.read(tip)
1165 changes = repo.changelog.read(tip)
1166 man = repo.manifest.read(changes[0])
1166 man = repo.manifest.read(changes[0])
1167 aaa = aa[:]
1167 aaa = aa[:]
1168 matchfn = cmdutil.match(repo, pats, opts)
1168 matchfn = cmdutil.match(repo, pats, opts)
1169 if opts.get('short'):
1169 if opts.get('short'):
1170 # if amending a patch, we start with existing
1170 # if amending a patch, we start with existing
1171 # files plus specified files - unfiltered
1171 # files plus specified files - unfiltered
1172 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1172 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1173 # filter with inc/exl options
1173 # filter with inc/exl options
1174 matchfn = cmdutil.match(repo, opts=opts)
1174 matchfn = cmdutil.match(repo, opts=opts)
1175 else:
1175 else:
1176 match = cmdutil.matchall(repo)
1176 match = cmdutil.matchall(repo)
1177 m, a, r, d = repo.status(match=match)[:4]
1177 m, a, r, d = repo.status(match=match)[:4]
1178
1178
1179 # we might end up with files that were added between
1179 # we might end up with files that were added between
1180 # tip and the dirstate parent, but then changed in the
1180 # tip and the dirstate parent, but then changed in the
1181 # local dirstate. in this case, we want them to only
1181 # local dirstate. in this case, we want them to only
1182 # show up in the added section
1182 # show up in the added section
1183 for x in m:
1183 for x in m:
1184 if x not in aa:
1184 if x not in aa:
1185 mm.append(x)
1185 mm.append(x)
1186 # we might end up with files added by the local dirstate that
1186 # we might end up with files added by the local dirstate that
1187 # were deleted by the patch. In this case, they should only
1187 # were deleted by the patch. In this case, they should only
1188 # show up in the changed section.
1188 # show up in the changed section.
1189 for x in a:
1189 for x in a:
1190 if x in dd:
1190 if x in dd:
1191 del dd[dd.index(x)]
1191 del dd[dd.index(x)]
1192 mm.append(x)
1192 mm.append(x)
1193 else:
1193 else:
1194 aa.append(x)
1194 aa.append(x)
1195 # make sure any files deleted in the local dirstate
1195 # make sure any files deleted in the local dirstate
1196 # are not in the add or change column of the patch
1196 # are not in the add or change column of the patch
1197 forget = []
1197 forget = []
1198 for x in d + r:
1198 for x in d + r:
1199 if x in aa:
1199 if x in aa:
1200 del aa[aa.index(x)]
1200 del aa[aa.index(x)]
1201 forget.append(x)
1201 forget.append(x)
1202 continue
1202 continue
1203 elif x in mm:
1203 elif x in mm:
1204 del mm[mm.index(x)]
1204 del mm[mm.index(x)]
1205 dd.append(x)
1205 dd.append(x)
1206
1206
1207 m = list(set(mm))
1207 m = list(set(mm))
1208 r = list(set(dd))
1208 r = list(set(dd))
1209 a = list(set(aa))
1209 a = list(set(aa))
1210 c = [filter(matchfn, l) for l in (m, a, r)]
1210 c = [filter(matchfn, l) for l in (m, a, r)]
1211 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1211 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1212 chunks = patch.diff(repo, patchparent, match=match,
1212 chunks = patch.diff(repo, patchparent, match=match,
1213 changes=c, opts=self.diffopts())
1213 changes=c, opts=self.diffopts())
1214 for chunk in chunks:
1214 for chunk in chunks:
1215 patchf.write(chunk)
1215 patchf.write(chunk)
1216
1216
1217 try:
1217 try:
1218 if self.diffopts().git:
1218 if self.diffopts().git:
1219 copies = {}
1219 copies = {}
1220 for dst in a:
1220 for dst in a:
1221 src = repo.dirstate.copied(dst)
1221 src = repo.dirstate.copied(dst)
1222 # during qfold, the source file for copies may
1222 # during qfold, the source file for copies may
1223 # be removed. Treat this as a simple add.
1223 # be removed. Treat this as a simple add.
1224 if src is not None and src in repo.dirstate:
1224 if src is not None and src in repo.dirstate:
1225 copies.setdefault(src, []).append(dst)
1225 copies.setdefault(src, []).append(dst)
1226 repo.dirstate.add(dst)
1226 repo.dirstate.add(dst)
1227 # remember the copies between patchparent and tip
1227 # remember the copies between patchparent and tip
1228 for dst in aaa:
1228 for dst in aaa:
1229 f = repo.file(dst)
1229 f = repo.file(dst)
1230 src = f.renamed(man[dst])
1230 src = f.renamed(man[dst])
1231 if src:
1231 if src:
1232 copies.setdefault(src[0], []).extend(copies.get(dst, []))
1232 copies.setdefault(src[0], []).extend(copies.get(dst, []))
1233 if dst in a:
1233 if dst in a:
1234 copies[src[0]].append(dst)
1234 copies[src[0]].append(dst)
1235 # we can't copy a file created by the patch itself
1235 # we can't copy a file created by the patch itself
1236 if dst in copies:
1236 if dst in copies:
1237 del copies[dst]
1237 del copies[dst]
1238 for src, dsts in copies.iteritems():
1238 for src, dsts in copies.iteritems():
1239 for dst in dsts:
1239 for dst in dsts:
1240 repo.dirstate.copy(src, dst)
1240 repo.dirstate.copy(src, dst)
1241 else:
1241 else:
1242 for dst in a:
1242 for dst in a:
1243 repo.dirstate.add(dst)
1243 repo.dirstate.add(dst)
1244 # Drop useless copy information
1244 # Drop useless copy information
1245 for f in list(repo.dirstate.copies()):
1245 for f in list(repo.dirstate.copies()):
1246 repo.dirstate.copy(None, f)
1246 repo.dirstate.copy(None, f)
1247 for f in r:
1247 for f in r:
1248 repo.dirstate.remove(f)
1248 repo.dirstate.remove(f)
1249 # if the patch excludes a modified file, mark that
1249 # if the patch excludes a modified file, mark that
1250 # file with mtime=0 so status can see it.
1250 # file with mtime=0 so status can see it.
1251 mm = []
1251 mm = []
1252 for i in xrange(len(m)-1, -1, -1):
1252 for i in xrange(len(m)-1, -1, -1):
1253 if not matchfn(m[i]):
1253 if not matchfn(m[i]):
1254 mm.append(m[i])
1254 mm.append(m[i])
1255 del m[i]
1255 del m[i]
1256 for f in m:
1256 for f in m:
1257 repo.dirstate.normal(f)
1257 repo.dirstate.normal(f)
1258 for f in mm:
1258 for f in mm:
1259 repo.dirstate.normallookup(f)
1259 repo.dirstate.normallookup(f)
1260 for f in forget:
1260 for f in forget:
1261 repo.dirstate.forget(f)
1261 repo.dirstate.forget(f)
1262
1262
1263 if not msg:
1263 if not msg:
1264 if not ph.message:
1264 if not ph.message:
1265 message = "[mq]: %s\n" % patchfn
1265 message = "[mq]: %s\n" % patchfn
1266 else:
1266 else:
1267 message = "\n".join(ph.message)
1267 message = "\n".join(ph.message)
1268 else:
1268 else:
1269 message = msg
1269 message = msg
1270
1270
1271 user = ph.user or changes[1]
1271 user = ph.user or changes[1]
1272
1272
1273 # assumes strip can roll itself back if interrupted
1273 # assumes strip can roll itself back if interrupted
1274 repo.dirstate.setparents(*cparents)
1274 repo.dirstate.setparents(*cparents)
1275 self.applied.pop()
1275 self.applied.pop()
1276 self.applied_dirty = 1
1276 self.applied_dirty = 1
1277 self.strip(repo, top, update=False,
1277 self.strip(repo, top, update=False,
1278 backup='strip')
1278 backup='strip')
1279 except:
1279 except:
1280 repo.dirstate.invalidate()
1280 repo.dirstate.invalidate()
1281 raise
1281 raise
1282
1282
1283 try:
1283 try:
1284 # might be nice to attempt to roll back strip after this
1284 # might be nice to attempt to roll back strip after this
1285 patchf.rename()
1285 patchf.rename()
1286 n = repo.commit(message, user, ph.date, match=match,
1286 n = repo.commit(message, user, ph.date, match=match,
1287 force=True)
1287 force=True)
1288 self.applied.append(statusentry(hex(n), patchfn))
1288 self.applied.append(statusentry(hex(n), patchfn))
1289 except:
1289 except:
1290 ctx = repo[cparents[0]]
1290 ctx = repo[cparents[0]]
1291 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1291 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1292 self.save_dirty()
1292 self.save_dirty()
1293 self.ui.warn(_('refresh interrupted while patch was popped! '
1293 self.ui.warn(_('refresh interrupted while patch was popped! '
1294 '(revert --all, qpush to recover)\n'))
1294 '(revert --all, qpush to recover)\n'))
1295 raise
1295 raise
1296 else:
1296 else:
1297 self.printdiff(repo, patchparent, fp=patchf)
1297 self.printdiff(repo, patchparent, fp=patchf)
1298 patchf.rename()
1298 patchf.rename()
1299 added = repo.status()[1]
1299 added = repo.status()[1]
1300 for a in added:
1300 for a in added:
1301 f = repo.wjoin(a)
1301 f = repo.wjoin(a)
1302 try:
1302 try:
1303 os.unlink(f)
1303 os.unlink(f)
1304 except OSError, e:
1304 except OSError, e:
1305 if e.errno != errno.ENOENT:
1305 if e.errno != errno.ENOENT:
1306 raise
1306 raise
1307 try: os.removedirs(os.path.dirname(f))
1307 try: os.removedirs(os.path.dirname(f))
1308 except: pass
1308 except: pass
1309 # forget the file copies in the dirstate
1309 # forget the file copies in the dirstate
1310 # push should readd the files later on
1310 # push should readd the files later on
1311 repo.dirstate.forget(a)
1311 repo.dirstate.forget(a)
1312 self.pop(repo, force=True)
1312 self.pop(repo, force=True)
1313 self.push(repo, force=True)
1313 self.push(repo, force=True)
1314 finally:
1314 finally:
1315 wlock.release()
1315 wlock.release()
1316 self.removeundo(repo)
1316 self.removeundo(repo)
1317
1317
1318 def init(self, repo, create=False):
1318 def init(self, repo, create=False):
1319 if not create and os.path.isdir(self.path):
1319 if not create and os.path.isdir(self.path):
1320 raise util.Abort(_("patch queue directory already exists"))
1320 raise util.Abort(_("patch queue directory already exists"))
1321 try:
1321 try:
1322 os.mkdir(self.path)
1322 os.mkdir(self.path)
1323 except OSError, inst:
1323 except OSError, inst:
1324 if inst.errno != errno.EEXIST or not create:
1324 if inst.errno != errno.EEXIST or not create:
1325 raise
1325 raise
1326 if create:
1326 if create:
1327 return self.qrepo(create=True)
1327 return self.qrepo(create=True)
1328
1328
1329 def unapplied(self, repo, patch=None):
1329 def unapplied(self, repo, patch=None):
1330 if patch and patch not in self.series:
1330 if patch and patch not in self.series:
1331 raise util.Abort(_("patch %s is not in series file") % patch)
1331 raise util.Abort(_("patch %s is not in series file") % patch)
1332 if not patch:
1332 if not patch:
1333 start = self.series_end()
1333 start = self.series_end()
1334 else:
1334 else:
1335 start = self.series.index(patch) + 1
1335 start = self.series.index(patch) + 1
1336 unapplied = []
1336 unapplied = []
1337 for i in xrange(start, len(self.series)):
1337 for i in xrange(start, len(self.series)):
1338 pushable, reason = self.pushable(i)
1338 pushable, reason = self.pushable(i)
1339 if pushable:
1339 if pushable:
1340 unapplied.append((i, self.series[i]))
1340 unapplied.append((i, self.series[i]))
1341 self.explain_pushable(i)
1341 self.explain_pushable(i)
1342 return unapplied
1342 return unapplied
1343
1343
1344 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1344 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1345 summary=False):
1345 summary=False):
1346 def displayname(patchname):
1346 def displayname(patchname):
1347 if summary:
1347 if summary:
1348 ph = patchheader(self.join(patchname))
1348 ph = patchheader(self.join(patchname))
1349 msg = ph.message
1349 msg = ph.message
1350 msg = msg and ': ' + msg[0] or ': '
1350 msg = msg and ': ' + msg[0] or ': '
1351 else:
1351 else:
1352 msg = ''
1352 msg = ''
1353 return '%s%s' % (patchname, msg)
1353 return '%s%s' % (patchname, msg)
1354
1354
1355 applied = set([p.name for p in self.applied])
1355 applied = set([p.name for p in self.applied])
1356 if length is None:
1356 if length is None:
1357 length = len(self.series) - start
1357 length = len(self.series) - start
1358 if not missing:
1358 if not missing:
1359 for i in xrange(start, start+length):
1359 for i in xrange(start, start+length):
1360 patch = self.series[i]
1360 patch = self.series[i]
1361 if patch in applied:
1361 if patch in applied:
1362 stat = 'A'
1362 stat = 'A'
1363 elif self.pushable(i)[0]:
1363 elif self.pushable(i)[0]:
1364 stat = 'U'
1364 stat = 'U'
1365 else:
1365 else:
1366 stat = 'G'
1366 stat = 'G'
1367 pfx = ''
1367 pfx = ''
1368 if self.ui.verbose:
1368 if self.ui.verbose:
1369 pfx = '%d %s ' % (i, stat)
1369 pfx = '%d %s ' % (i, stat)
1370 elif status and status != stat:
1370 elif status and status != stat:
1371 continue
1371 continue
1372 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1372 self.ui.write('%s%s\n' % (pfx, displayname(patch)))
1373 else:
1373 else:
1374 msng_list = []
1374 msng_list = []
1375 for root, dirs, files in os.walk(self.path):
1375 for root, dirs, files in os.walk(self.path):
1376 d = root[len(self.path) + 1:]
1376 d = root[len(self.path) + 1:]
1377 for f in files:
1377 for f in files:
1378 fl = os.path.join(d, f)
1378 fl = os.path.join(d, f)
1379 if (fl not in self.series and
1379 if (fl not in self.series and
1380 fl not in (self.status_path, self.series_path,
1380 fl not in (self.status_path, self.series_path,
1381 self.guards_path)
1381 self.guards_path)
1382 and not fl.startswith('.')):
1382 and not fl.startswith('.')):
1383 msng_list.append(fl)
1383 msng_list.append(fl)
1384 for x in sorted(msng_list):
1384 for x in sorted(msng_list):
1385 pfx = self.ui.verbose and ('D ') or ''
1385 pfx = self.ui.verbose and ('D ') or ''
1386 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1386 self.ui.write("%s%s\n" % (pfx, displayname(x)))
1387
1387
1388 def issaveline(self, l):
1388 def issaveline(self, l):
1389 if l.name == '.hg.patches.save.line':
1389 if l.name == '.hg.patches.save.line':
1390 return True
1390 return True
1391
1391
1392 def qrepo(self, create=False):
1392 def qrepo(self, create=False):
1393 if create or os.path.isdir(self.join(".hg")):
1393 if create or os.path.isdir(self.join(".hg")):
1394 return hg.repository(self.ui, path=self.path, create=create)
1394 return hg.repository(self.ui, path=self.path, create=create)
1395
1395
1396 def restore(self, repo, rev, delete=None, qupdate=None):
1396 def restore(self, repo, rev, delete=None, qupdate=None):
1397 c = repo.changelog.read(rev)
1397 c = repo.changelog.read(rev)
1398 desc = c[4].strip()
1398 desc = c[4].strip()
1399 lines = desc.splitlines()
1399 lines = desc.splitlines()
1400 i = 0
1400 i = 0
1401 datastart = None
1401 datastart = None
1402 series = []
1402 series = []
1403 applied = []
1403 applied = []
1404 qpp = None
1404 qpp = None
1405 for i, line in enumerate(lines):
1405 for i, line in enumerate(lines):
1406 if line == 'Patch Data:':
1406 if line == 'Patch Data:':
1407 datastart = i + 1
1407 datastart = i + 1
1408 elif line.startswith('Dirstate:'):
1408 elif line.startswith('Dirstate:'):
1409 l = line.rstrip()
1409 l = line.rstrip()
1410 l = l[10:].split(' ')
1410 l = l[10:].split(' ')
1411 qpp = [ bin(x) for x in l ]
1411 qpp = [ bin(x) for x in l ]
1412 elif datastart != None:
1412 elif datastart != None:
1413 l = line.rstrip()
1413 l = line.rstrip()
1414 se = statusentry(l)
1414 se = statusentry(l)
1415 file_ = se.name
1415 file_ = se.name
1416 if se.rev:
1416 if se.rev:
1417 applied.append(se)
1417 applied.append(se)
1418 else:
1418 else:
1419 series.append(file_)
1419 series.append(file_)
1420 if datastart is None:
1420 if datastart is None:
1421 self.ui.warn(_("No saved patch data found\n"))
1421 self.ui.warn(_("No saved patch data found\n"))
1422 return 1
1422 return 1
1423 self.ui.warn(_("restoring status: %s\n") % lines[0])
1423 self.ui.warn(_("restoring status: %s\n") % lines[0])
1424 self.full_series = series
1424 self.full_series = series
1425 self.applied = applied
1425 self.applied = applied
1426 self.parse_series()
1426 self.parse_series()
1427 self.series_dirty = 1
1427 self.series_dirty = 1
1428 self.applied_dirty = 1
1428 self.applied_dirty = 1
1429 heads = repo.changelog.heads()
1429 heads = repo.changelog.heads()
1430 if delete:
1430 if delete:
1431 if rev not in heads:
1431 if rev not in heads:
1432 self.ui.warn(_("save entry has children, leaving it alone\n"))
1432 self.ui.warn(_("save entry has children, leaving it alone\n"))
1433 else:
1433 else:
1434 self.ui.warn(_("removing save entry %s\n") % short(rev))
1434 self.ui.warn(_("removing save entry %s\n") % short(rev))
1435 pp = repo.dirstate.parents()
1435 pp = repo.dirstate.parents()
1436 if rev in pp:
1436 if rev in pp:
1437 update = True
1437 update = True
1438 else:
1438 else:
1439 update = False
1439 update = False
1440 self.strip(repo, rev, update=update, backup='strip')
1440 self.strip(repo, rev, update=update, backup='strip')
1441 if qpp:
1441 if qpp:
1442 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1442 self.ui.warn(_("saved queue repository parents: %s %s\n") %
1443 (short(qpp[0]), short(qpp[1])))
1443 (short(qpp[0]), short(qpp[1])))
1444 if qupdate:
1444 if qupdate:
1445 self.ui.status(_("queue directory updating\n"))
1445 self.ui.status(_("queue directory updating\n"))
1446 r = self.qrepo()
1446 r = self.qrepo()
1447 if not r:
1447 if not r:
1448 self.ui.warn(_("Unable to load queue repository\n"))
1448 self.ui.warn(_("Unable to load queue repository\n"))
1449 return 1
1449 return 1
1450 hg.clean(r, qpp[0])
1450 hg.clean(r, qpp[0])
1451
1451
1452 def save(self, repo, msg=None):
1452 def save(self, repo, msg=None):
1453 if len(self.applied) == 0:
1453 if len(self.applied) == 0:
1454 self.ui.warn(_("save: no patches applied, exiting\n"))
1454 self.ui.warn(_("save: no patches applied, exiting\n"))
1455 return 1
1455 return 1
1456 if self.issaveline(self.applied[-1]):
1456 if self.issaveline(self.applied[-1]):
1457 self.ui.warn(_("status is already saved\n"))
1457 self.ui.warn(_("status is already saved\n"))
1458 return 1
1458 return 1
1459
1459
1460 ar = [ ':' + x for x in self.full_series ]
1460 ar = [ ':' + x for x in self.full_series ]
1461 if not msg:
1461 if not msg:
1462 msg = _("hg patches saved state")
1462 msg = _("hg patches saved state")
1463 else:
1463 else:
1464 msg = "hg patches: " + msg.rstrip('\r\n')
1464 msg = "hg patches: " + msg.rstrip('\r\n')
1465 r = self.qrepo()
1465 r = self.qrepo()
1466 if r:
1466 if r:
1467 pp = r.dirstate.parents()
1467 pp = r.dirstate.parents()
1468 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1468 msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
1469 msg += "\n\nPatch Data:\n"
1469 msg += "\n\nPatch Data:\n"
1470 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1470 text = msg + "\n".join([str(x) for x in self.applied]) + '\n' + (ar and
1471 "\n".join(ar) + '\n' or "")
1471 "\n".join(ar) + '\n' or "")
1472 n = repo.commit(text, force=True)
1472 n = repo.commit(text, force=True)
1473 if not n:
1473 if not n:
1474 self.ui.warn(_("repo commit failed\n"))
1474 self.ui.warn(_("repo commit failed\n"))
1475 return 1
1475 return 1
1476 self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
1476 self.applied.append(statusentry(hex(n),'.hg.patches.save.line'))
1477 self.applied_dirty = 1
1477 self.applied_dirty = 1
1478 self.removeundo(repo)
1478 self.removeundo(repo)
1479
1479
1480 def full_series_end(self):
1480 def full_series_end(self):
1481 if len(self.applied) > 0:
1481 if len(self.applied) > 0:
1482 p = self.applied[-1].name
1482 p = self.applied[-1].name
1483 end = self.find_series(p)
1483 end = self.find_series(p)
1484 if end is None:
1484 if end is None:
1485 return len(self.full_series)
1485 return len(self.full_series)
1486 return end + 1
1486 return end + 1
1487 return 0
1487 return 0
1488
1488
1489 def series_end(self, all_patches=False):
1489 def series_end(self, all_patches=False):
1490 """If all_patches is False, return the index of the next pushable patch
1490 """If all_patches is False, return the index of the next pushable patch
1491 in the series, or the series length. If all_patches is True, return the
1491 in the series, or the series length. If all_patches is True, return the
1492 index of the first patch past the last applied one.
1492 index of the first patch past the last applied one.
1493 """
1493 """
1494 end = 0
1494 end = 0
1495 def next(start):
1495 def next(start):
1496 if all_patches:
1496 if all_patches:
1497 return start
1497 return start
1498 i = start
1498 i = start
1499 while i < len(self.series):
1499 while i < len(self.series):
1500 p, reason = self.pushable(i)
1500 p, reason = self.pushable(i)
1501 if p:
1501 if p:
1502 break
1502 break
1503 self.explain_pushable(i)
1503 self.explain_pushable(i)
1504 i += 1
1504 i += 1
1505 return i
1505 return i
1506 if len(self.applied) > 0:
1506 if len(self.applied) > 0:
1507 p = self.applied[-1].name
1507 p = self.applied[-1].name
1508 try:
1508 try:
1509 end = self.series.index(p)
1509 end = self.series.index(p)
1510 except ValueError:
1510 except ValueError:
1511 return 0
1511 return 0
1512 return next(end + 1)
1512 return next(end + 1)
1513 return next(end)
1513 return next(end)
1514
1514
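# A minimal, self-contained sketch of the scan series_end() performs above,
# assuming a plain list of patch names, a list of applied names and a
# pushable() callback instead of a real queue object; the helper name and its
# arguments are illustrative only, not part of the mq API.
def next_pushable_index(series, applied, pushable, all_patches=False):
    if applied:
        try:
            start = series.index(applied[-1]) + 1
        except ValueError:
            return 0
    else:
        start = 0
    if all_patches:
        return start
    i = start
    # skip guarded (non-pushable) patches, as series_end() does
    while i < len(series) and not pushable(series[i]):
        i += 1
    return i

# Example: with 'b.patch' guarded off, the next pushable patch is 'c.patch':
# next_pushable_index(['a.patch', 'b.patch', 'c.patch'], ['a.patch'],
#                     lambda p: p != 'b.patch')  ->  2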
1515 def appliedname(self, index):
1515 def appliedname(self, index):
1516 pname = self.applied[index].name
1516 pname = self.applied[index].name
1517 if not self.ui.verbose:
1517 if not self.ui.verbose:
1518 p = pname
1518 p = pname
1519 else:
1519 else:
1520 p = str(self.series.index(pname)) + " " + pname
1520 p = str(self.series.index(pname)) + " " + pname
1521 return p
1521 return p
1522
1522
1523 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1523 def qimport(self, repo, files, patchname=None, rev=None, existing=None,
1524 force=None, git=False):
1524 force=None, git=False):
1525 def checkseries(patchname):
1525 def checkseries(patchname):
1526 if patchname in self.series:
1526 if patchname in self.series:
1527 raise util.Abort(_('patch %s is already in the series file')
1527 raise util.Abort(_('patch %s is already in the series file')
1528 % patchname)
1528 % patchname)
1529 def checkfile(patchname):
1529 def checkfile(patchname):
1530 if not force and os.path.exists(self.join(patchname)):
1530 if not force and os.path.exists(self.join(patchname)):
1531 raise util.Abort(_('patch "%s" already exists')
1531 raise util.Abort(_('patch "%s" already exists')
1532 % patchname)
1532 % patchname)
1533
1533
1534 if rev:
1534 if rev:
1535 if files:
1535 if files:
1536 raise util.Abort(_('option "-r" not valid when importing '
1536 raise util.Abort(_('option "-r" not valid when importing '
1537 'files'))
1537 'files'))
1538 rev = cmdutil.revrange(repo, rev)
1538 rev = cmdutil.revrange(repo, rev)
1539 rev.sort(lambda x, y: cmp(y, x))
1539 rev.sort(lambda x, y: cmp(y, x))
1540 if (len(files) > 1 or len(rev) > 1) and patchname:
1540 if (len(files) > 1 or len(rev) > 1) and patchname:
1541 raise util.Abort(_('option "-n" not valid when importing multiple '
1541 raise util.Abort(_('option "-n" not valid when importing multiple '
1542 'patches'))
1542 'patches'))
1543 i = 0
1543 i = 0
1544 added = []
1544 added = []
1545 if rev:
1545 if rev:
1546 # If mq patches are applied, we can only import revisions
1546 # If mq patches are applied, we can only import revisions
1547 # that form a linear path to qbase.
1547 # that form a linear path to qbase.
1548 # Otherwise, they should form a linear path to a head.
1548 # Otherwise, they should form a linear path to a head.
1549 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1549 heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
1550 if len(heads) > 1:
1550 if len(heads) > 1:
1551 raise util.Abort(_('revision %d is the root of more than one '
1551 raise util.Abort(_('revision %d is the root of more than one '
1552 'branch') % rev[-1])
1552 'branch') % rev[-1])
1553 if self.applied:
1553 if self.applied:
1554 base = hex(repo.changelog.node(rev[0]))
1554 base = hex(repo.changelog.node(rev[0]))
1555 if base in [n.rev for n in self.applied]:
1555 if base in [n.rev for n in self.applied]:
1556 raise util.Abort(_('revision %d is already managed')
1556 raise util.Abort(_('revision %d is already managed')
1557 % rev[0])
1557 % rev[0])
1558 if heads != [bin(self.applied[-1].rev)]:
1558 if heads != [bin(self.applied[-1].rev)]:
1559 raise util.Abort(_('revision %d is not the parent of '
1559 raise util.Abort(_('revision %d is not the parent of '
1560 'the queue') % rev[0])
1560 'the queue') % rev[0])
1561 base = repo.changelog.rev(bin(self.applied[0].rev))
1561 base = repo.changelog.rev(bin(self.applied[0].rev))
1562 lastparent = repo.changelog.parentrevs(base)[0]
1562 lastparent = repo.changelog.parentrevs(base)[0]
1563 else:
1563 else:
1564 if heads != [repo.changelog.node(rev[0])]:
1564 if heads != [repo.changelog.node(rev[0])]:
1565 raise util.Abort(_('revision %d has unmanaged children')
1565 raise util.Abort(_('revision %d has unmanaged children')
1566 % rev[0])
1566 % rev[0])
1567 lastparent = None
1567 lastparent = None
1568
1568
1569 if git:
1569 if git:
1570 self.diffopts().git = True
1570 self.diffopts().git = True
1571
1571
1572 for r in rev:
1572 for r in rev:
1573 p1, p2 = repo.changelog.parentrevs(r)
1573 p1, p2 = repo.changelog.parentrevs(r)
1574 n = repo.changelog.node(r)
1574 n = repo.changelog.node(r)
1575 if p2 != nullrev:
1575 if p2 != nullrev:
1576 raise util.Abort(_('cannot import merge revision %d') % r)
1576 raise util.Abort(_('cannot import merge revision %d') % r)
1577 if lastparent and lastparent != r:
1577 if lastparent and lastparent != r:
1578 raise util.Abort(_('revision %d is not the parent of %d')
1578 raise util.Abort(_('revision %d is not the parent of %d')
1579 % (r, lastparent))
1579 % (r, lastparent))
1580 lastparent = p1
1580 lastparent = p1
1581
1581
1582 if not patchname:
1582 if not patchname:
1583 patchname = normname('%d.diff' % r)
1583 patchname = normname('%d.diff' % r)
1584 self.check_reserved_name(patchname)
1584 self.check_reserved_name(patchname)
1585 checkseries(patchname)
1585 checkseries(patchname)
1586 checkfile(patchname)
1586 checkfile(patchname)
1587 self.full_series.insert(0, patchname)
1587 self.full_series.insert(0, patchname)
1588
1588
1589 patchf = self.opener(patchname, "w")
1589 patchf = self.opener(patchname, "w")
1590 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1590 patch.export(repo, [n], fp=patchf, opts=self.diffopts())
1591 patchf.close()
1591 patchf.close()
1592
1592
1593 se = statusentry(hex(n), patchname)
1593 se = statusentry(hex(n), patchname)
1594 self.applied.insert(0, se)
1594 self.applied.insert(0, se)
1595
1595
1596 added.append(patchname)
1596 added.append(patchname)
1597 patchname = None
1597 patchname = None
1598 self.parse_series()
1598 self.parse_series()
1599 self.applied_dirty = 1
1599 self.applied_dirty = 1
1600
1600
1601 for filename in files:
1601 for filename in files:
1602 if existing:
1602 if existing:
1603 if filename == '-':
1603 if filename == '-':
1604 raise util.Abort(_('-e is incompatible with import from -'))
1604 raise util.Abort(_('-e is incompatible with import from -'))
1605 if not patchname:
1605 if not patchname:
1606 patchname = normname(filename)
1606 patchname = normname(filename)
1607 self.check_reserved_name(patchname)
1607 self.check_reserved_name(patchname)
1608 if not os.path.isfile(self.join(patchname)):
1608 if not os.path.isfile(self.join(patchname)):
1609 raise util.Abort(_("patch %s does not exist") % patchname)
1609 raise util.Abort(_("patch %s does not exist") % patchname)
1610 else:
1610 else:
1611 try:
1611 try:
1612 if filename == '-':
1612 if filename == '-':
1613 if not patchname:
1613 if not patchname:
1614 raise util.Abort(_('need --name to import a patch from -'))
1614 raise util.Abort(_('need --name to import a patch from -'))
1615 text = sys.stdin.read()
1615 text = sys.stdin.read()
1616 else:
1616 else:
1617 text = url.open(self.ui, filename).read()
1617 text = url.open(self.ui, filename).read()
1618 except (OSError, IOError):
1618 except (OSError, IOError):
1619 raise util.Abort(_("unable to read %s") % filename)
1619 raise util.Abort(_("unable to read %s") % filename)
1620 if not patchname:
1620 if not patchname:
1621 patchname = normname(os.path.basename(filename))
1621 patchname = normname(os.path.basename(filename))
1622 self.check_reserved_name(patchname)
1622 self.check_reserved_name(patchname)
1623 checkfile(patchname)
1623 checkfile(patchname)
1624 patchf = self.opener(patchname, "w")
1624 patchf = self.opener(patchname, "w")
1625 patchf.write(text)
1625 patchf.write(text)
1626 if not force:
1626 if not force:
1627 checkseries(patchname)
1627 checkseries(patchname)
1628 if patchname not in self.series:
1628 if patchname not in self.series:
1629 index = self.full_series_end() + i
1629 index = self.full_series_end() + i
1630 self.full_series[index:index] = [patchname]
1630 self.full_series[index:index] = [patchname]
1631 self.parse_series()
1631 self.parse_series()
1632 self.ui.warn(_("adding %s to series file\n") % patchname)
1632 self.ui.warn(_("adding %s to series file\n") % patchname)
1633 i += 1
1633 i += 1
1634 added.append(patchname)
1634 added.append(patchname)
1635 patchname = None
1635 patchname = None
1636 self.series_dirty = 1
1636 self.series_dirty = 1
1637 qrepo = self.qrepo()
1637 qrepo = self.qrepo()
1638 if qrepo:
1638 if qrepo:
1639 qrepo.add(added)
1639 qrepo.add(added)
1640
1640
1641 def delete(ui, repo, *patches, **opts):
1641 def delete(ui, repo, *patches, **opts):
1642 """remove patches from queue
1642 """remove patches from queue
1643
1643
1644 The patches must not be applied, unless they are arguments to the
1644 The patches must not be applied, unless they are arguments to the
1645 -r/--rev parameter. At least one patch or revision is required.
1645 -r/--rev parameter. At least one patch or revision is required.
1646
1646
1647 With --rev, mq will stop managing the named revisions (converting
1647 With --rev, mq will stop managing the named revisions (converting
1648 them to regular Mercurial changesets). The qfinish command should
1648 them to regular Mercurial changesets). The qfinish command should
1649 be used as an alternative for qdelete -r, as the latter option is
1649 be used as an alternative for qdelete -r, as the latter option is
1650 deprecated.
1650 deprecated.
1651
1651
1652 With -k/--keep, the patch files are preserved in the patch
1652 With -k/--keep, the patch files are preserved in the patch
1653 directory."""
1653 directory."""
1654 q = repo.mq
1654 q = repo.mq
1655 q.delete(repo, patches, opts)
1655 q.delete(repo, patches, opts)
1656 q.save_dirty()
1656 q.save_dirty()
1657 return 0
1657 return 0
1658
1658
1659 def applied(ui, repo, patch=None, **opts):
1659 def applied(ui, repo, patch=None, **opts):
1660 """print the patches already applied"""
1660 """print the patches already applied"""
1661 q = repo.mq
1661 q = repo.mq
1662 if patch:
1662 if patch:
1663 if patch not in q.series:
1663 if patch not in q.series:
1664 raise util.Abort(_("patch %s is not in series file") % patch)
1664 raise util.Abort(_("patch %s is not in series file") % patch)
1665 end = q.series.index(patch) + 1
1665 end = q.series.index(patch) + 1
1666 else:
1666 else:
1667 end = q.series_end(True)
1667 end = q.series_end(True)
1668 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1668 return q.qseries(repo, length=end, status='A', summary=opts.get('summary'))
1669
1669
1670 def unapplied(ui, repo, patch=None, **opts):
1670 def unapplied(ui, repo, patch=None, **opts):
1671 """print the patches not yet applied"""
1671 """print the patches not yet applied"""
1672 q = repo.mq
1672 q = repo.mq
1673 if patch:
1673 if patch:
1674 if patch not in q.series:
1674 if patch not in q.series:
1675 raise util.Abort(_("patch %s is not in series file") % patch)
1675 raise util.Abort(_("patch %s is not in series file") % patch)
1676 start = q.series.index(patch) + 1
1676 start = q.series.index(patch) + 1
1677 else:
1677 else:
1678 start = q.series_end(True)
1678 start = q.series_end(True)
1679 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1679 q.qseries(repo, start=start, status='U', summary=opts.get('summary'))
1680
1680
1681 def qimport(ui, repo, *filename, **opts):
1681 def qimport(ui, repo, *filename, **opts):
1682 """import a patch
1682 """import a patch
1683
1683
1684 The patch is inserted into the series after the last applied
1684 The patch is inserted into the series after the last applied
1685 patch. If no patches have been applied, qimport prepends the patch
1685 patch. If no patches have been applied, qimport prepends the patch
1686 to the series.
1686 to the series.
1687
1687
1688 The patch will have the same name as its source file unless you
1688 The patch will have the same name as its source file unless you
1689 give it a new one with -n/--name.
1689 give it a new one with -n/--name.
1690
1690
1691 You can register an existing patch inside the patch directory with
1691 You can register an existing patch inside the patch directory with
1692 the -e/--existing flag.
1692 the -e/--existing flag.
1693
1693
1694 With -f/--force, an existing patch of the same name will be
1694 With -f/--force, an existing patch of the same name will be
1695 overwritten.
1695 overwritten.
1696
1696
1697 An existing changeset may be placed under mq control with -r/--rev
1697 An existing changeset may be placed under mq control with -r/--rev
1698 (e.g. qimport --rev tip -n patch will place tip under mq control).
1698 (e.g. qimport --rev tip -n patch will place tip under mq control).
1699 With -g/--git, patches imported with --rev will use the git diff
1699 With -g/--git, patches imported with --rev will use the git diff
1700 format. See the diffs help topic for information on why this is
1700 format. See the diffs help topic for information on why this is
1701 important for preserving rename/copy information and permission
1701 important for preserving rename/copy information and permission
1702 changes.
1702 changes.
1703
1703
1704 To import a patch from standard input, pass - as the patch file.
1704 To import a patch from standard input, pass - as the patch file.
1705 When importing from standard input, a patch name must be specified
1705 When importing from standard input, a patch name must be specified
1706 using the --name flag.
1706 using the --name flag.
1707 """
1707 """
1708 q = repo.mq
1708 q = repo.mq
1709 q.qimport(repo, filename, patchname=opts['name'],
1709 q.qimport(repo, filename, patchname=opts['name'],
1710 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1710 existing=opts['existing'], force=opts['force'], rev=opts['rev'],
1711 git=opts['git'])
1711 git=opts['git'])
1712 q.save_dirty()
1712 q.save_dirty()
1713
1713
1714 if opts.get('push') and not opts.get('rev'):
1714 if opts.get('push') and not opts.get('rev'):
1715 return q.push(repo, None)
1715 return q.push(repo, None)
1716 return 0
1716 return 0
1717
1717
1718 def init(ui, repo, **opts):
1718 def init(ui, repo, **opts):
1719 """init a new queue repository
1719 """init a new queue repository
1720
1720
1721 The queue repository is unversioned by default. If
1721 The queue repository is unversioned by default. If
1722 -c/--create-repo is specified, qinit will create a separate nested
1722 -c/--create-repo is specified, qinit will create a separate nested
1723 repository for patches (qinit -c may also be run later to convert
1723 repository for patches (qinit -c may also be run later to convert
1724 an unversioned patch repository into a versioned one). You can use
1724 an unversioned patch repository into a versioned one). You can use
1725 qcommit to commit changes to this queue repository."""
1725 qcommit to commit changes to this queue repository."""
1726 q = repo.mq
1726 q = repo.mq
1727 r = q.init(repo, create=opts['create_repo'])
1727 r = q.init(repo, create=opts['create_repo'])
1728 q.save_dirty()
1728 q.save_dirty()
1729 if r:
1729 if r:
1730 if not os.path.exists(r.wjoin('.hgignore')):
1730 if not os.path.exists(r.wjoin('.hgignore')):
1731 fp = r.wopener('.hgignore', 'w')
1731 fp = r.wopener('.hgignore', 'w')
1732 fp.write('^\\.hg\n')
1732 fp.write('^\\.hg\n')
1733 fp.write('^\\.mq\n')
1733 fp.write('^\\.mq\n')
1734 fp.write('syntax: glob\n')
1734 fp.write('syntax: glob\n')
1735 fp.write('status\n')
1735 fp.write('status\n')
1736 fp.write('guards\n')
1736 fp.write('guards\n')
1737 fp.close()
1737 fp.close()
1738 if not os.path.exists(r.wjoin('series')):
1738 if not os.path.exists(r.wjoin('series')):
1739 r.wopener('series', 'w').close()
1739 r.wopener('series', 'w').close()
1740 r.add(['.hgignore', 'series'])
1740 r.add(['.hgignore', 'series'])
1741 commands.add(ui, r)
1741 commands.add(ui, r)
1742 return 0
1742 return 0
1743
1743
1744 def clone(ui, source, dest=None, **opts):
1744 def clone(ui, source, dest=None, **opts):
1745 '''clone main and patch repository at same time
1745 '''clone main and patch repository at same time
1746
1746
1747 If source is local, destination will have no patches applied. If
1747 If source is local, destination will have no patches applied. If
1748 source is remote, this command cannot check whether patches are
1748 source is remote, this command cannot check whether patches are
1749 applied in source, so it cannot guarantee that patches are not
1749 applied in source, so it cannot guarantee that patches are not
1750 applied in destination. If you clone a remote repository, make sure
1750 applied in destination. If you clone a remote repository, make sure
1751 beforehand that it has no patches applied.
1751 beforehand that it has no patches applied.
1752
1752
1753 The source patch repository is looked for in <src>/.hg/patches by
1753 The source patch repository is looked for in <src>/.hg/patches by
1754 default. Use -p <url> to change it.
1754 default. Use -p <url> to change it.
1755
1755
1756 The patch directory must be a nested Mercurial repository, as
1756 The patch directory must be a nested Mercurial repository, as
1757 would be created by qinit -c.
1757 would be created by qinit -c.
1758 '''
1758 '''
1759 def patchdir(repo):
1759 def patchdir(repo):
1760 url = repo.url()
1760 url = repo.url()
1761 if url.endswith('/'):
1761 if url.endswith('/'):
1762 url = url[:-1]
1762 url = url[:-1]
1763 return url + '/.hg/patches'
1763 return url + '/.hg/patches'
1764 if dest is None:
1764 if dest is None:
1765 dest = hg.defaultdest(source)
1765 dest = hg.defaultdest(source)
1766 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1766 sr = hg.repository(cmdutil.remoteui(ui, opts), ui.expandpath(source))
1767 if opts['patches']:
1767 if opts['patches']:
1768 patchespath = ui.expandpath(opts['patches'])
1768 patchespath = ui.expandpath(opts['patches'])
1769 else:
1769 else:
1770 patchespath = patchdir(sr)
1770 patchespath = patchdir(sr)
1771 try:
1771 try:
1772 hg.repository(ui, patchespath)
1772 hg.repository(ui, patchespath)
1773 except error.RepoError:
1773 except error.RepoError:
1774 raise util.Abort(_('versioned patch repository not found'
1774 raise util.Abort(_('versioned patch repository not found'
1775 ' (see qinit -c)'))
1775 ' (see qinit -c)'))
1776 qbase, destrev = None, None
1776 qbase, destrev = None, None
1777 if sr.local():
1777 if sr.local():
1778 if sr.mq.applied:
1778 if sr.mq.applied:
1779 qbase = bin(sr.mq.applied[0].rev)
1779 qbase = bin(sr.mq.applied[0].rev)
1780 if not hg.islocal(dest):
1780 if not hg.islocal(dest):
1781 heads = set(sr.heads())
1781 heads = set(sr.heads())
1782 destrev = list(heads.difference(sr.heads(qbase)))
1782 destrev = list(heads.difference(sr.heads(qbase)))
1783 destrev.append(sr.changelog.parents(qbase)[0])
1783 destrev.append(sr.changelog.parents(qbase)[0])
1784 elif sr.capable('lookup'):
1784 elif sr.capable('lookup'):
1785 try:
1785 try:
1786 qbase = sr.lookup('qbase')
1786 qbase = sr.lookup('qbase')
1787 except error.RepoError:
1787 except error.RepoError:
1788 pass
1788 pass
1789 ui.note(_('cloning main repository\n'))
1789 ui.note(_('cloning main repository\n'))
1790 sr, dr = hg.clone(ui, sr.url(), dest,
1790 sr, dr = hg.clone(ui, sr.url(), dest,
1791 pull=opts['pull'],
1791 pull=opts['pull'],
1792 rev=destrev,
1792 rev=destrev,
1793 update=False,
1793 update=False,
1794 stream=opts['uncompressed'])
1794 stream=opts['uncompressed'])
1795 ui.note(_('cloning patch repository\n'))
1795 ui.note(_('cloning patch repository\n'))
1796 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1796 hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
1797 pull=opts['pull'], update=not opts['noupdate'],
1797 pull=opts['pull'], update=not opts['noupdate'],
1798 stream=opts['uncompressed'])
1798 stream=opts['uncompressed'])
1799 if dr.local():
1799 if dr.local():
1800 if qbase:
1800 if qbase:
1801 ui.note(_('stripping applied patches from destination '
1801 ui.note(_('stripping applied patches from destination '
1802 'repository\n'))
1802 'repository\n'))
1803 dr.mq.strip(dr, qbase, update=False, backup=None)
1803 dr.mq.strip(dr, qbase, update=False, backup=None)
1804 if not opts['noupdate']:
1804 if not opts['noupdate']:
1805 ui.note(_('updating destination repository\n'))
1805 ui.note(_('updating destination repository\n'))
1806 hg.update(dr, dr.changelog.tip())
1806 hg.update(dr, dr.changelog.tip())
1807
1807
1808 def commit(ui, repo, *pats, **opts):
1808 def commit(ui, repo, *pats, **opts):
1809 """commit changes in the queue repository"""
1809 """commit changes in the queue repository"""
1810 q = repo.mq
1810 q = repo.mq
1811 r = q.qrepo()
1811 r = q.qrepo()
1812 if not r: raise util.Abort('no queue repository')
1812 if not r: raise util.Abort('no queue repository')
1813 commands.commit(r.ui, r, *pats, **opts)
1813 commands.commit(r.ui, r, *pats, **opts)
1814
1814
1815 def series(ui, repo, **opts):
1815 def series(ui, repo, **opts):
1816 """print the entire series file"""
1816 """print the entire series file"""
1817 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1817 repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
1818 return 0
1818 return 0
1819
1819
1820 def top(ui, repo, **opts):
1820 def top(ui, repo, **opts):
1821 """print the name of the current patch"""
1821 """print the name of the current patch"""
1822 q = repo.mq
1822 q = repo.mq
1823 t = q.applied and q.series_end(True) or 0
1823 t = q.applied and q.series_end(True) or 0
1824 if t:
1824 if t:
1825 return q.qseries(repo, start=t-1, length=1, status='A',
1825 return q.qseries(repo, start=t-1, length=1, status='A',
1826 summary=opts.get('summary'))
1826 summary=opts.get('summary'))
1827 else:
1827 else:
1828 ui.write(_("no patches applied\n"))
1828 ui.write(_("no patches applied\n"))
1829 return 1
1829 return 1
1830
1830
1831 def next(ui, repo, **opts):
1831 def next(ui, repo, **opts):
1832 """print the name of the next patch"""
1832 """print the name of the next patch"""
1833 q = repo.mq
1833 q = repo.mq
1834 end = q.series_end()
1834 end = q.series_end()
1835 if end == len(q.series):
1835 if end == len(q.series):
1836 ui.write(_("all patches applied\n"))
1836 ui.write(_("all patches applied\n"))
1837 return 1
1837 return 1
1838 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1838 return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))
1839
1839
1840 def prev(ui, repo, **opts):
1840 def prev(ui, repo, **opts):
1841 """print the name of the previous patch"""
1841 """print the name of the previous patch"""
1842 q = repo.mq
1842 q = repo.mq
1843 l = len(q.applied)
1843 l = len(q.applied)
1844 if l == 1:
1844 if l == 1:
1845 ui.write(_("only one patch applied\n"))
1845 ui.write(_("only one patch applied\n"))
1846 return 1
1846 return 1
1847 if not l:
1847 if not l:
1848 ui.write(_("no patches applied\n"))
1848 ui.write(_("no patches applied\n"))
1849 return 1
1849 return 1
1850 return q.qseries(repo, start=l-2, length=1, status='A',
1850 return q.qseries(repo, start=l-2, length=1, status='A',
1851 summary=opts.get('summary'))
1851 summary=opts.get('summary'))
1852
1852
1853 def setupheaderopts(ui, opts):
1853 def setupheaderopts(ui, opts):
1854 def do(opt,val):
1854 def do(opt,val):
1855 if not opts[opt] and opts['current' + opt]:
1855 if not opts[opt] and opts['current' + opt]:
1856 opts[opt] = val
1856 opts[opt] = val
1857 do('user', ui.username())
1857 do('user', ui.username())
1858 do('date', "%d %d" % util.makedate())
1858 do('date', "%d %d" % util.makedate())
1859
1859
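# A tiny standalone illustration of what setupheaderopts() does with the
# -U/--currentuser and -D/--currentdate flags described below: an explicit
# -u/-d value wins, otherwise the matching "current*" flag fills in a default.
# The function name and the plain-dict opts here are illustrative only.
def fill_header_defaults(opts, current_user, current_date):
    for opt, val in (('user', current_user), ('date', current_date)):
        if not opts.get(opt) and opts.get('current' + opt):
            opts[opt] = val
    return opts

# Example:
# fill_header_defaults({'user': '', 'currentuser': True, 'date': '',
#                       'currentdate': False}, 'alice', '0 0')
# leaves 'date' empty but sets 'user' to 'alice'.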
1860 def new(ui, repo, patch, *args, **opts):
1860 def new(ui, repo, patch, *args, **opts):
1861 """create a new patch
1861 """create a new patch
1862
1862
1863 qnew creates a new patch on top of the currently-applied patch (if
1863 qnew creates a new patch on top of the currently-applied patch (if
1864 any). It will refuse to run if there are any outstanding changes
1864 any). It will refuse to run if there are any outstanding changes
1865 unless -f/--force is specified, in which case the patch will be
1865 unless -f/--force is specified, in which case the patch will be
1866 initialized with them. You may also use -I/--include,
1866 initialized with them. You may also use -I/--include,
1867 -X/--exclude, and/or a list of files after the patch name to add
1867 -X/--exclude, and/or a list of files after the patch name to add
1868 only changes to matching files to the new patch, leaving the rest
1868 only changes to matching files to the new patch, leaving the rest
1869 as uncommitted modifications.
1869 as uncommitted modifications.
1870
1870
1871 -u/--user and -d/--date can be used to set the (given) user and
1871 -u/--user and -d/--date can be used to set the (given) user and
1872 date, respectively. -U/--currentuser and -D/--currentdate set user
1872 date, respectively. -U/--currentuser and -D/--currentdate set user
1873 to current user and date to current date.
1873 to current user and date to current date.
1874
1874
1875 -e/--edit, -m/--message or -l/--logfile set the patch header as
1875 -e/--edit, -m/--message or -l/--logfile set the patch header as
1876 well as the commit message. If none is specified, the header is
1876 well as the commit message. If none is specified, the header is
1877 empty and the commit message is '[mq]: PATCH'.
1877 empty and the commit message is '[mq]: PATCH'.
1878
1878
1879 Use the -g/--git option to keep the patch in the git extended diff
1879 Use the -g/--git option to keep the patch in the git extended diff
1880 format. Read the diffs help topic for more information on why this
1880 format. Read the diffs help topic for more information on why this
1881 is important for preserving permission changes and copy/rename
1881 is important for preserving permission changes and copy/rename
1882 information.
1882 information.
1883 """
1883 """
1884 msg = cmdutil.logmessage(opts)
1884 msg = cmdutil.logmessage(opts)
1885 def getmsg(): return ui.edit(msg, ui.username())
1885 def getmsg(): return ui.edit(msg, ui.username())
1886 q = repo.mq
1886 q = repo.mq
1887 opts['msg'] = msg
1887 opts['msg'] = msg
1888 if opts.get('edit'):
1888 if opts.get('edit'):
1889 opts['msg'] = getmsg
1889 opts['msg'] = getmsg
1890 else:
1890 else:
1891 opts['msg'] = msg
1891 opts['msg'] = msg
1892 setupheaderopts(ui, opts)
1892 setupheaderopts(ui, opts)
1893 q.new(repo, patch, *args, **opts)
1893 q.new(repo, patch, *args, **opts)
1894 q.save_dirty()
1894 q.save_dirty()
1895 return 0
1895 return 0
1896
1896
1897 def refresh(ui, repo, *pats, **opts):
1897 def refresh(ui, repo, *pats, **opts):
1898 """update the current patch
1898 """update the current patch
1899
1899
1900 If any file patterns are provided, the refreshed patch will
1900 If any file patterns are provided, the refreshed patch will
1901 contain only the modifications that match those patterns; the
1901 contain only the modifications that match those patterns; the
1902 rest of the modifications will stay in the working directory.
1902 rest of the modifications will stay in the working directory.
1903
1903
1904 If -s/--short is specified, files currently included in the patch
1904 If -s/--short is specified, files currently included in the patch
1905 will be refreshed just like matched files and remain in the patch.
1905 will be refreshed just like matched files and remain in the patch.
1906
1906
1907 hg add/remove/copy/rename work as usual, though you might want to
1907 hg add/remove/copy/rename work as usual, though you might want to
1908 use git-style patches (-g/--git or [diff] git=1) to track copies
1908 use git-style patches (-g/--git or [diff] git=1) to track copies
1909 and renames. See the diffs help topic for more information on the
1909 and renames. See the diffs help topic for more information on the
1910 git diff format.
1910 git diff format.
1911 """
1911 """
1912 q = repo.mq
1912 q = repo.mq
1913 message = cmdutil.logmessage(opts)
1913 message = cmdutil.logmessage(opts)
1914 if opts['edit']:
1914 if opts['edit']:
1915 if not q.applied:
1915 if not q.applied:
1916 ui.write(_("no patches applied\n"))
1916 ui.write(_("no patches applied\n"))
1917 return 1
1917 return 1
1918 if message:
1918 if message:
1919 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1919 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1920 patch = q.applied[-1].name
1920 patch = q.applied[-1].name
1921 ph = patchheader(q.join(patch))
1921 ph = patchheader(q.join(patch))
1922 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
1922 message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
1923 setupheaderopts(ui, opts)
1923 setupheaderopts(ui, opts)
1924 ret = q.refresh(repo, pats, msg=message, **opts)
1924 ret = q.refresh(repo, pats, msg=message, **opts)
1925 q.save_dirty()
1925 q.save_dirty()
1926 return ret
1926 return ret
1927
1927
1928 def diff(ui, repo, *pats, **opts):
1928 def diff(ui, repo, *pats, **opts):
1929 """diff of the current patch and subsequent modifications
1929 """diff of the current patch and subsequent modifications
1930
1930
1931 Shows a diff which includes the current patch as well as any
1931 Shows a diff which includes the current patch as well as any
1932 changes which have been made in the working directory since the
1932 changes which have been made in the working directory since the
1933 last refresh (thus showing what the current patch would become
1933 last refresh (thus showing what the current patch would become
1934 after a qrefresh).
1934 after a qrefresh).
1935
1935
1936 Use 'hg diff' if you only want to see the changes made since the
1936 Use 'hg diff' if you only want to see the changes made since the
1937 last qrefresh, or 'hg export qtip' if you want to see changes made
1937 last qrefresh, or 'hg export qtip' if you want to see changes made
1938 by the current patch without including changes made since the
1938 by the current patch without including changes made since the
1939 qrefresh.
1939 qrefresh.
1940 """
1940 """
1941 repo.mq.diff(repo, pats, opts)
1941 repo.mq.diff(repo, pats, opts)
1942 return 0
1942 return 0
1943
1943
1944 def fold(ui, repo, *files, **opts):
1944 def fold(ui, repo, *files, **opts):
1945 """fold the named patches into the current patch
1945 """fold the named patches into the current patch
1946
1946
1947 Patches must not yet be applied. Each patch will be successively
1947 Patches must not yet be applied. Each patch will be successively
1948 applied to the current patch in the order given. If all the
1948 applied to the current patch in the order given. If all the
1949 patches apply successfully, the current patch will be refreshed
1949 patches apply successfully, the current patch will be refreshed
1950 with the new cumulative patch, and the folded patches will be
1950 with the new cumulative patch, and the folded patches will be
1951 deleted. With -k/--keep, the folded patch files will not be
1951 deleted. With -k/--keep, the folded patch files will not be
1952 removed afterwards.
1952 removed afterwards.
1953
1953
1954 The header for each folded patch will be concatenated with the
1954 The header for each folded patch will be concatenated with the
1955 current patch header, separated by a line of '* * *'."""
1955 current patch header, separated by a line of '* * *'."""
1956
1956
1957 q = repo.mq
1957 q = repo.mq
1958
1958
1959 if not files:
1959 if not files:
1960 raise util.Abort(_('qfold requires at least one patch name'))
1960 raise util.Abort(_('qfold requires at least one patch name'))
1961 if not q.check_toppatch(repo):
1961 if not q.check_toppatch(repo):
1962 raise util.Abort(_('No patches applied'))
1962 raise util.Abort(_('No patches applied'))
1963 q.check_localchanges(repo)
1963 q.check_localchanges(repo)
1964
1964
1965 message = cmdutil.logmessage(opts)
1965 message = cmdutil.logmessage(opts)
1966 if opts['edit']:
1966 if opts['edit']:
1967 if message:
1967 if message:
1968 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1968 raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
1969
1969
1970 parent = q.lookup('qtip')
1970 parent = q.lookup('qtip')
1971 patches = []
1971 patches = []
1972 messages = []
1972 messages = []
1973 for f in files:
1973 for f in files:
1974 p = q.lookup(f)
1974 p = q.lookup(f)
1975 if p in patches or p == parent:
1975 if p in patches or p == parent:
1976 ui.warn(_('Skipping already folded patch %s\n') % p)
1976 ui.warn(_('Skipping already folded patch %s\n') % p)
1977 if q.isapplied(p):
1977 if q.isapplied(p):
1978 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1978 raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
1979 patches.append(p)
1979 patches.append(p)
1980
1980
1981 for p in patches:
1981 for p in patches:
1982 if not message:
1982 if not message:
1983 ph = patchheader(q.join(p))
1983 ph = patchheader(q.join(p))
1984 if ph.message:
1984 if ph.message:
1985 messages.append(ph.message)
1985 messages.append(ph.message)
1986 pf = q.join(p)
1986 pf = q.join(p)
1987 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1987 (patchsuccess, files, fuzz) = q.patch(repo, pf)
1988 if not patchsuccess:
1988 if not patchsuccess:
1989 raise util.Abort(_('Error folding patch %s') % p)
1989 raise util.Abort(_('Error folding patch %s') % p)
1990 patch.updatedir(ui, repo, files)
1990 patch.updatedir(ui, repo, files)
1991
1991
1992 if not message:
1992 if not message:
1993 ph = patchheader(q.join(parent))
1993 ph = patchheader(q.join(parent))
1994 message, user = ph.message, ph.user
1994 message, user = ph.message, ph.user
1995 for msg in messages:
1995 for msg in messages:
1996 message.append('* * *')
1996 message.append('* * *')
1997 message.extend(msg)
1997 message.extend(msg)
1998 message = '\n'.join(message)
1998 message = '\n'.join(message)
1999
1999
2000 if opts['edit']:
2000 if opts['edit']:
2001 message = ui.edit(message, user or ui.username())
2001 message = ui.edit(message, user or ui.username())
2002
2002
2003 q.refresh(repo, msg=message)
2003 q.refresh(repo, msg=message)
2004 q.delete(repo, patches, opts)
2004 q.delete(repo, patches, opts)
2005 q.save_dirty()
2005 q.save_dirty()
2006
2006
2007 def goto(ui, repo, patch, **opts):
2007 def goto(ui, repo, patch, **opts):
2008 '''push or pop patches until named patch is at top of stack'''
2008 '''push or pop patches until named patch is at top of stack'''
2009 q = repo.mq
2009 q = repo.mq
2010 patch = q.lookup(patch)
2010 patch = q.lookup(patch)
2011 if q.isapplied(patch):
2011 if q.isapplied(patch):
2012 ret = q.pop(repo, patch, force=opts['force'])
2012 ret = q.pop(repo, patch, force=opts['force'])
2013 else:
2013 else:
2014 ret = q.push(repo, patch, force=opts['force'])
2014 ret = q.push(repo, patch, force=opts['force'])
2015 q.save_dirty()
2015 q.save_dirty()
2016 return ret
2016 return ret
2017
2017
2018 def guard(ui, repo, *args, **opts):
2018 def guard(ui, repo, *args, **opts):
2019 '''set or print guards for a patch
2019 '''set or print guards for a patch
2020
2020
2021 Guards control whether a patch can be pushed. A patch with no
2021 Guards control whether a patch can be pushed. A patch with no
2022 guards is always pushed. A patch with a positive guard ("+foo") is
2022 guards is always pushed. A patch with a positive guard ("+foo") is
2023 pushed only if the qselect command has activated it. A patch with
2023 pushed only if the qselect command has activated it. A patch with
2024 a negative guard ("-foo") is never pushed if the qselect command
2024 a negative guard ("-foo") is never pushed if the qselect command
2025 has activated it.
2025 has activated it.
2026
2026
2027 With no arguments, print the currently active guards.
2027 With no arguments, print the currently active guards.
2028 With arguments, set guards for the named patch.
2028 With arguments, set guards for the named patch.
2029 NOTE: Specifying negative guards now requires '--'.
2029 NOTE: Specifying negative guards now requires '--'.
2030
2030
2031 To set guards on another patch:
2031 To set guards on another patch:
2032 hg qguard -- other.patch +2.6.17 -stable
2032 hg qguard -- other.patch +2.6.17 -stable
2033 '''
2033 '''
2034 def status(idx):
2034 def status(idx):
2035 guards = q.series_guards[idx] or ['unguarded']
2035 guards = q.series_guards[idx] or ['unguarded']
2036 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
2036 ui.write('%s: %s\n' % (q.series[idx], ' '.join(guards)))
2037 q = repo.mq
2037 q = repo.mq
2038 patch = None
2038 patch = None
2039 args = list(args)
2039 args = list(args)
2040 if opts['list']:
2040 if opts['list']:
2041 if args or opts['none']:
2041 if args or opts['none']:
2042 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2042 raise util.Abort(_('cannot mix -l/--list with options or arguments'))
2043 for i in xrange(len(q.series)):
2043 for i in xrange(len(q.series)):
2044 status(i)
2044 status(i)
2045 return
2045 return
2046 if not args or args[0][0:1] in '-+':
2046 if not args or args[0][0:1] in '-+':
2047 if not q.applied:
2047 if not q.applied:
2048 raise util.Abort(_('no patches applied'))
2048 raise util.Abort(_('no patches applied'))
2049 patch = q.applied[-1].name
2049 patch = q.applied[-1].name
2050 if patch is None and args[0][0:1] not in '-+':
2050 if patch is None and args[0][0:1] not in '-+':
2051 patch = args.pop(0)
2051 patch = args.pop(0)
2052 if patch is None:
2052 if patch is None:
2053 raise util.Abort(_('no patch to work with'))
2053 raise util.Abort(_('no patch to work with'))
2054 if args or opts['none']:
2054 if args or opts['none']:
2055 idx = q.find_series(patch)
2055 idx = q.find_series(patch)
2056 if idx is None:
2056 if idx is None:
2057 raise util.Abort(_('no patch named %s') % patch)
2057 raise util.Abort(_('no patch named %s') % patch)
2058 q.set_guards(idx, args)
2058 q.set_guards(idx, args)
2059 q.save_dirty()
2059 q.save_dirty()
2060 else:
2060 else:
2061 status(q.series.index(q.lookup(patch)))
2061 status(q.series.index(q.lookup(patch)))
2062
2062
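# An illustrative, standalone sketch of the guard semantics described in the
# qguard help above and the qselect help further below (not the mq
# implementation itself): no guards means always pushable, a matching negative
# guard blocks the push, and if positive guards exist at least one must match.
def guard_allows_push(patch_guards, active_guards):
    if not patch_guards:
        return True
    active = set(active_guards)
    if any(g[1:] in active for g in patch_guards if g.startswith('-')):
        return False
    positives = [g[1:] for g in patch_guards if g.startswith('+')]
    return not positives or any(p in active for p in positives)

# Example, mirroring the qselect help text:
#   guard_allows_push(['-stable'], ['stable'])  ->  False  (foo.patch skipped)
#   guard_allows_push(['+stable'], ['stable'])  ->  True   (bar.patch pushed)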
2063 def header(ui, repo, patch=None):
2063 def header(ui, repo, patch=None):
2064 """print the header of the topmost or specified patch"""
2064 """print the header of the topmost or specified patch"""
2065 q = repo.mq
2065 q = repo.mq
2066
2066
2067 if patch:
2067 if patch:
2068 patch = q.lookup(patch)
2068 patch = q.lookup(patch)
2069 else:
2069 else:
2070 if not q.applied:
2070 if not q.applied:
2071 ui.write('no patches applied\n')
2071 ui.write('no patches applied\n')
2072 return 1
2072 return 1
2073 patch = q.lookup('qtip')
2073 patch = q.lookup('qtip')
2074 ph = patchheader(repo.mq.join(patch))
2074 ph = patchheader(repo.mq.join(patch))
2075
2075
2076 ui.write('\n'.join(ph.message) + '\n')
2076 ui.write('\n'.join(ph.message) + '\n')
2077
2077
2078 def lastsavename(path):
2078 def lastsavename(path):
2079 (directory, base) = os.path.split(path)
2079 (directory, base) = os.path.split(path)
2080 names = os.listdir(directory)
2080 names = os.listdir(directory)
2081 namere = re.compile("%s.([0-9]+)" % base)
2081 namere = re.compile("%s.([0-9]+)" % base)
2082 maxindex = None
2082 maxindex = None
2083 maxname = None
2083 maxname = None
2084 for f in names:
2084 for f in names:
2085 m = namere.match(f)
2085 m = namere.match(f)
2086 if m:
2086 if m:
2087 index = int(m.group(1))
2087 index = int(m.group(1))
2088 if maxindex is None or index > maxindex:
2088 if maxindex is None or index > maxindex:
2089 maxindex = index
2089 maxindex = index
2090 maxname = f
2090 maxname = f
2091 if maxname:
2091 if maxname:
2092 return (os.path.join(directory, maxname), maxindex)
2092 return (os.path.join(directory, maxname), maxindex)
2093 return (None, None)
2093 return (None, None)
2094
2094
2095 def savename(path):
2095 def savename(path):
2096 (last, index) = lastsavename(path)
2096 (last, index) = lastsavename(path)
2097 if last is None:
2097 if last is None:
2098 index = 0
2098 index = 0
2099 newpath = path + ".%d" % (index + 1)
2099 newpath = path + ".%d" % (index + 1)
2100 return newpath
2100 return newpath
2101
2101
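# A self-contained sketch of the save-name numbering scheme implemented by
# lastsavename()/savename() above, working over an in-memory list of names
# instead of a real directory listing; the helper name is illustrative only.
import re

def next_savename(base, existing_names):
    namere = re.compile(r"%s\.([0-9]+)$" % re.escape(base))
    indexes = [int(m.group(1)) for m in map(namere.match, existing_names) if m]
    return "%s.%d" % (base, max(indexes) + 1 if indexes else 1)

# Example: with 'patches.1' and 'patches.2' already saved, the next queue
# backup would be written as 'patches.3':
# next_savename('patches', ['patches.1', 'patches.2', 'series'])  ->  'patches.3'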
2102 def push(ui, repo, patch=None, **opts):
2102 def push(ui, repo, patch=None, **opts):
2103 """push the next patch onto the stack
2103 """push the next patch onto the stack
2104
2104
2105 When -f/--force is applied, all local changes in patched files
2105 When -f/--force is applied, all local changes in patched files
2106 will be lost.
2106 will be lost.
2107 """
2107 """
2108 q = repo.mq
2108 q = repo.mq
2109 mergeq = None
2109 mergeq = None
2110
2110
2111 if opts['merge']:
2111 if opts['merge']:
2112 if opts['name']:
2112 if opts['name']:
2113 newpath = repo.join(opts['name'])
2113 newpath = repo.join(opts['name'])
2114 else:
2114 else:
2115 newpath, i = lastsavename(q.path)
2115 newpath, i = lastsavename(q.path)
2116 if not newpath:
2116 if not newpath:
2117 ui.warn(_("no saved queues found, please use -n\n"))
2117 ui.warn(_("no saved queues found, please use -n\n"))
2118 return 1
2118 return 1
2119 mergeq = queue(ui, repo.join(""), newpath)
2119 mergeq = queue(ui, repo.join(""), newpath)
2120 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2120 ui.warn(_("merging with queue at: %s\n") % mergeq.path)
2121 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2121 ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
2122 mergeq=mergeq, all=opts.get('all'))
2122 mergeq=mergeq, all=opts.get('all'))
2123 return ret
2123 return ret
2124
2124
2125 def pop(ui, repo, patch=None, **opts):
2125 def pop(ui, repo, patch=None, **opts):
2126 """pop the current patch off the stack
2126 """pop the current patch off the stack
2127
2127
2128 By default, pops off the top of the patch stack. If given a patch
2128 By default, pops off the top of the patch stack. If given a patch
2129 name, keeps popping off patches until the named patch is at the
2129 name, keeps popping off patches until the named patch is at the
2130 top of the stack.
2130 top of the stack.
2131 """
2131 """
2132 localupdate = True
2132 localupdate = True
2133 if opts['name']:
2133 if opts['name']:
2134 q = queue(ui, repo.join(""), repo.join(opts['name']))
2134 q = queue(ui, repo.join(""), repo.join(opts['name']))
2135 ui.warn(_('using patch queue: %s\n') % q.path)
2135 ui.warn(_('using patch queue: %s\n') % q.path)
2136 localupdate = False
2136 localupdate = False
2137 else:
2137 else:
2138 q = repo.mq
2138 q = repo.mq
2139 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2139 ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
2140 all=opts['all'])
2140 all=opts['all'])
2141 q.save_dirty()
2141 q.save_dirty()
2142 return ret
2142 return ret
2143
2143
2144 def rename(ui, repo, patch, name=None, **opts):
2144 def rename(ui, repo, patch, name=None, **opts):
2145 """rename a patch
2145 """rename a patch
2146
2146
2147 With one argument, renames the current patch to PATCH1.
2147 With one argument, renames the current patch to PATCH1.
2148 With two arguments, renames PATCH1 to PATCH2."""
2148 With two arguments, renames PATCH1 to PATCH2."""
2149
2149
2150 q = repo.mq
2150 q = repo.mq
2151
2151
2152 if not name:
2152 if not name:
2153 name = patch
2153 name = patch
2154 patch = None
2154 patch = None
2155
2155
2156 if patch:
2156 if patch:
2157 patch = q.lookup(patch)
2157 patch = q.lookup(patch)
2158 else:
2158 else:
2159 if not q.applied:
2159 if not q.applied:
2160 ui.write(_('no patches applied\n'))
2160 ui.write(_('no patches applied\n'))
2161 return
2161 return
2162 patch = q.lookup('qtip')
2162 patch = q.lookup('qtip')
2163 absdest = q.join(name)
2163 absdest = q.join(name)
2164 if os.path.isdir(absdest):
2164 if os.path.isdir(absdest):
2165 name = normname(os.path.join(name, os.path.basename(patch)))
2165 name = normname(os.path.join(name, os.path.basename(patch)))
2166 absdest = q.join(name)
2166 absdest = q.join(name)
2167 if os.path.exists(absdest):
2167 if os.path.exists(absdest):
2168 raise util.Abort(_('%s already exists') % absdest)
2168 raise util.Abort(_('%s already exists') % absdest)
2169
2169
2170 if name in q.series:
2170 if name in q.series:
2171 raise util.Abort(_('A patch named %s already exists in the series file') % name)
2171 raise util.Abort(_('A patch named %s already exists in the series file') % name)
2172
2172
2173 if ui.verbose:
2173 if ui.verbose:
2174 ui.write('renaming %s to %s\n' % (patch, name))
2174 ui.write('renaming %s to %s\n' % (patch, name))
2175 i = q.find_series(patch)
2175 i = q.find_series(patch)
2176 guards = q.guard_re.findall(q.full_series[i])
2176 guards = q.guard_re.findall(q.full_series[i])
2177 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2177 q.full_series[i] = name + ''.join([' #' + g for g in guards])
2178 q.parse_series()
2178 q.parse_series()
2179 q.series_dirty = 1
2179 q.series_dirty = 1
2180
2180
2181 info = q.isapplied(patch)
2181 info = q.isapplied(patch)
2182 if info:
2182 if info:
2183 q.applied[info[0]] = statusentry(info[1], name)
2183 q.applied[info[0]] = statusentry(info[1], name)
2184 q.applied_dirty = 1
2184 q.applied_dirty = 1
2185
2185
2186 util.rename(q.join(patch), absdest)
2186 util.rename(q.join(patch), absdest)
2187 r = q.qrepo()
2187 r = q.qrepo()
2188 if r:
2188 if r:
2189 wlock = r.wlock()
2189 wlock = r.wlock()
2190 try:
2190 try:
2191 if r.dirstate[patch] == 'a':
2191 if r.dirstate[patch] == 'a':
2192 r.dirstate.forget(patch)
2192 r.dirstate.forget(patch)
2193 r.dirstate.add(name)
2193 r.dirstate.add(name)
2194 else:
2194 else:
2195 if r.dirstate[name] == 'r':
2195 if r.dirstate[name] == 'r':
2196 r.undelete([name])
2196 r.undelete([name])
2197 r.copy(patch, name)
2197 r.copy(patch, name)
2198 r.remove([patch], False)
2198 r.remove([patch], False)
2199 finally:
2199 finally:
2200 wlock.release()
2200 wlock.release()
2201
2201
2202 q.save_dirty()
2202 q.save_dirty()
2203
2203
2204 def restore(ui, repo, rev, **opts):
2204 def restore(ui, repo, rev, **opts):
2205 """restore the queue state saved by a revision"""
2205 """restore the queue state saved by a revision"""
2206 rev = repo.lookup(rev)
2206 rev = repo.lookup(rev)
2207 q = repo.mq
2207 q = repo.mq
2208 q.restore(repo, rev, delete=opts['delete'],
2208 q.restore(repo, rev, delete=opts['delete'],
2209 qupdate=opts['update'])
2209 qupdate=opts['update'])
2210 q.save_dirty()
2210 q.save_dirty()
2211 return 0
2211 return 0
2212
2212
2213 def save(ui, repo, **opts):
2213 def save(ui, repo, **opts):
2214 """save current queue state"""
2214 """save current queue state"""
2215 q = repo.mq
2215 q = repo.mq
2216 message = cmdutil.logmessage(opts)
2216 message = cmdutil.logmessage(opts)
2217 ret = q.save(repo, msg=message)
2217 ret = q.save(repo, msg=message)
2218 if ret:
2218 if ret:
2219 return ret
2219 return ret
2220 q.save_dirty()
2220 q.save_dirty()
2221 if opts['copy']:
2221 if opts['copy']:
2222 path = q.path
2222 path = q.path
2223 if opts['name']:
2223 if opts['name']:
2224 newpath = os.path.join(q.basepath, opts['name'])
2224 newpath = os.path.join(q.basepath, opts['name'])
2225 if os.path.exists(newpath):
2225 if os.path.exists(newpath):
2226 if not os.path.isdir(newpath):
2226 if not os.path.isdir(newpath):
2227 raise util.Abort(_('destination %s exists and is not '
2227 raise util.Abort(_('destination %s exists and is not '
2228 'a directory') % newpath)
2228 'a directory') % newpath)
2229 if not opts['force']:
2229 if not opts['force']:
2230 raise util.Abort(_('destination %s exists, '
2230 raise util.Abort(_('destination %s exists, '
2231 'use -f to force') % newpath)
2231 'use -f to force') % newpath)
2232 else:
2232 else:
2233 newpath = savename(path)
2233 newpath = savename(path)
2234 ui.warn(_("copy %s to %s\n") % (path, newpath))
2234 ui.warn(_("copy %s to %s\n") % (path, newpath))
2235 util.copyfiles(path, newpath)
2235 util.copyfiles(path, newpath)
2236 if opts['empty']:
2236 if opts['empty']:
2237 try:
2237 try:
2238 os.unlink(q.join(q.status_path))
2238 os.unlink(q.join(q.status_path))
2239 except:
2239 except:
2240 pass
2240 pass
2241 return 0
2241 return 0
2242
2242
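The --copy/--name branch of qsave above reduces to one validation rule: an explicitly named destination may already exist only if it is a directory, and even then only with --force. A small sketch of just that check (RuntimeError stands in for util.Abort; validate_copy_dest is an illustrative name, not an mq function):

import os

def validate_copy_dest(newpath, force=False):
    """Mirror the qsave -c -n NAME destination checks shown above."""
    if os.path.exists(newpath):
        if not os.path.isdir(newpath):
            raise RuntimeError('destination %s exists and is not a directory'
                               % newpath)
        if not force:
            raise RuntimeError('destination %s exists, use -f to force'
                               % newpath)
    return newpath
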
2243 def strip(ui, repo, rev, **opts):
2243 def strip(ui, repo, rev, **opts):
2244 """strip a revision and all its descendants from the repository
2244 """strip a revision and all its descendants from the repository
2245
2245
2246 If one of the working directory's parent revisions is stripped, the
2246 If one of the working directory's parent revisions is stripped, the
2247 working directory will be updated to the parent of the stripped
2247 working directory will be updated to the parent of the stripped
2248 revision.
2248 revision.
2249 """
2249 """
2250 backup = 'all'
2250 backup = 'all'
2251 if opts['backup']:
2251 if opts['backup']:
2252 backup = 'strip'
2252 backup = 'strip'
2253 elif opts['nobackup']:
2253 elif opts['nobackup']:
2254 backup = 'none'
2254 backup = 'none'
2255
2255
2256 rev = repo.lookup(rev)
2256 rev = repo.lookup(rev)
2257 p = repo.dirstate.parents()
2257 p = repo.dirstate.parents()
2258 cl = repo.changelog
2258 cl = repo.changelog
2259 update = True
2259 update = True
2260 if p[0] == nullid:
2260 if p[0] == nullid:
2261 update = False
2261 update = False
2262 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2262 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2263 update = False
2263 update = False
2264 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2264 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2265 update = False
2265 update = False
2266
2266
2267 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2267 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2268 return 0
2268 return 0
2269
2269
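The update decision inside strip() above is easier to read as a pure function: the working directory only needs to move when the revision being stripped is an ancestor of one of its parents. A hedged sketch, where ancestor(a, b) is whatever greatest-common-ancestor function the caller supplies (should_update is an illustrative name, not part of mq):

def should_update(p1, p2, rev, ancestor, nullid=None):
    """True when stripping `rev` forces a working directory update,
    i.e. `rev` is an ancestor of a dirstate parent (mirrors strip() above)."""
    if p1 == nullid:
        return False                      # no working directory parent
    if p2 == nullid:
        return ancestor(p1, rev) == rev   # rev is an ancestor of p1
    return rev in (ancestor(p1, rev), ancestor(p2, rev))

# toy linear history: the ancestor of two revs is simply the smaller one
linear = lambda a, b: min(a, b)
print(should_update(5, None, 3, linear))  # True: 3 is below the wdir parent
print(should_update(2, None, 7, linear))  # False: 7 is not an ancestor of 2
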
2270 def select(ui, repo, *args, **opts):
2270 def select(ui, repo, *args, **opts):
2271 '''set or print guarded patches to push
2271 '''set or print guarded patches to push
2272
2272
2273 Use the qguard command to set or print guards on a patch, then use
2273 Use the qguard command to set or print guards on a patch, then use
2274 qselect to tell mq which guards to use. A patch will be pushed if
2274 qselect to tell mq which guards to use. A patch will be pushed if
2275 it has no guards or if any of its positive guards match a currently
2275 it has no guards or if any of its positive guards match a currently
2276 selected guard, but it will not be pushed if any of its negative
2276 selected guard, but it will not be pushed if any of its negative
2277 guards match a currently selected guard. For example:
2277 guards match a currently selected guard. For example:
2278
2278
2279 qguard foo.patch -stable (negative guard)
2279 qguard foo.patch -stable (negative guard)
2280 qguard bar.patch +stable (positive guard)
2280 qguard bar.patch +stable (positive guard)
2281 qselect stable
2281 qselect stable
2282
2282
2283 This activates the "stable" guard. mq will skip foo.patch (because
2283 This activates the "stable" guard. mq will skip foo.patch (because
2284 it has a negative match) but push bar.patch (because it has a
2284 it has a negative match) but push bar.patch (because it has a
2285 positive match).
2285 positive match).
2286
2286
2287 With no arguments, prints the currently active guards.
2287 With no arguments, prints the currently active guards.
2288 With one argument, sets the active guard.
2288 With one argument, sets the active guard.
2289
2289
2290 Use -n/--none to deactivate guards (no other arguments needed).
2290 Use -n/--none to deactivate guards (no other arguments needed).
2291 When no guards are active, patches with positive guards are
2291 When no guards are active, patches with positive guards are
2292 skipped and patches with negative guards are pushed.
2292 skipped and patches with negative guards are pushed.
2293
2293
2294 qselect can change the guards on applied patches. It does not pop
2294 qselect can change the guards on applied patches. It does not pop
2295 guarded patches by default. Use --pop to pop back to the last
2295 guarded patches by default. Use --pop to pop back to the last
2296 applied patch that is not guarded. Use --reapply (which implies
2296 applied patch that is not guarded. Use --reapply (which implies
2297 --pop) to push back to the current patch afterwards, but skip
2297 --pop) to push back to the current patch afterwards, but skip
2298 guarded patches.
2298 guarded patches.
2299
2299
2300 Use -s/--series to print a list of all guards in the series file
2300 Use -s/--series to print a list of all guards in the series file
2301 (no other arguments needed). Use -v for more information.'''
2301 (no other arguments needed). Use -v for more information.'''
2302
2302
2303 q = repo.mq
2303 q = repo.mq
2304 guards = q.active()
2304 guards = q.active()
2305 if args or opts['none']:
2305 if args or opts['none']:
2306 old_unapplied = q.unapplied(repo)
2306 old_unapplied = q.unapplied(repo)
2307 old_guarded = [i for i in xrange(len(q.applied)) if
2307 old_guarded = [i for i in xrange(len(q.applied)) if
2308 not q.pushable(i)[0]]
2308 not q.pushable(i)[0]]
2309 q.set_active(args)
2309 q.set_active(args)
2310 q.save_dirty()
2310 q.save_dirty()
2311 if not args:
2311 if not args:
2312 ui.status(_('guards deactivated\n'))
2312 ui.status(_('guards deactivated\n'))
2313 if not opts['pop'] and not opts['reapply']:
2313 if not opts['pop'] and not opts['reapply']:
2314 unapplied = q.unapplied(repo)
2314 unapplied = q.unapplied(repo)
2315 guarded = [i for i in xrange(len(q.applied))
2315 guarded = [i for i in xrange(len(q.applied))
2316 if not q.pushable(i)[0]]
2316 if not q.pushable(i)[0]]
2317 if len(unapplied) != len(old_unapplied):
2317 if len(unapplied) != len(old_unapplied):
2318 ui.status(_('number of unguarded, unapplied patches has '
2318 ui.status(_('number of unguarded, unapplied patches has '
2319 'changed from %d to %d\n') %
2319 'changed from %d to %d\n') %
2320 (len(old_unapplied), len(unapplied)))
2320 (len(old_unapplied), len(unapplied)))
2321 if len(guarded) != len(old_guarded):
2321 if len(guarded) != len(old_guarded):
2322 ui.status(_('number of guarded, applied patches has changed '
2322 ui.status(_('number of guarded, applied patches has changed '
2323 'from %d to %d\n') %
2323 'from %d to %d\n') %
2324 (len(old_guarded), len(guarded)))
2324 (len(old_guarded), len(guarded)))
2325 elif opts['series']:
2325 elif opts['series']:
2326 guards = {}
2326 guards = {}
2327 noguards = 0
2327 noguards = 0
2328 for gs in q.series_guards:
2328 for gs in q.series_guards:
2329 if not gs:
2329 if not gs:
2330 noguards += 1
2330 noguards += 1
2331 for g in gs:
2331 for g in gs:
2332 guards.setdefault(g, 0)
2332 guards.setdefault(g, 0)
2333 guards[g] += 1
2333 guards[g] += 1
2334 if ui.verbose:
2334 if ui.verbose:
2335 guards['NONE'] = noguards
2335 guards['NONE'] = noguards
2336 guards = guards.items()
2336 guards = guards.items()
2337 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2337 guards.sort(lambda a, b: cmp(a[0][1:], b[0][1:]))
2338 if guards:
2338 if guards:
2339 ui.note(_('guards in series file:\n'))
2339 ui.note(_('guards in series file:\n'))
2340 for guard, count in guards:
2340 for guard, count in guards:
2341 ui.note('%2d ' % count)
2341 ui.note('%2d ' % count)
2342 ui.write(guard, '\n')
2342 ui.write(guard, '\n')
2343 else:
2343 else:
2344 ui.note(_('no guards in series file\n'))
2344 ui.note(_('no guards in series file\n'))
2345 else:
2345 else:
2346 if guards:
2346 if guards:
2347 ui.note(_('active guards:\n'))
2347 ui.note(_('active guards:\n'))
2348 for g in guards:
2348 for g in guards:
2349 ui.write(g, '\n')
2349 ui.write(g, '\n')
2350 else:
2350 else:
2351 ui.write(_('no active guards\n'))
2351 ui.write(_('no active guards\n'))
2352 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2352 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2353 popped = False
2353 popped = False
2354 if opts['pop'] or opts['reapply']:
2354 if opts['pop'] or opts['reapply']:
2355 for i in xrange(len(q.applied)):
2355 for i in xrange(len(q.applied)):
2356 pushable, reason = q.pushable(i)
2356 pushable, reason = q.pushable(i)
2357 if not pushable:
2357 if not pushable:
2358 ui.status(_('popping guarded patches\n'))
2358 ui.status(_('popping guarded patches\n'))
2359 popped = True
2359 popped = True
2360 if i == 0:
2360 if i == 0:
2361 q.pop(repo, all=True)
2361 q.pop(repo, all=True)
2362 else:
2362 else:
2363 q.pop(repo, i-1)
2363 q.pop(repo, i-1)
2364 break
2364 break
2365 if popped:
2365 if popped:
2366 try:
2366 try:
2367 if reapply:
2367 if reapply:
2368 ui.status(_('reapplying unguarded patches\n'))
2368 ui.status(_('reapplying unguarded patches\n'))
2369 q.push(repo, reapply)
2369 q.push(repo, reapply)
2370 finally:
2370 finally:
2371 q.save_dirty()
2371 q.save_dirty()
2372
2372
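The guard rules that qguard and qselect describe above come down to a short predicate: a patch with no guards is always pushable, a matching negative guard always blocks it, and if it has positive guards at least one of them must be currently selected. A simplified restatement of q.pushable() for illustration (not the mq implementation itself):

def pushable(guards, active):
    """guards: e.g. ['+stable', '-buggy']; active: set of selected guard names."""
    if not guards:
        return True
    if any(g[0] == '-' and g[1:] in active for g in guards):
        return False                 # a matching negative guard blocks the push
    positive = [g[1:] for g in guards if g[0] == '+']
    if positive:
        return any(p in active for p in positive)
    return True

# the docstring example after 'qselect stable':
print(pushable(['-stable'], {'stable'}))  # False -> foo.patch is skipped
print(pushable(['+stable'], {'stable'}))  # True  -> bar.patch is pushed
# and with no guards active at all:
print(pushable(['+stable'], set()))       # False -> positive guards are skipped
print(pushable(['-stable'], set()))       # True  -> negative guards are pushed
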
2373 def finish(ui, repo, *revrange, **opts):
2373 def finish(ui, repo, *revrange, **opts):
2374 """move applied patches into repository history
2374 """move applied patches into repository history
2375
2375
2376 Finishes the specified revisions (corresponding to applied
2376 Finishes the specified revisions (corresponding to applied
2377 patches) by moving them out of mq control into regular repository
2377 patches) by moving them out of mq control into regular repository
2378 history.
2378 history.
2379
2379
2380 Accepts a revision range or the -a/--applied option. If --applied
2380 Accepts a revision range or the -a/--applied option. If --applied
2381 is specified, all applied mq revisions are removed from mq
2381 is specified, all applied mq revisions are removed from mq
2382 control. Otherwise, the given revisions must be at the base of the
2382 control. Otherwise, the given revisions must be at the base of the
2383 stack of applied patches.
2383 stack of applied patches.
2384
2384
2385 This can be especially useful if your changes have been applied to
2385 This can be especially useful if your changes have been applied to
2386 an upstream repository, or if you are about to push your changes
2386 an upstream repository, or if you are about to push your changes
2387 to upstream.
2387 to upstream.
2388 """
2388 """
2389 if not opts['applied'] and not revrange:
2389 if not opts['applied'] and not revrange:
2390 raise util.Abort(_('no revisions specified'))
2390 raise util.Abort(_('no revisions specified'))
2391 elif opts['applied']:
2391 elif opts['applied']:
2392 revrange = ('qbase:qtip',) + revrange
2392 revrange = ('qbase:qtip',) + revrange
2393
2393
2394 q = repo.mq
2394 q = repo.mq
2395 if not q.applied:
2395 if not q.applied:
2396 ui.status(_('no patches applied\n'))
2396 ui.status(_('no patches applied\n'))
2397 return 0
2397 return 0
2398
2398
2399 revs = cmdutil.revrange(repo, revrange)
2399 revs = cmdutil.revrange(repo, revrange)
2400 q.finish(repo, revs)
2400 q.finish(repo, revs)
2401 q.save_dirty()
2401 q.save_dirty()
2402 return 0
2402 return 0
2403
2403
2404 def reposetup(ui, repo):
2404 def reposetup(ui, repo):
2405 class mqrepo(repo.__class__):
2405 class mqrepo(repo.__class__):
2406 @util.propertycache
2406 @util.propertycache
2407 def mq(self):
2407 def mq(self):
2408 return queue(self.ui, self.join(""))
2408 return queue(self.ui, self.join(""))
2409
2409
2410 def abort_if_wdir_patched(self, errmsg, force=False):
2410 def abort_if_wdir_patched(self, errmsg, force=False):
2411 if self.mq.applied and not force:
2411 if self.mq.applied and not force:
2412 parent = hex(self.dirstate.parents()[0])
2412 parent = hex(self.dirstate.parents()[0])
2413 if parent in [s.rev for s in self.mq.applied]:
2413 if parent in [s.rev for s in self.mq.applied]:
2414 raise util.Abort(errmsg)
2414 raise util.Abort(errmsg)
2415
2415
2416 def commit(self, text="", user=None, date=None, match=None,
2416 def commit(self, text="", user=None, date=None, match=None,
2417 force=False, editor=False, extra={}):
2417 force=False, editor=False, extra={}):
2418 self.abort_if_wdir_patched(
2418 self.abort_if_wdir_patched(
2419 _('cannot commit over an applied mq patch'),
2419 _('cannot commit over an applied mq patch'),
2420 force)
2420 force)
2421
2421
2422 return super(mqrepo, self).commit(text, user, date, match, force,
2422 return super(mqrepo, self).commit(text, user, date, match, force,
2423 editor, extra)
2423 editor, extra)
2424
2424
2425 def push(self, remote, force=False, revs=None):
2425 def push(self, remote, force=False, revs=None):
2426 if self.mq.applied and not force and not revs:
2426 if self.mq.applied and not force and not revs:
2427 raise util.Abort(_('source has mq patches applied'))
2427 raise util.Abort(_('source has mq patches applied'))
2428 return super(mqrepo, self).push(remote, force, revs)
2428 return super(mqrepo, self).push(remote, force, revs)
2429
2429
2430 def tags(self):
2430 def tags(self):
2431 if self.tagscache:
2431 if self.tagscache:
2432 return self.tagscache
2432 return self.tagscache
2433
2433
2434 tagscache = super(mqrepo, self).tags()
2434 tagscache = super(mqrepo, self).tags()
2435
2435
2436 q = self.mq
2436 q = self.mq
2437 if not q.applied:
2437 if not q.applied:
2438 return tagscache
2438 return tagscache
2439
2439
2440 mqtags = [(bin(patch.rev), patch.name) for patch in q.applied]
2440 mqtags = [(bin(patch.rev), patch.name) for patch in q.applied]
2441
2441
2442 if mqtags[-1][0] not in self.changelog.nodemap:
2442 if mqtags[-1][0] not in self.changelog.nodemap:
2443 self.ui.warn(_('mq status file refers to unknown node %s\n')
2443 self.ui.warn(_('mq status file refers to unknown node %s\n')
2444 % short(mqtags[-1][0]))
2444 % short(mqtags[-1][0]))
2445 return tagscache
2445 return tagscache
2446
2446
2447 mqtags.append((mqtags[-1][0], 'qtip'))
2447 mqtags.append((mqtags[-1][0], 'qtip'))
2448 mqtags.append((mqtags[0][0], 'qbase'))
2448 mqtags.append((mqtags[0][0], 'qbase'))
2449 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2449 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2450 for patch in mqtags:
2450 for patch in mqtags:
2451 if patch[1] in tagscache:
2451 if patch[1] in tagscache:
2452 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2452 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2453 % patch[1])
2453 % patch[1])
2454 else:
2454 else:
2455 tagscache[patch[1]] = patch[0]
2455 tagscache[patch[1]] = patch[0]
2456
2456
2457 return tagscache
2457 return tagscache
2458
2458
2459 def _branchtags(self, partial, lrev):
2459 def _branchtags(self, partial, lrev):
2460 q = self.mq
2460 q = self.mq
2461 if not q.applied:
2461 if not q.applied:
2462 return super(mqrepo, self)._branchtags(partial, lrev)
2462 return super(mqrepo, self)._branchtags(partial, lrev)
2463
2463
2464 cl = self.changelog
2464 cl = self.changelog
2465 qbasenode = bin(q.applied[0].rev)
2465 qbasenode = bin(q.applied[0].rev)
2466 if qbasenode not in cl.nodemap:
2466 if qbasenode not in cl.nodemap:
2467 self.ui.warn(_('mq status file refers to unknown node %s\n')
2467 self.ui.warn(_('mq status file refers to unknown node %s\n')
2468 % short(qbasenode))
2468 % short(qbasenode))
2469 return super(mqrepo, self)._branchtags(partial, lrev)
2469 return super(mqrepo, self)._branchtags(partial, lrev)
2470
2470
2471 qbase = cl.rev(qbasenode)
2471 qbase = cl.rev(qbasenode)
2472 start = lrev + 1
2472 start = lrev + 1
2473 if start < qbase:
2473 if start < qbase:
2474 # update the cache (excluding the patches) and save it
2474 # update the cache (excluding the patches) and save it
2475 self._updatebranchcache(partial, lrev+1, qbase)
2475 self._updatebranchcache(partial, lrev+1, qbase)
2476 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2476 self._writebranchcache(partial, cl.node(qbase-1), qbase-1)
2477 start = qbase
2477 start = qbase
2478 # if start = qbase, the cache is as updated as it should be.
2478 # if start = qbase, the cache is as updated as it should be.
2479 # if start > qbase, the cache includes (part of) the patches.
2479 # if start > qbase, the cache includes (part of) the patches.
2480 # we might as well use it, but we won't save it.
2480 # we might as well use it, but we won't save it.
2481
2481
2482 # update the cache up to the tip
2482 # update the cache up to the tip
2483 self._updatebranchcache(partial, start, len(cl))
2483 self._updatebranchcache(partial, start, len(cl))
2484
2484
2485 return partial
2485 return partial
2486
2486
2487 if repo.local():
2487 if repo.local():
2488 repo.__class__ = mqrepo
2488 repo.__class__ = mqrepo
2489
2489
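The tags() override above layers three synthetic tags on top of the per-patch ones: qtip on the last applied patch, qbase on the first, and qparent on the changeset the stack sits on. A small sketch of just that list construction (node values are plain strings here; the real code works with binary node ids via bin()):

def mqtags(applied, parent_of_first):
    """applied: list of (node, patchname) pairs in stack order."""
    if not applied:
        return []
    tags = list(applied)
    tags.append((applied[-1][0], 'qtip'))
    tags.append((applied[0][0], 'qbase'))
    tags.append((parent_of_first, 'qparent'))
    return tags

print(mqtags([('n1', 'a.patch'), ('n2', 'b.patch')], 'n0'))
# [('n1', 'a.patch'), ('n2', 'b.patch'), ('n2', 'qtip'), ('n1', 'qbase'), ('n0', 'qparent')]
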
2490 def mqimport(orig, ui, repo, *args, **kwargs):
2490 def mqimport(orig, ui, repo, *args, **kwargs):
2491 if hasattr(repo, 'abort_if_wdir_patched'):
2491 if hasattr(repo, 'abort_if_wdir_patched'):
2492 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2492 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2493 kwargs.get('force'))
2493 kwargs.get('force'))
2494 return orig(ui, repo, *args, **kwargs)
2494 return orig(ui, repo, *args, **kwargs)
2495
2495
2496 def uisetup(ui):
2496 def uisetup(ui):
2497 extensions.wrapcommand(commands.table, 'import', mqimport)
2497 extensions.wrapcommand(commands.table, 'import', mqimport)
2498
2498
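mqimport above follows the usual wrapper shape used with extensions.wrapcommand: the wrapper receives the original command as its first argument and defers to it after its own check. A toy stand-in that shows the shape without importing Mercurial (real cmdtable entries are tuples of function, options and synopsis, which wrapcommand also preserves; this sketch ignores that):

def wrap(table, name, wrapper):
    """Replace table[name] with a closure that passes the original first."""
    orig = table[name]
    def wrapped(*args, **kwargs):
        return wrapper(orig, *args, **kwargs)
    table[name] = wrapped

table = {'import': lambda ui, repo: 'imported'}

def checking_import(orig, ui, repo):
    # a pre-flight check would go here, then defer to the real command
    return orig(ui, repo)

wrap(table, 'import', checking_import)
print(table['import']('ui', 'repo'))   # -> 'imported'
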
2499 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2499 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2500
2500
2501 cmdtable = {
2501 cmdtable = {
2502 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2502 "qapplied": (applied, [] + seriesopts, _('hg qapplied [-s] [PATCH]')),
2503 "qclone":
2503 "qclone":
2504 (clone,
2504 (clone,
2505 [('', 'pull', None, _('use pull protocol to copy metadata')),
2505 [('', 'pull', None, _('use pull protocol to copy metadata')),
2506 ('U', 'noupdate', None, _('do not update the new working directories')),
2506 ('U', 'noupdate', None, _('do not update the new working directories')),
2507 ('', 'uncompressed', None,
2507 ('', 'uncompressed', None,
2508 _('use uncompressed transfer (fast over LAN)')),
2508 _('use uncompressed transfer (fast over LAN)')),
2509 ('p', 'patches', '', _('location of source patch repository')),
2509 ('p', 'patches', '', _('location of source patch repository')),
2510 ] + commands.remoteopts,
2510 ] + commands.remoteopts,
2511 _('hg qclone [OPTION]... SOURCE [DEST]')),
2511 _('hg qclone [OPTION]... SOURCE [DEST]')),
2512 "qcommit|qci":
2512 "qcommit|qci":
2513 (commit,
2513 (commit,
2514 commands.table["^commit|ci"][1],
2514 commands.table["^commit|ci"][1],
2515 _('hg qcommit [OPTION]... [FILE]...')),
2515 _('hg qcommit [OPTION]... [FILE]...')),
2516 "^qdiff":
2516 "^qdiff":
2517 (diff,
2517 (diff,
2518 commands.diffopts + commands.diffopts2 + commands.walkopts,
2518 commands.diffopts + commands.diffopts2 + commands.walkopts,
2519 _('hg qdiff [OPTION]... [FILE]...')),
2519 _('hg qdiff [OPTION]... [FILE]...')),
2520 "qdelete|qremove|qrm":
2520 "qdelete|qremove|qrm":
2521 (delete,
2521 (delete,
2522 [('k', 'keep', None, _('keep patch file')),
2522 [('k', 'keep', None, _('keep patch file')),
2523 ('r', 'rev', [], _('stop managing a revision'))],
2523 ('r', 'rev', [], _('stop managing a revision'))],
2524 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2524 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2525 'qfold':
2525 'qfold':
2526 (fold,
2526 (fold,
2527 [('e', 'edit', None, _('edit patch header')),
2527 [('e', 'edit', None, _('edit patch header')),
2528 ('k', 'keep', None, _('keep folded patch files')),
2528 ('k', 'keep', None, _('keep folded patch files')),
2529 ] + commands.commitopts,
2529 ] + commands.commitopts,
2530 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2530 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2531 'qgoto':
2531 'qgoto':
2532 (goto,
2532 (goto,
2533 [('f', 'force', None, _('overwrite any local changes'))],
2533 [('f', 'force', None, _('overwrite any local changes'))],
2534 _('hg qgoto [OPTION]... PATCH')),
2534 _('hg qgoto [OPTION]... PATCH')),
2535 'qguard':
2535 'qguard':
2536 (guard,
2536 (guard,
2537 [('l', 'list', None, _('list all patches and guards')),
2537 [('l', 'list', None, _('list all patches and guards')),
2538 ('n', 'none', None, _('drop all guards'))],
2538 ('n', 'none', None, _('drop all guards'))],
2539 _('hg qguard [-l] [-n] -- [PATCH] [+GUARD]... [-GUARD]...')),
2539 _('hg qguard [-l] [-n] -- [PATCH] [+GUARD]... [-GUARD]...')),
2540 'qheader': (header, [], _('hg qheader [PATCH]')),
2540 'qheader': (header, [], _('hg qheader [PATCH]')),
2541 "^qimport":
2541 "^qimport":
2542 (qimport,
2542 (qimport,
2543 [('e', 'existing', None, _('import file in patch directory')),
2543 [('e', 'existing', None, _('import file in patch directory')),
2544 ('n', 'name', '', _('name of patch file')),
2544 ('n', 'name', '', _('name of patch file')),
2545 ('f', 'force', None, _('overwrite existing files')),
2545 ('f', 'force', None, _('overwrite existing files')),
2546 ('r', 'rev', [], _('place existing revisions under mq control')),
2546 ('r', 'rev', [], _('place existing revisions under mq control')),
2547 ('g', 'git', None, _('use git extended diff format')),
2547 ('g', 'git', None, _('use git extended diff format')),
2548 ('P', 'push', None, _('qpush after importing'))],
2548 ('P', 'push', None, _('qpush after importing'))],
2549 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2549 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2550 "^qinit":
2550 "^qinit":
2551 (init,
2551 (init,
2552 [('c', 'create-repo', None, _('create queue repository'))],
2552 [('c', 'create-repo', None, _('create queue repository'))],
2553 _('hg qinit [-c]')),
2553 _('hg qinit [-c]')),
2554 "qnew":
2554 "qnew":
2555 (new,
2555 (new,
2556 [('e', 'edit', None, _('edit commit message')),
2556 [('e', 'edit', None, _('edit commit message')),
2557 ('f', 'force', None, _('import uncommitted changes into patch')),
2557 ('f', 'force', None, _('import uncommitted changes into patch')),
2558 ('g', 'git', None, _('use git extended diff format')),
2558 ('g', 'git', None, _('use git extended diff format')),
2559 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2559 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2560 ('u', 'user', '', _('add "From: <given user>" to patch')),
2560 ('u', 'user', '', _('add "From: <given user>" to patch')),
2561 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2561 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2562 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2562 ('d', 'date', '', _('add "Date: <given date>" to patch'))
2563 ] + commands.walkopts + commands.commitopts,
2563 ] + commands.walkopts + commands.commitopts,
2564 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2564 _('hg qnew [-e] [-m TEXT] [-l FILE] [-f] PATCH [FILE]...')),
2565 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2565 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2566 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2566 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2567 "^qpop":
2567 "^qpop":
2568 (pop,
2568 (pop,
2569 [('a', 'all', None, _('pop all patches')),
2569 [('a', 'all', None, _('pop all patches')),
2570 ('n', 'name', '', _('queue name to pop')),
2570 ('n', 'name', '', _('queue name to pop')),
2571 ('f', 'force', None, _('forget any local changes'))],
2571 ('f', 'force', None, _('forget any local changes'))],
2572 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2572 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2573 "^qpush":
2573 "^qpush":
2574 (push,
2574 (push,
2575 [('f', 'force', None, _('apply if the patch has rejects')),
2575 [('f', 'force', None, _('apply if the patch has rejects')),
2576 ('l', 'list', None, _('list patch name in commit text')),
2576 ('l', 'list', None, _('list patch name in commit text')),
2577 ('a', 'all', None, _('apply all patches')),
2577 ('a', 'all', None, _('apply all patches')),
2578 ('m', 'merge', None, _('merge from another queue')),
2578 ('m', 'merge', None, _('merge from another queue')),
2579 ('n', 'name', '', _('merge queue name'))],
2579 ('n', 'name', '', _('merge queue name'))],
2580 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2580 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [PATCH | INDEX]')),
2581 "^qrefresh":
2581 "^qrefresh":
2582 (refresh,
2582 (refresh,
2583 [('e', 'edit', None, _('edit commit message')),
2583 [('e', 'edit', None, _('edit commit message')),
2584 ('g', 'git', None, _('use git extended diff format')),
2584 ('g', 'git', None, _('use git extended diff format')),
2585 ('s', 'short', None, _('refresh only files already in the patch and specified files')),
2585 ('s', 'short', None, _('refresh only files already in the patch and specified files')),
2586 ('U', 'currentuser', None, _('add/update "From: <current user>" in patch')),
2586 ('U', 'currentuser', None, _('add/update "From: <current user>" in patch')),
2587 ('u', 'user', '', _('add/update "From: <given user>" in patch')),
2587 ('u', 'user', '', _('add/update "From: <given user>" in patch')),
2588 ('D', 'currentdate', None, _('update "Date: <current date>" in patch (if present)')),
2588 ('D', 'currentdate', None, _('update "Date: <current date>" in patch (if present)')),
2589 ('d', 'date', '', _('update "Date: <given date>" in patch (if present)'))
2589 ('d', 'date', '', _('update "Date: <given date>" in patch (if present)'))
2590 ] + commands.walkopts + commands.commitopts,
2590 ] + commands.walkopts + commands.commitopts,
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2591 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2592 'qrename|qmv':
2592 'qrename|qmv':
2593 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2593 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2594 "qrestore":
2594 "qrestore":
2595 (restore,
2595 (restore,
2596 [('d', 'delete', None, _('delete save entry')),
2596 [('d', 'delete', None, _('delete save entry')),
2597 ('u', 'update', None, _('update queue working directory'))],
2597 ('u', 'update', None, _('update queue working directory'))],
2598 _('hg qrestore [-d] [-u] REV')),
2598 _('hg qrestore [-d] [-u] REV')),
2599 "qsave":
2599 "qsave":
2600 (save,
2600 (save,
2601 [('c', 'copy', None, _('copy patch directory')),
2601 [('c', 'copy', None, _('copy patch directory')),
2602 ('n', 'name', '', _('copy directory name')),
2602 ('n', 'name', '', _('copy directory name')),
2603 ('e', 'empty', None, _('clear queue status file')),
2603 ('e', 'empty', None, _('clear queue status file')),
2604 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2604 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2605 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2605 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2606 "qselect":
2606 "qselect":
2607 (select,
2607 (select,
2608 [('n', 'none', None, _('disable all guards')),
2608 [('n', 'none', None, _('disable all guards')),
2609 ('s', 'series', None, _('list all guards in series file')),
2609 ('s', 'series', None, _('list all guards in series file')),
2610 ('', 'pop', None, _('pop to before first guarded applied patch')),
2610 ('', 'pop', None, _('pop to before first guarded applied patch')),
2611 ('', 'reapply', None, _('pop, then reapply patches'))],
2611 ('', 'reapply', None, _('pop, then reapply patches'))],
2612 _('hg qselect [OPTION]... [GUARD]...')),
2612 _('hg qselect [OPTION]... [GUARD]...')),
2613 "qseries":
2613 "qseries":
2614 (series,
2614 (series,
2615 [('m', 'missing', None, _('print patches not in series')),
2615 [('m', 'missing', None, _('print patches not in series')),
2616 ] + seriesopts,
2616 ] + seriesopts,
2617 _('hg qseries [-ms]')),
2617 _('hg qseries [-ms]')),
2618 "^strip":
2618 "^strip":
2619 (strip,
2619 (strip,
2620 [('f', 'force', None, _('force removal with local changes')),
2620 [('f', 'force', None, _('force removal with local changes')),
2621 ('b', 'backup', None, _('bundle unrelated changesets')),
2621 ('b', 'backup', None, _('bundle unrelated changesets')),
2622 ('n', 'nobackup', None, _('no backups'))],
2622 ('n', 'nobackup', None, _('no backups'))],
2623 _('hg strip [-f] [-b] [-n] REV')),
2623 _('hg strip [-f] [-b] [-n] REV')),
2624 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2624 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
2625 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2625 "qunapplied": (unapplied, [] + seriesopts, _('hg qunapplied [-s] [PATCH]')),
2626 "qfinish":
2626 "qfinish":
2627 (finish,
2627 (finish,
2628 [('a', 'applied', None, _('finish all applied changesets'))],
2628 [('a', 'applied', None, _('finish all applied changesets'))],
2629 _('hg qfinish [-a] [REV...]')),
2629 _('hg qfinish [-a] [REV...]')),
2630 }
2630 }
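Each entry in the cmdtable above maps a command name (with | separating aliases and a leading ^ marking commands listed in the short help) to a (function, options, synopsis) tuple, where every option is itself a (short, long, default, help) tuple. A tiny consumer of that shape, using a made-up two-entry table rather than the real one:

def print_synopses(cmdtable):
    """Print each command's primary name and synopsis."""
    for key, (func, opts, synopsis) in sorted(cmdtable.items()):
        primary = key.lstrip('^').split('|')[0]
        print('%-12s %s' % (primary, synopsis))

demo = {
    "^qpop": (None, [('a', 'all', None, 'pop all patches')],
              'hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]'),
    "qtop": (None, [], 'hg qtop [-s]'),
}
print_synopses(demo)
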
@@ -1,605 +1,605
1 # Patch transplanting extension for Mercurial
1 # Patch transplanting extension for Mercurial
2 #
2 #
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 '''patch transplanting tool
8 '''patch transplanting tool
9
9
10 This extension allows you to transplant patches from another branch.
10 This extension allows you to transplant patches from another branch.
11
11
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
12 Transplanted patches are recorded in .hg/transplant/transplants, as a
13 map from a changeset hash to its hash in the source repository.
13 map from a changeset hash to its hash in the source repository.
14 '''
14 '''
15
15
16 from mercurial.i18n import _
16 from mercurial.i18n import _
17 import os, tempfile
17 import os, tempfile
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
18 from mercurial import bundlerepo, changegroup, cmdutil, hg, merge, match
19 from mercurial import patch, revlog, util, error
19 from mercurial import patch, revlog, util, error
20
20
21 class transplantentry:
21 class transplantentry(object):
22 def __init__(self, lnode, rnode):
22 def __init__(self, lnode, rnode):
23 self.lnode = lnode
23 self.lnode = lnode
24 self.rnode = rnode
24 self.rnode = rnode
25
25
26 class transplants:
26 class transplants(object):
27 def __init__(self, path=None, transplantfile=None, opener=None):
27 def __init__(self, path=None, transplantfile=None, opener=None):
28 self.path = path
28 self.path = path
29 self.transplantfile = transplantfile
29 self.transplantfile = transplantfile
30 self.opener = opener
30 self.opener = opener
31
31
32 if not opener:
32 if not opener:
33 self.opener = util.opener(self.path)
33 self.opener = util.opener(self.path)
34 self.transplants = []
34 self.transplants = []
35 self.dirty = False
35 self.dirty = False
36 self.read()
36 self.read()
37
37
38 def read(self):
38 def read(self):
39 abspath = os.path.join(self.path, self.transplantfile)
39 abspath = os.path.join(self.path, self.transplantfile)
40 if self.transplantfile and os.path.exists(abspath):
40 if self.transplantfile and os.path.exists(abspath):
41 for line in self.opener(self.transplantfile).read().splitlines():
41 for line in self.opener(self.transplantfile).read().splitlines():
42 lnode, rnode = map(revlog.bin, line.split(':'))
42 lnode, rnode = map(revlog.bin, line.split(':'))
43 self.transplants.append(transplantentry(lnode, rnode))
43 self.transplants.append(transplantentry(lnode, rnode))
44
44
45 def write(self):
45 def write(self):
46 if self.dirty and self.transplantfile:
46 if self.dirty and self.transplantfile:
47 if not os.path.isdir(self.path):
47 if not os.path.isdir(self.path):
48 os.mkdir(self.path)
48 os.mkdir(self.path)
49 fp = self.opener(self.transplantfile, 'w')
49 fp = self.opener(self.transplantfile, 'w')
50 for c in self.transplants:
50 for c in self.transplants:
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
51 l, r = map(revlog.hex, (c.lnode, c.rnode))
52 fp.write(l + ':' + r + '\n')
52 fp.write(l + ':' + r + '\n')
53 fp.close()
53 fp.close()
54 self.dirty = False
54 self.dirty = False
55
55
56 def get(self, rnode):
56 def get(self, rnode):
57 return [t for t in self.transplants if t.rnode == rnode]
57 return [t for t in self.transplants if t.rnode == rnode]
58
58
59 def set(self, lnode, rnode):
59 def set(self, lnode, rnode):
60 self.transplants.append(transplantentry(lnode, rnode))
60 self.transplants.append(transplantentry(lnode, rnode))
61 self.dirty = True
61 self.dirty = True
62
62
63 def remove(self, transplant):
63 def remove(self, transplant):
64 del self.transplants[self.transplants.index(transplant)]
64 del self.transplants[self.transplants.index(transplant)]
65 self.dirty = True
65 self.dirty = True
66
66
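The transplants class above stores each mapping as one 'localhex:remotehex' line (revlog.hex on write, revlog.bin on read). A standalone roundtrip of that file format using binascii in place of the revlog helpers (parse_transplants and dump_transplants are illustrative names only):

from binascii import hexlify, unhexlify

def parse_transplants(data):
    """One 'localhex:remotehex' pair per line -> list of (lnode, rnode) bytes."""
    pairs = []
    for line in data.splitlines():
        l, r = line.split(':')
        pairs.append((unhexlify(l), unhexlify(r)))
    return pairs

def dump_transplants(pairs):
    return ''.join('%s:%s\n' % (hexlify(l).decode(), hexlify(r).decode())
                   for l, r in pairs)

pairs = [(b'\x11' * 20, b'\x22' * 20)]
assert parse_transplants(dump_transplants(pairs)) == pairs
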
67 class transplanter:
67 class transplanter(object):
68 def __init__(self, ui, repo):
68 def __init__(self, ui, repo):
69 self.ui = ui
69 self.ui = ui
70 self.path = repo.join('transplant')
70 self.path = repo.join('transplant')
71 self.opener = util.opener(self.path)
71 self.opener = util.opener(self.path)
72 self.transplants = transplants(self.path, 'transplants',
72 self.transplants = transplants(self.path, 'transplants',
73 opener=self.opener)
73 opener=self.opener)
74
74
75 def applied(self, repo, node, parent):
75 def applied(self, repo, node, parent):
76 '''returns True if a node is already an ancestor of parent
76 '''returns True if a node is already an ancestor of parent
77 or has already been transplanted'''
77 or has already been transplanted'''
78 if hasnode(repo, node):
78 if hasnode(repo, node):
79 if node in repo.changelog.reachable(parent, stop=node):
79 if node in repo.changelog.reachable(parent, stop=node):
80 return True
80 return True
81 for t in self.transplants.get(node):
81 for t in self.transplants.get(node):
82 # it might have been stripped
82 # it might have been stripped
83 if not hasnode(repo, t.lnode):
83 if not hasnode(repo, t.lnode):
84 self.transplants.remove(t)
84 self.transplants.remove(t)
85 return False
85 return False
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
86 if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
87 return True
87 return True
88 return False
88 return False
89
89
90 def apply(self, repo, source, revmap, merges, opts={}):
90 def apply(self, repo, source, revmap, merges, opts={}):
91 '''apply the revisions in revmap one by one in revision order'''
91 '''apply the revisions in revmap one by one in revision order'''
92 revs = sorted(revmap)
92 revs = sorted(revmap)
93 p1, p2 = repo.dirstate.parents()
93 p1, p2 = repo.dirstate.parents()
94 pulls = []
94 pulls = []
95 diffopts = patch.diffopts(self.ui, opts)
95 diffopts = patch.diffopts(self.ui, opts)
96 diffopts.git = True
96 diffopts.git = True
97
97
98 lock = wlock = None
98 lock = wlock = None
99 try:
99 try:
100 wlock = repo.wlock()
100 wlock = repo.wlock()
101 lock = repo.lock()
101 lock = repo.lock()
102 for rev in revs:
102 for rev in revs:
103 node = revmap[rev]
103 node = revmap[rev]
104 revstr = '%s:%s' % (rev, revlog.short(node))
104 revstr = '%s:%s' % (rev, revlog.short(node))
105
105
106 if self.applied(repo, node, p1):
106 if self.applied(repo, node, p1):
107 self.ui.warn(_('skipping already applied revision %s\n') %
107 self.ui.warn(_('skipping already applied revision %s\n') %
108 revstr)
108 revstr)
109 continue
109 continue
110
110
111 parents = source.changelog.parents(node)
111 parents = source.changelog.parents(node)
112 if not opts.get('filter'):
112 if not opts.get('filter'):
113 # If the changeset parent is the same as the
113 # If the changeset parent is the same as the
114 # wdir's parent, just pull it.
114 # wdir's parent, just pull it.
115 if parents[0] == p1:
115 if parents[0] == p1:
116 pulls.append(node)
116 pulls.append(node)
117 p1 = node
117 p1 = node
118 continue
118 continue
119 if pulls:
119 if pulls:
120 if source != repo:
120 if source != repo:
121 repo.pull(source, heads=pulls)
121 repo.pull(source, heads=pulls)
122 merge.update(repo, pulls[-1], False, False, None)
122 merge.update(repo, pulls[-1], False, False, None)
123 p1, p2 = repo.dirstate.parents()
123 p1, p2 = repo.dirstate.parents()
124 pulls = []
124 pulls = []
125
125
126 domerge = False
126 domerge = False
127 if node in merges:
127 if node in merges:
128 # pulling all the merge revs at once would mean we
128 # pulling all the merge revs at once would mean we
129 # couldn't transplant after the latest one even if
129 # couldn't transplant after the latest one even if
130 # transplants before it fail.
130 # transplants before it fail.
131 domerge = True
131 domerge = True
132 if not hasnode(repo, node):
132 if not hasnode(repo, node):
133 repo.pull(source, heads=[node])
133 repo.pull(source, heads=[node])
134
134
135 if parents[1] != revlog.nullid:
135 if parents[1] != revlog.nullid:
136 self.ui.note(_('skipping merge changeset %s:%s\n')
136 self.ui.note(_('skipping merge changeset %s:%s\n')
137 % (rev, revlog.short(node)))
137 % (rev, revlog.short(node)))
138 patchfile = None
138 patchfile = None
139 else:
139 else:
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
140 fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
141 fp = os.fdopen(fd, 'w')
141 fp = os.fdopen(fd, 'w')
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
142 gen = patch.diff(source, parents[0], node, opts=diffopts)
143 for chunk in gen:
143 for chunk in gen:
144 fp.write(chunk)
144 fp.write(chunk)
145 fp.close()
145 fp.close()
146
146
147 del revmap[rev]
147 del revmap[rev]
148 if patchfile or domerge:
148 if patchfile or domerge:
149 try:
149 try:
150 n = self.applyone(repo, node,
150 n = self.applyone(repo, node,
151 source.changelog.read(node),
151 source.changelog.read(node),
152 patchfile, merge=domerge,
152 patchfile, merge=domerge,
153 log=opts.get('log'),
153 log=opts.get('log'),
154 filter=opts.get('filter'))
154 filter=opts.get('filter'))
155 if n and domerge:
155 if n and domerge:
156 self.ui.status(_('%s merged at %s\n') % (revstr,
156 self.ui.status(_('%s merged at %s\n') % (revstr,
157 revlog.short(n)))
157 revlog.short(n)))
158 elif n:
158 elif n:
159 self.ui.status(_('%s transplanted to %s\n')
159 self.ui.status(_('%s transplanted to %s\n')
160 % (revlog.short(node),
160 % (revlog.short(node),
161 revlog.short(n)))
161 revlog.short(n)))
162 finally:
162 finally:
163 if patchfile:
163 if patchfile:
164 os.unlink(patchfile)
164 os.unlink(patchfile)
165 if pulls:
165 if pulls:
166 repo.pull(source, heads=pulls)
166 repo.pull(source, heads=pulls)
167 merge.update(repo, pulls[-1], False, False, None)
167 merge.update(repo, pulls[-1], False, False, None)
168 finally:
168 finally:
169 self.saveseries(revmap, merges)
169 self.saveseries(revmap, merges)
170 self.transplants.write()
170 self.transplants.write()
171 lock.release()
171 lock.release()
172 wlock.release()
172 wlock.release()
173
173
174 def filter(self, filter, changelog, patchfile):
174 def filter(self, filter, changelog, patchfile):
175 '''arbitrarily rewrite changeset before applying it'''
175 '''arbitrarily rewrite changeset before applying it'''
176
176
177 self.ui.status(_('filtering %s\n') % patchfile)
177 self.ui.status(_('filtering %s\n') % patchfile)
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
178 user, date, msg = (changelog[1], changelog[2], changelog[4])
179
179
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
180 fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
181 fp = os.fdopen(fd, 'w')
181 fp = os.fdopen(fd, 'w')
182 fp.write("# HG changeset patch\n")
182 fp.write("# HG changeset patch\n")
183 fp.write("# User %s\n" % user)
183 fp.write("# User %s\n" % user)
184 fp.write("# Date %d %d\n" % date)
184 fp.write("# Date %d %d\n" % date)
185 fp.write(changelog[4])
185 fp.write(changelog[4])
186 fp.close()
186 fp.close()
187
187
188 try:
188 try:
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
189 util.system('%s %s %s' % (filter, util.shellquote(headerfile),
190 util.shellquote(patchfile)),
190 util.shellquote(patchfile)),
191 environ={'HGUSER': changelog[1]},
191 environ={'HGUSER': changelog[1]},
192 onerr=util.Abort, errprefix=_('filter failed'))
192 onerr=util.Abort, errprefix=_('filter failed'))
193 user, date, msg = self.parselog(file(headerfile))[1:4]
193 user, date, msg = self.parselog(file(headerfile))[1:4]
194 finally:
194 finally:
195 os.unlink(headerfile)
195 os.unlink(headerfile)
196
196
197 return (user, date, msg)
197 return (user, date, msg)
198
198
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
199 def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
200 filter=None):
200 filter=None):
201 '''apply the patch in patchfile to the repository as a transplant'''
201 '''apply the patch in patchfile to the repository as a transplant'''
202 (manifest, user, (time, timezone), files, message) = cl[:5]
202 (manifest, user, (time, timezone), files, message) = cl[:5]
203 date = "%d %d" % (time, timezone)
203 date = "%d %d" % (time, timezone)
204 extra = {'transplant_source': node}
204 extra = {'transplant_source': node}
205 if filter:
205 if filter:
206 (user, date, message) = self.filter(filter, cl, patchfile)
206 (user, date, message) = self.filter(filter, cl, patchfile)
207
207
208 if log:
208 if log:
209 message += '\n(transplanted from %s)' % revlog.hex(node)
209 message += '\n(transplanted from %s)' % revlog.hex(node)
210
210
211 self.ui.status(_('applying %s\n') % revlog.short(node))
211 self.ui.status(_('applying %s\n') % revlog.short(node))
212 self.ui.note('%s %s\n%s\n' % (user, date, message))
212 self.ui.note('%s %s\n%s\n' % (user, date, message))
213
213
214 if not patchfile and not merge:
214 if not patchfile and not merge:
215 raise util.Abort(_('can only omit patchfile if merging'))
215 raise util.Abort(_('can only omit patchfile if merging'))
216 if patchfile:
216 if patchfile:
217 try:
217 try:
218 files = {}
218 files = {}
219 try:
219 try:
220 patch.patch(patchfile, self.ui, cwd=repo.root,
220 patch.patch(patchfile, self.ui, cwd=repo.root,
221 files=files)
221 files=files)
222 if not files:
222 if not files:
223 self.ui.warn(_('%s: empty changeset')
223 self.ui.warn(_('%s: empty changeset')
224 % revlog.hex(node))
224 % revlog.hex(node))
225 return None
225 return None
226 finally:
226 finally:
227 files = patch.updatedir(self.ui, repo, files)
227 files = patch.updatedir(self.ui, repo, files)
228 except Exception, inst:
228 except Exception, inst:
229 if filter:
229 if filter:
230 os.unlink(patchfile)
230 os.unlink(patchfile)
231 seriespath = os.path.join(self.path, 'series')
231 seriespath = os.path.join(self.path, 'series')
232 if os.path.exists(seriespath):
232 if os.path.exists(seriespath):
233 os.unlink(seriespath)
233 os.unlink(seriespath)
234 p1 = repo.dirstate.parents()[0]
234 p1 = repo.dirstate.parents()[0]
235 p2 = node
235 p2 = node
236 self.log(user, date, message, p1, p2, merge=merge)
236 self.log(user, date, message, p1, p2, merge=merge)
237 self.ui.write(str(inst) + '\n')
237 self.ui.write(str(inst) + '\n')
238 raise util.Abort(_('Fix up the merge and run '
238 raise util.Abort(_('Fix up the merge and run '
239 'hg transplant --continue'))
239 'hg transplant --continue'))
240 else:
240 else:
241 files = None
241 files = None
242 if merge:
242 if merge:
243 p1, p2 = repo.dirstate.parents()
243 p1, p2 = repo.dirstate.parents()
244 repo.dirstate.setparents(p1, node)
244 repo.dirstate.setparents(p1, node)
245 m = match.always(repo.root, '')
245 m = match.always(repo.root, '')
246 else:
246 else:
247 m = match.exact(repo.root, '', files)
247 m = match.exact(repo.root, '', files)
248
248
249 n = repo.commit(message, user, date, extra=extra, match=m)
249 n = repo.commit(message, user, date, extra=extra, match=m)
250 if not merge:
250 if not merge:
251 self.transplants.set(n, node)
251 self.transplants.set(n, node)
252
252
253 return n
253 return n
254
254
255 def resume(self, repo, source, opts=None):
255 def resume(self, repo, source, opts=None):
256 '''recover last transaction and apply remaining changesets'''
256 '''recover last transaction and apply remaining changesets'''
257 if os.path.exists(os.path.join(self.path, 'journal')):
257 if os.path.exists(os.path.join(self.path, 'journal')):
258 n, node = self.recover(repo)
258 n, node = self.recover(repo)
259 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
259 self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
260 revlog.short(n)))
260 revlog.short(n)))
261 seriespath = os.path.join(self.path, 'series')
261 seriespath = os.path.join(self.path, 'series')
262 if not os.path.exists(seriespath):
262 if not os.path.exists(seriespath):
263 self.transplants.write()
263 self.transplants.write()
264 return
264 return
265 nodes, merges = self.readseries()
265 nodes, merges = self.readseries()
266 revmap = {}
266 revmap = {}
267 for n in nodes:
267 for n in nodes:
268 revmap[source.changelog.rev(n)] = n
268 revmap[source.changelog.rev(n)] = n
269 os.unlink(seriespath)
269 os.unlink(seriespath)
270
270
271 self.apply(repo, source, revmap, merges, opts)
271 self.apply(repo, source, revmap, merges, opts)
272
272
273 def recover(self, repo):
273 def recover(self, repo):
274 '''commit working directory using journal metadata'''
274 '''commit working directory using journal metadata'''
275 node, user, date, message, parents = self.readlog()
275 node, user, date, message, parents = self.readlog()
276 merge = len(parents) == 2
276 merge = len(parents) == 2
277
277
278 if not user or not date or not message or not parents[0]:
278 if not user or not date or not message or not parents[0]:
279 raise util.Abort(_('transplant log file is corrupt'))
279 raise util.Abort(_('transplant log file is corrupt'))
280
280
281 extra = {'transplant_source': node}
281 extra = {'transplant_source': node}
282 wlock = repo.wlock()
282 wlock = repo.wlock()
283 try:
283 try:
284 p1, p2 = repo.dirstate.parents()
284 p1, p2 = repo.dirstate.parents()
285 if p1 != parents[0]:
285 if p1 != parents[0]:
286 raise util.Abort(
286 raise util.Abort(
287 _('working dir not at transplant parent %s') %
287 _('working dir not at transplant parent %s') %
288 revlog.hex(parents[0]))
288 revlog.hex(parents[0]))
289 if merge:
289 if merge:
290 repo.dirstate.setparents(p1, parents[1])
290 repo.dirstate.setparents(p1, parents[1])
291 n = repo.commit(message, user, date, extra=extra)
291 n = repo.commit(message, user, date, extra=extra)
292 if not n:
292 if not n:
293 raise util.Abort(_('commit failed'))
293 raise util.Abort(_('commit failed'))
294 if not merge:
294 if not merge:
295 self.transplants.set(n, node)
295 self.transplants.set(n, node)
296 self.unlog()
296 self.unlog()
297
297
298 return n, node
298 return n, node
299 finally:
299 finally:
300 wlock.release()
300 wlock.release()
301
301
302 def readseries(self):
302 def readseries(self):
303 nodes = []
303 nodes = []
304 merges = []
304 merges = []
305 cur = nodes
305 cur = nodes
306 for line in self.opener('series').read().splitlines():
306 for line in self.opener('series').read().splitlines():
307 if line.startswith('# Merges'):
307 if line.startswith('# Merges'):
308 cur = merges
308 cur = merges
309 continue
309 continue
310 cur.append(revlog.bin(line))
310 cur.append(revlog.bin(line))
311
311
312 return (nodes, merges)
312 return (nodes, merges)
313
313
314 def saveseries(self, revmap, merges):
314 def saveseries(self, revmap, merges):
315 if not revmap:
315 if not revmap:
316 return
316 return
317
317
318 if not os.path.isdir(self.path):
318 if not os.path.isdir(self.path):
319 os.mkdir(self.path)
319 os.mkdir(self.path)
320 series = self.opener('series', 'w')
320 series = self.opener('series', 'w')
321 for rev in sorted(revmap):
321 for rev in sorted(revmap):
322 series.write(revlog.hex(revmap[rev]) + '\n')
322 series.write(revlog.hex(revmap[rev]) + '\n')
323 if merges:
323 if merges:
324 series.write('# Merges\n')
324 series.write('# Merges\n')
325 for m in merges:
325 for m in merges:
326 series.write(revlog.hex(m) + '\n')
326 series.write(revlog.hex(m) + '\n')
327 series.close()
327 series.close()
328
328
329 def parselog(self, fp):
329 def parselog(self, fp):
330 parents = []
330 parents = []
331 message = []
331 message = []
332 node = revlog.nullid
332 node = revlog.nullid
333 inmsg = False
333 inmsg = False
334 for line in fp.read().splitlines():
334 for line in fp.read().splitlines():
335 if inmsg:
335 if inmsg:
336 message.append(line)
336 message.append(line)
337 elif line.startswith('# User '):
337 elif line.startswith('# User '):
338 user = line[7:]
338 user = line[7:]
339 elif line.startswith('# Date '):
339 elif line.startswith('# Date '):
340 date = line[7:]
340 date = line[7:]
341 elif line.startswith('# Node ID '):
341 elif line.startswith('# Node ID '):
342 node = revlog.bin(line[10:])
342 node = revlog.bin(line[10:])
343 elif line.startswith('# Parent '):
343 elif line.startswith('# Parent '):
344 parents.append(revlog.bin(line[9:]))
344 parents.append(revlog.bin(line[9:]))
345 elif not line.startswith('#'):
345 elif not line.startswith('#'):
346 inmsg = True
346 inmsg = True
347 message.append(line)
347 message.append(line)
348 return (node, user, date, '\n'.join(message), parents)
348 return (node, user, date, '\n'.join(message), parents)
349
349
350 def log(self, user, date, message, p1, p2, merge=False):
350 def log(self, user, date, message, p1, p2, merge=False):
351 '''journal changelog metadata for later recover'''
351 '''journal changelog metadata for later recover'''
352
352
353 if not os.path.isdir(self.path):
353 if not os.path.isdir(self.path):
354 os.mkdir(self.path)
354 os.mkdir(self.path)
355 fp = self.opener('journal', 'w')
355 fp = self.opener('journal', 'w')
356 fp.write('# User %s\n' % user)
356 fp.write('# User %s\n' % user)
357 fp.write('# Date %s\n' % date)
357 fp.write('# Date %s\n' % date)
358 fp.write('# Node ID %s\n' % revlog.hex(p2))
358 fp.write('# Node ID %s\n' % revlog.hex(p2))
359 fp.write('# Parent ' + revlog.hex(p1) + '\n')
359 fp.write('# Parent ' + revlog.hex(p1) + '\n')
360 if merge:
360 if merge:
361 fp.write('# Parent ' + revlog.hex(p2) + '\n')
361 fp.write('# Parent ' + revlog.hex(p2) + '\n')
362 fp.write(message.rstrip() + '\n')
362 fp.write(message.rstrip() + '\n')
363 fp.close()
363 fp.close()
364
364
365 def readlog(self):
365 def readlog(self):
366 return self.parselog(self.opener('journal'))
366 return self.parselog(self.opener('journal'))
367
367
368 def unlog(self):
368 def unlog(self):
369 '''remove changelog journal'''
369 '''remove changelog journal'''
370 absdst = os.path.join(self.path, 'journal')
370 absdst = os.path.join(self.path, 'journal')
371 if os.path.exists(absdst):
371 if os.path.exists(absdst):
372 os.unlink(absdst)
372 os.unlink(absdst)
373
373
374 def transplantfilter(self, repo, source, root):
374 def transplantfilter(self, repo, source, root):
375 def matchfn(node):
375 def matchfn(node):
376 if self.applied(repo, node, root):
376 if self.applied(repo, node, root):
377 return False
377 return False
378 if source.changelog.parents(node)[1] != revlog.nullid:
378 if source.changelog.parents(node)[1] != revlog.nullid:
379 return False
379 return False
380 extra = source.changelog.read(node)[5]
380 extra = source.changelog.read(node)[5]
381 cnode = extra.get('transplant_source')
381 cnode = extra.get('transplant_source')
382 if cnode and self.applied(repo, cnode, root):
382 if cnode and self.applied(repo, cnode, root):
383 return False
383 return False
384 return True
384 return True
385
385
386 return matchfn
386 return matchfn
387
387
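log() above journals enough changeset metadata ('# User', '# Date', '# Node ID' and '# Parent' lines, then the message) for recover()/parselog() to rebuild the commit later. A sketch of just the writer side, producing the same text layout with hex strings instead of binary nodes (write_journal is an illustrative name, not part of the extension):

def write_journal(user, date, p1hex, p2hex, message, merge=False):
    """Return journal text in the layout log() writes above."""
    lines = ['# User %s' % user,
             '# Date %s' % date,
             '# Node ID %s' % p2hex,
             '# Parent %s' % p1hex]
    if merge:
        lines.append('# Parent %s' % p2hex)
    lines.append(message.rstrip())
    return '\n'.join(lines) + '\n'

print(write_journal('alice', '0 0', 'ee' * 10, 'ff' * 10, 'transplant me'))
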
388 def hasnode(repo, node):
388 def hasnode(repo, node):
389 try:
389 try:
390 return repo.changelog.rev(node) is not None
390 return repo.changelog.rev(node) is not None
391 except error.RevlogError:
391 except error.RevlogError:
392 return False
392 return False
393
393
394 def browserevs(ui, repo, nodes, opts):
394 def browserevs(ui, repo, nodes, opts):
395 '''interactively transplant changesets'''
395 '''interactively transplant changesets'''
396 def browsehelp(ui):
396 def browsehelp(ui):
397 ui.write('y: transplant this changeset\n'
397 ui.write('y: transplant this changeset\n'
398 'n: skip this changeset\n'
398 'n: skip this changeset\n'
399 'm: merge at this changeset\n'
399 'm: merge at this changeset\n'
400 'p: show patch\n'
400 'p: show patch\n'
401 'c: commit selected changesets\n'
401 'c: commit selected changesets\n'
402 'q: cancel transplant\n'
402 'q: cancel transplant\n'
403 '?: show this help\n')
403 '?: show this help\n')
404
404
405 displayer = cmdutil.show_changeset(ui, repo, opts)
405 displayer = cmdutil.show_changeset(ui, repo, opts)
406 transplants = []
406 transplants = []
407 merges = []
407 merges = []
408 for node in nodes:
408 for node in nodes:
409 displayer.show(repo[node])
409 displayer.show(repo[node])
410 action = None
410 action = None
411 while not action:
411 while not action:
412 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
412 action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
413 if action == '?':
413 if action == '?':
414 browsehelp(ui)
414 browsehelp(ui)
415 action = None
415 action = None
416 elif action == 'p':
416 elif action == 'p':
417 parent = repo.changelog.parents(node)[0]
417 parent = repo.changelog.parents(node)[0]
418 for chunk in patch.diff(repo, parent, node):
418 for chunk in patch.diff(repo, parent, node):
419 ui.write(chunk)
419 ui.write(chunk)
420 action = None
420 action = None
421 elif action not in ('y', 'n', 'm', 'c', 'q'):
421 elif action not in ('y', 'n', 'm', 'c', 'q'):
422 ui.write('no such option\n')
422 ui.write('no such option\n')
423 action = None
423 action = None
424 if action == 'y':
424 if action == 'y':
425 transplants.append(node)
425 transplants.append(node)
426 elif action == 'm':
426 elif action == 'm':
427 merges.append(node)
427 merges.append(node)
428 elif action == 'c':
428 elif action == 'c':
429 break
429 break
430 elif action == 'q':
430 elif action == 'q':
431 transplants = ()
431 transplants = ()
432 merges = ()
432 merges = ()
433 break
433 break
434 return (transplants, merges)
434 return (transplants, merges)
435
435
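The interactive loop in browserevs above ends up partitioning the candidate nodes by the answers given at the prompt: y queues a transplant, m queues a merge, c stops early keeping what was chosen, and q throws everything away. A non-interactive sketch of that bookkeeping for a prepared list of answers (classify is an illustrative name):

def classify(nodes, answers):
    transplants, merges = [], []
    for node, action in zip(nodes, answers):
        if action == 'y':
            transplants.append(node)
        elif action == 'm':
            merges.append(node)
        elif action == 'c':
            break
        elif action == 'q':
            return (), ()
    return transplants, merges

print(classify([1, 2, 3, 4], ['y', 'n', 'm', 'c']))   # ([1], [3])
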
436 def transplant(ui, repo, *revs, **opts):
436 def transplant(ui, repo, *revs, **opts):
437 '''transplant changesets from another branch
437 '''transplant changesets from another branch
438
438
439 Selected changesets will be applied on top of the current working
439 Selected changesets will be applied on top of the current working
440 directory with the log of the original changeset. If --log is
440 directory with the log of the original changeset. If --log is
441 specified, log messages will have a comment appended of the form:
441 specified, log messages will have a comment appended of the form:
442
442
443 (transplanted from CHANGESETHASH)
443 (transplanted from CHANGESETHASH)
444
444
445 You can rewrite the changelog message with the --filter option.
445 You can rewrite the changelog message with the --filter option.
446 Its argument will be invoked with the current changelog message as
446 Its argument will be invoked with the current changelog message as
447 $1 and the patch as $2.
447 $1 and the patch as $2.
448
448
449 If --source/-s is specified, selects changesets from the named
449 If --source/-s is specified, selects changesets from the named
450 repository. If --branch/-b is specified, selects changesets from
450 repository. If --branch/-b is specified, selects changesets from
451 the branch holding the named revision, up to that revision. If
451 the branch holding the named revision, up to that revision. If
452 --all/-a is specified, all changesets on the branch will be
452 --all/-a is specified, all changesets on the branch will be
453 transplanted, otherwise you will be prompted to select the
453 transplanted, otherwise you will be prompted to select the
454 changesets you want.
454 changesets you want.
455
455
456 hg transplant --branch REVISION --all will rebase the selected
456 hg transplant --branch REVISION --all will rebase the selected
457 branch (up to the named revision) onto your current working
457 branch (up to the named revision) onto your current working
458 directory.
458 directory.
459
459
460 You can optionally mark selected transplanted changesets as merge
460 You can optionally mark selected transplanted changesets as merge
461 changesets. You will not be prompted to transplant any ancestors
461 changesets. You will not be prompted to transplant any ancestors
462 of a merged transplant, and you can merge descendants of them
462 of a merged transplant, and you can merge descendants of them
463 normally instead of transplanting them.
463 normally instead of transplanting them.
464
464
465 If no merges or revisions are provided, hg transplant will start
465 If no merges or revisions are provided, hg transplant will start
466 an interactive changeset browser.
466 an interactive changeset browser.
467
467
468 If a changeset application fails, you can fix the merge by hand
468 If a changeset application fails, you can fix the merge by hand
469 and then resume where you left off by calling hg transplant
469 and then resume where you left off by calling hg transplant
470 --continue/-c.
470 --continue/-c.
471 '''
471 '''
472 def getremotechanges(repo, url):
472 def getremotechanges(repo, url):
473 sourcerepo = ui.expandpath(url)
473 sourcerepo = ui.expandpath(url)
474 source = hg.repository(ui, sourcerepo)
474 source = hg.repository(ui, sourcerepo)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
475 common, incoming, rheads = repo.findcommonincoming(source, force=True)
476 if not incoming:
476 if not incoming:
477 return (source, None, None)
477 return (source, None, None)
478
478
479 bundle = None
479 bundle = None
480 if not source.local():
480 if not source.local():
481 if source.capable('changegroupsubset'):
481 if source.capable('changegroupsubset'):
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
482 cg = source.changegroupsubset(incoming, rheads, 'incoming')
483 else:
483 else:
484 cg = source.changegroup(incoming, 'incoming')
484 cg = source.changegroup(incoming, 'incoming')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
485 bundle = changegroup.writebundle(cg, None, 'HG10UN')
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
486 source = bundlerepo.bundlerepository(ui, repo.root, bundle)
487
487
488 return (source, incoming, bundle)
488 return (source, incoming, bundle)
489
489
490 def incwalk(repo, incoming, branches, match=util.always):
490 def incwalk(repo, incoming, branches, match=util.always):
491 if not branches:
491 if not branches:
492 branches=None
492 branches=None
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
493 for node in repo.changelog.nodesbetween(incoming, branches)[0]:
494 if match(node):
494 if match(node):
495 yield node
495 yield node
496
496
497 def transplantwalk(repo, root, branches, match=util.always):
497 def transplantwalk(repo, root, branches, match=util.always):
498 if not branches:
498 if not branches:
499 branches = repo.heads()
499 branches = repo.heads()
500 ancestors = []
500 ancestors = []
501 for branch in branches:
501 for branch in branches:
502 ancestors.append(repo.changelog.ancestor(root, branch))
502 ancestors.append(repo.changelog.ancestor(root, branch))
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
503 for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
504 if match(node):
504 if match(node):
505 yield node
505 yield node
506
506
507 def checkopts(opts, revs):
507 def checkopts(opts, revs):
508 if opts.get('continue'):
508 if opts.get('continue'):
509 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
509 if filter(lambda opt: opts.get(opt), ('branch', 'all', 'merge')):
510 raise util.Abort(_('--continue is incompatible with '
510 raise util.Abort(_('--continue is incompatible with '
511 'branch, all or merge'))
511 'branch, all or merge'))
512 return
512 return
513 if not (opts.get('source') or revs or
513 if not (opts.get('source') or revs or
514 opts.get('merge') or opts.get('branch')):
514 opts.get('merge') or opts.get('branch')):
515 raise util.Abort(_('no source URL, branch tag or revision '
515 raise util.Abort(_('no source URL, branch tag or revision '
516 'list provided'))
516 'list provided'))
517 if opts.get('all'):
517 if opts.get('all'):
518 if not opts.get('branch'):
518 if not opts.get('branch'):
519 raise util.Abort(_('--all requires a branch revision'))
519 raise util.Abort(_('--all requires a branch revision'))
520 if revs:
520 if revs:
521 raise util.Abort(_('--all is incompatible with a '
521 raise util.Abort(_('--all is incompatible with a '
522 'revision list'))
522 'revision list'))
523
523
524 checkopts(opts, revs)
524 checkopts(opts, revs)
525
525
526 if not opts.get('log'):
526 if not opts.get('log'):
527 opts['log'] = ui.config('transplant', 'log')
527 opts['log'] = ui.config('transplant', 'log')
528 if not opts.get('filter'):
528 if not opts.get('filter'):
529 opts['filter'] = ui.config('transplant', 'filter')
529 opts['filter'] = ui.config('transplant', 'filter')
530
530
531 tp = transplanter(ui, repo)
531 tp = transplanter(ui, repo)
532
532
533 p1, p2 = repo.dirstate.parents()
533 p1, p2 = repo.dirstate.parents()
534 if len(repo) > 0 and p1 == revlog.nullid:
534 if len(repo) > 0 and p1 == revlog.nullid:
535 raise util.Abort(_('no revision checked out'))
535 raise util.Abort(_('no revision checked out'))
536 if not opts.get('continue'):
536 if not opts.get('continue'):
537 if p2 != revlog.nullid:
537 if p2 != revlog.nullid:
538 raise util.Abort(_('outstanding uncommitted merges'))
538 raise util.Abort(_('outstanding uncommitted merges'))
539 m, a, r, d = repo.status()[:4]
539 m, a, r, d = repo.status()[:4]
540 if m or a or r or d:
540 if m or a or r or d:
541 raise util.Abort(_('outstanding local changes'))
541 raise util.Abort(_('outstanding local changes'))
542
542
543 bundle = None
543 bundle = None
544 source = opts.get('source')
544 source = opts.get('source')
545 if source:
545 if source:
546 (source, incoming, bundle) = getremotechanges(repo, source)
546 (source, incoming, bundle) = getremotechanges(repo, source)
547 else:
547 else:
548 source = repo
548 source = repo
549
549
550 try:
550 try:
551 if opts.get('continue'):
551 if opts.get('continue'):
552 tp.resume(repo, source, opts)
552 tp.resume(repo, source, opts)
553 return
553 return
554
554
555 tf=tp.transplantfilter(repo, source, p1)
555 tf=tp.transplantfilter(repo, source, p1)
556 if opts.get('prune'):
556 if opts.get('prune'):
557 prune = [source.lookup(r)
557 prune = [source.lookup(r)
558 for r in cmdutil.revrange(source, opts.get('prune'))]
558 for r in cmdutil.revrange(source, opts.get('prune'))]
559 matchfn = lambda x: tf(x) and x not in prune
559 matchfn = lambda x: tf(x) and x not in prune
560 else:
560 else:
561 matchfn = tf
561 matchfn = tf
562 branches = map(source.lookup, opts.get('branch', ()))
562 branches = map(source.lookup, opts.get('branch', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
563 merges = map(source.lookup, opts.get('merge', ()))
564 revmap = {}
564 revmap = {}
565 if revs:
565 if revs:
566 for r in cmdutil.revrange(source, revs):
566 for r in cmdutil.revrange(source, revs):
567 revmap[int(r)] = source.lookup(r)
567 revmap[int(r)] = source.lookup(r)
568 elif opts.get('all') or not merges:
568 elif opts.get('all') or not merges:
569 if source != repo:
569 if source != repo:
570 alltransplants = incwalk(source, incoming, branches,
570 alltransplants = incwalk(source, incoming, branches,
571 match=matchfn)
571 match=matchfn)
572 else:
572 else:
573 alltransplants = transplantwalk(source, p1, branches,
573 alltransplants = transplantwalk(source, p1, branches,
574 match=matchfn)
574 match=matchfn)
575 if opts.get('all'):
575 if opts.get('all'):
576 revs = alltransplants
576 revs = alltransplants
577 else:
577 else:
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
578 revs, newmerges = browserevs(ui, source, alltransplants, opts)
579 merges.extend(newmerges)
579 merges.extend(newmerges)
580 for r in revs:
580 for r in revs:
581 revmap[source.changelog.rev(r)] = r
581 revmap[source.changelog.rev(r)] = r
582 for r in merges:
582 for r in merges:
583 revmap[source.changelog.rev(r)] = r
583 revmap[source.changelog.rev(r)] = r
584
584
585 tp.apply(repo, source, revmap, merges, opts)
585 tp.apply(repo, source, revmap, merges, opts)
586 finally:
586 finally:
587 if bundle:
587 if bundle:
588 source.close()
588 source.close()
589 os.unlink(bundle)
589 os.unlink(bundle)
590
590
591 cmdtable = {
591 cmdtable = {
592 "transplant":
592 "transplant":
593 (transplant,
593 (transplant,
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
594 [('s', 'source', '', _('pull patches from REPOSITORY')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
595 ('b', 'branch', [], _('pull patches from branch BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
596 ('a', 'all', None, _('pull all changesets up to BRANCH')),
597 ('p', 'prune', [], _('skip over REV')),
597 ('p', 'prune', [], _('skip over REV')),
598 ('m', 'merge', [], _('merge at REV')),
598 ('m', 'merge', [], _('merge at REV')),
599 ('', 'log', None, _('append transplant info to log message')),
599 ('', 'log', None, _('append transplant info to log message')),
600 ('c', 'continue', None, _('continue last transplant session '
600 ('c', 'continue', None, _('continue last transplant session '
601 'after repair')),
601 'after repair')),
602 ('', 'filter', '', _('filter changesets through FILTER'))],
602 ('', 'filter', '', _('filter changesets through FILTER'))],
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
603 _('hg transplant [-s REPOSITORY] [-b BRANCH [-a]] [-p REV] '
604 '[-m REV] [REV]...'))
604 '[-m REV] [REV]...'))
605 }
605 }
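
# Editor's note: a minimal standalone sketch of the option rules spelled out in
# transplant()'s docstring and enforced by checkopts() above. The function name
# and the plain dict/ValueError interface are illustrative assumptions, not part
# of the extension's API.
def check_transplant_opts(opts, revs):
    if opts.get('continue'):
        if opts.get('branch') or opts.get('all') or opts.get('merge'):
            raise ValueError('--continue is incompatible with branch, all or merge')
        return
    if not (opts.get('source') or revs or opts.get('merge') or opts.get('branch')):
        raise ValueError('no source URL, branch tag or revision list provided')
    if opts.get('all'):
        if not opts.get('branch'):
            raise ValueError('--all requires a branch revision')
        if revs:
            raise ValueError('--all is incompatible with a revision list')

# e.g. check_transplant_opts({'all': True}, []) raises "--all requires a branch revision"
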
@@ -1,226 +1,226
1 # archival.py - revision archival for mercurial
1 # archival.py - revision archival for mercurial
2 #
2 #
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
3 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 from node import hex
9 from node import hex
10 import util
10 import util
11 import cStringIO, os, stat, tarfile, time, zipfile
11 import cStringIO, os, stat, tarfile, time, zipfile
12 import zlib, gzip
12 import zlib, gzip
13
13
14 def tidyprefix(dest, prefix, suffixes):
14 def tidyprefix(dest, prefix, suffixes):
15 '''choose prefix to use for names in archive. make sure prefix is
15 '''choose prefix to use for names in archive. make sure prefix is
16 safe for consumers.'''
16 safe for consumers.'''
17
17
18 if prefix:
18 if prefix:
19 prefix = util.normpath(prefix)
19 prefix = util.normpath(prefix)
20 else:
20 else:
21 if not isinstance(dest, str):
21 if not isinstance(dest, str):
22 raise ValueError('dest must be string if no prefix')
22 raise ValueError('dest must be string if no prefix')
23 prefix = os.path.basename(dest)
23 prefix = os.path.basename(dest)
24 lower = prefix.lower()
24 lower = prefix.lower()
25 for sfx in suffixes:
25 for sfx in suffixes:
26 if lower.endswith(sfx):
26 if lower.endswith(sfx):
27 prefix = prefix[:-len(sfx)]
27 prefix = prefix[:-len(sfx)]
28 break
28 break
29 lpfx = os.path.normpath(util.localpath(prefix))
29 lpfx = os.path.normpath(util.localpath(prefix))
30 prefix = util.pconvert(lpfx)
30 prefix = util.pconvert(lpfx)
31 if not prefix.endswith('/'):
31 if not prefix.endswith('/'):
32 prefix += '/'
32 prefix += '/'
33 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
33 if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix:
34 raise util.Abort(_('archive prefix contains illegal components'))
34 raise util.Abort(_('archive prefix contains illegal components'))
35 return prefix
35 return prefix
36
36
37 class tarit:
37 class tarit(object):
38 '''write archive to tar file or stream. can write uncompressed,
38 '''write archive to tar file or stream. can write uncompressed,
39 or compress with gzip or bzip2.'''
39 or compress with gzip or bzip2.'''
40
40
41 class GzipFileWithTime(gzip.GzipFile):
41 class GzipFileWithTime(gzip.GzipFile):
42
42
43 def __init__(self, *args, **kw):
43 def __init__(self, *args, **kw):
44 timestamp = None
44 timestamp = None
45 if 'timestamp' in kw:
45 if 'timestamp' in kw:
46 timestamp = kw.pop('timestamp')
46 timestamp = kw.pop('timestamp')
47 if timestamp is None:
47 if timestamp is None:
48 self.timestamp = time.time()
48 self.timestamp = time.time()
49 else:
49 else:
50 self.timestamp = timestamp
50 self.timestamp = timestamp
51 gzip.GzipFile.__init__(self, *args, **kw)
51 gzip.GzipFile.__init__(self, *args, **kw)
52
52
53 def _write_gzip_header(self):
53 def _write_gzip_header(self):
54 self.fileobj.write('\037\213') # magic header
54 self.fileobj.write('\037\213') # magic header
55 self.fileobj.write('\010') # compression method
55 self.fileobj.write('\010') # compression method
56 # Python 2.6 deprecates self.filename
56 # Python 2.6 deprecates self.filename
57 fname = getattr(self, 'name', None) or self.filename
57 fname = getattr(self, 'name', None) or self.filename
58 flags = 0
58 flags = 0
59 if fname:
59 if fname:
60 flags = gzip.FNAME
60 flags = gzip.FNAME
61 self.fileobj.write(chr(flags))
61 self.fileobj.write(chr(flags))
62 gzip.write32u(self.fileobj, long(self.timestamp))
62 gzip.write32u(self.fileobj, long(self.timestamp))
63 self.fileobj.write('\002')
63 self.fileobj.write('\002')
64 self.fileobj.write('\377')
64 self.fileobj.write('\377')
65 if fname:
65 if fname:
66 self.fileobj.write(fname + '\000')
66 self.fileobj.write(fname + '\000')
67
67
68 def __init__(self, dest, prefix, mtime, kind=''):
68 def __init__(self, dest, prefix, mtime, kind=''):
69 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
69 self.prefix = tidyprefix(dest, prefix, ['.tar', '.tar.bz2', '.tar.gz',
70 '.tgz', '.tbz2'])
70 '.tgz', '.tbz2'])
71 self.mtime = mtime
71 self.mtime = mtime
72
72
73 def taropen(name, mode, fileobj=None):
73 def taropen(name, mode, fileobj=None):
74 if kind == 'gz':
74 if kind == 'gz':
75 mode = mode[0]
75 mode = mode[0]
76 if not fileobj:
76 if not fileobj:
77 fileobj = open(name, mode + 'b')
77 fileobj = open(name, mode + 'b')
78 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
78 gzfileobj = self.GzipFileWithTime(name, mode + 'b',
79 zlib.Z_BEST_COMPRESSION,
79 zlib.Z_BEST_COMPRESSION,
80 fileobj, timestamp=mtime)
80 fileobj, timestamp=mtime)
81 return tarfile.TarFile.taropen(name, mode, gzfileobj)
81 return tarfile.TarFile.taropen(name, mode, gzfileobj)
82 else:
82 else:
83 return tarfile.open(name, mode + kind, fileobj)
83 return tarfile.open(name, mode + kind, fileobj)
84
84
85 if isinstance(dest, str):
85 if isinstance(dest, str):
86 self.z = taropen(dest, mode='w:')
86 self.z = taropen(dest, mode='w:')
87 else:
87 else:
88 # Python 2.5-2.5.1 have a regression that requires a name arg
88 # Python 2.5-2.5.1 have a regression that requires a name arg
89 self.z = taropen(name='', mode='w|', fileobj=dest)
89 self.z = taropen(name='', mode='w|', fileobj=dest)
90
90
91 def addfile(self, name, mode, islink, data):
91 def addfile(self, name, mode, islink, data):
92 i = tarfile.TarInfo(self.prefix + name)
92 i = tarfile.TarInfo(self.prefix + name)
93 i.mtime = self.mtime
93 i.mtime = self.mtime
94 i.size = len(data)
94 i.size = len(data)
95 if islink:
95 if islink:
96 i.type = tarfile.SYMTYPE
96 i.type = tarfile.SYMTYPE
97 i.mode = 0777
97 i.mode = 0777
98 i.linkname = data
98 i.linkname = data
99 data = None
99 data = None
100 i.size = 0
100 i.size = 0
101 else:
101 else:
102 i.mode = mode
102 i.mode = mode
103 data = cStringIO.StringIO(data)
103 data = cStringIO.StringIO(data)
104 self.z.addfile(i, data)
104 self.z.addfile(i, data)
105
105
106 def done(self):
106 def done(self):
107 self.z.close()
107 self.z.close()
108
108
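
# Editor's note: GzipFileWithTime exists so the timestamp written into the gzip
# header can be pinned to the changeset's mtime, making archives byte-for-byte
# reproducible. A minimal sketch of the same idea using only the standard
# library (the mtime keyword is available on gzip.GzipFile in modern Python):
import gzip, io

def gz_bytes(data, timestamp=0):
    buf = io.BytesIO()
    gzf = gzip.GzipFile(fileobj=buf, mode='wb', mtime=timestamp)
    gzf.write(data)
    gzf.close()
    return buf.getvalue()

assert gz_bytes(b'same input') == gz_bytes(b'same input')  # identical output each run
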
109 class tellable:
109 class tellable(object):
110 '''provide tell method for zipfile.ZipFile when writing to http
110 '''provide tell method for zipfile.ZipFile when writing to http
111 response file object.'''
111 response file object.'''
112
112
113 def __init__(self, fp):
113 def __init__(self, fp):
114 self.fp = fp
114 self.fp = fp
115 self.offset = 0
115 self.offset = 0
116
116
117 def __getattr__(self, key):
117 def __getattr__(self, key):
118 return getattr(self.fp, key)
118 return getattr(self.fp, key)
119
119
120 def write(self, s):
120 def write(self, s):
121 self.fp.write(s)
121 self.fp.write(s)
122 self.offset += len(s)
122 self.offset += len(s)
123
123
124 def tell(self):
124 def tell(self):
125 return self.offset
125 return self.offset
126
126
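
# Editor's note: tellable wraps a write-only stream (such as an HTTP response)
# so that zipfile.ZipFile, which calls tell() while writing, can still be used.
# A self-contained sketch of the same byte-counting idea; Counter and the
# BytesIO demo stream are illustrative, not part of this module:
import io

class Counter(object):
    def __init__(self, fp):
        self.fp, self.offset = fp, 0
    def write(self, s):
        self.fp.write(s)
        self.offset += len(s)
    def tell(self):
        return self.offset

out = Counter(io.BytesIO())
out.write(b'abc')
assert out.tell() == 3
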
127 class zipit:
127 class zipit(object):
128 '''write archive to zip file or stream. can write uncompressed,
128 '''write archive to zip file or stream. can write uncompressed,
129 or compressed with deflate.'''
129 or compressed with deflate.'''
130
130
131 def __init__(self, dest, prefix, mtime, compress=True):
131 def __init__(self, dest, prefix, mtime, compress=True):
132 self.prefix = tidyprefix(dest, prefix, ('.zip',))
132 self.prefix = tidyprefix(dest, prefix, ('.zip',))
133 if not isinstance(dest, str):
133 if not isinstance(dest, str):
134 try:
134 try:
135 dest.tell()
135 dest.tell()
136 except (AttributeError, IOError):
136 except (AttributeError, IOError):
137 dest = tellable(dest)
137 dest = tellable(dest)
138 self.z = zipfile.ZipFile(dest, 'w',
138 self.z = zipfile.ZipFile(dest, 'w',
139 compress and zipfile.ZIP_DEFLATED or
139 compress and zipfile.ZIP_DEFLATED or
140 zipfile.ZIP_STORED)
140 zipfile.ZIP_STORED)
141 self.date_time = time.gmtime(mtime)[:6]
141 self.date_time = time.gmtime(mtime)[:6]
142
142
143 def addfile(self, name, mode, islink, data):
143 def addfile(self, name, mode, islink, data):
144 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
144 i = zipfile.ZipInfo(self.prefix + name, self.date_time)
145 i.compress_type = self.z.compression
145 i.compress_type = self.z.compression
146 # unzip will not honor unix file modes unless file creator is
146 # unzip will not honor unix file modes unless file creator is
147 # set to unix (id 3).
147 # set to unix (id 3).
148 i.create_system = 3
148 i.create_system = 3
149 ftype = stat.S_IFREG
149 ftype = stat.S_IFREG
150 if islink:
150 if islink:
151 mode = 0777
151 mode = 0777
152 ftype = stat.S_IFLNK
152 ftype = stat.S_IFLNK
153 i.external_attr = (mode | ftype) << 16L
153 i.external_attr = (mode | ftype) << 16L
154 self.z.writestr(i, data)
154 self.z.writestr(i, data)
155
155
156 def done(self):
156 def done(self):
157 self.z.close()
157 self.z.close()
158
158
159 class fileit:
159 class fileit(object):
160 '''write archive as files in directory.'''
160 '''write archive as files in directory.'''
161
161
162 def __init__(self, name, prefix, mtime):
162 def __init__(self, name, prefix, mtime):
163 if prefix:
163 if prefix:
164 raise util.Abort(_('cannot give prefix when archiving to files'))
164 raise util.Abort(_('cannot give prefix when archiving to files'))
165 self.basedir = name
165 self.basedir = name
166 self.opener = util.opener(self.basedir)
166 self.opener = util.opener(self.basedir)
167
167
168 def addfile(self, name, mode, islink, data):
168 def addfile(self, name, mode, islink, data):
169 if islink:
169 if islink:
170 self.opener.symlink(data, name)
170 self.opener.symlink(data, name)
171 return
171 return
172 f = self.opener(name, "w", atomictemp=True)
172 f = self.opener(name, "w", atomictemp=True)
173 f.write(data)
173 f.write(data)
174 f.rename()
174 f.rename()
175 destfile = os.path.join(self.basedir, name)
175 destfile = os.path.join(self.basedir, name)
176 os.chmod(destfile, mode)
176 os.chmod(destfile, mode)
177
177
178 def done(self):
178 def done(self):
179 pass
179 pass
180
180
181 archivers = {
181 archivers = {
182 'files': fileit,
182 'files': fileit,
183 'tar': tarit,
183 'tar': tarit,
184 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
184 'tbz2': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'bz2'),
185 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
185 'tgz': lambda name, prefix, mtime: tarit(name, prefix, mtime, 'gz'),
186 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
186 'uzip': lambda name, prefix, mtime: zipit(name, prefix, mtime, False),
187 'zip': zipit,
187 'zip': zipit,
188 }
188 }
189
189
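
# Editor's note: archive() below selects an archiver with
# archivers[kind](dest, prefix, mtime), i.e. a plain registry of factory
# callables keyed by archive type. A toy version of that dispatch pattern
# (names are illustrative only):
toy_registry = {
    'upper': lambda text: text.upper(),
    'lower': lambda text: text.lower(),
}

def toy_dispatch(kind, text):
    if kind not in toy_registry:
        raise KeyError("unknown kind '%s'" % kind)
    return toy_registry[kind](text)

assert toy_dispatch('upper', 'abc') == 'ABC'
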
190 def archive(repo, dest, node, kind, decode=True, matchfn=None,
190 def archive(repo, dest, node, kind, decode=True, matchfn=None,
191 prefix=None, mtime=None):
191 prefix=None, mtime=None):
192 '''create archive of repo as it was at node.
192 '''create archive of repo as it was at node.
193
193
194 dest can be name of directory, name of archive file, or file
194 dest can be name of directory, name of archive file, or file
195 object to write archive to.
195 object to write archive to.
196
196
197 kind is type of archive to create.
197 kind is type of archive to create.
198
198
199 decode tells whether to put files through decode filters from
199 decode tells whether to put files through decode filters from
200 hgrc.
200 hgrc.
201
201
202 matchfn is function to filter names of files to write to archive.
202 matchfn is function to filter names of files to write to archive.
203
203
204 prefix is name of path to put before every archive member.'''
204 prefix is name of path to put before every archive member.'''
205
205
206 def write(name, mode, islink, getdata):
206 def write(name, mode, islink, getdata):
207 if matchfn and not matchfn(name): return
207 if matchfn and not matchfn(name): return
208 data = getdata()
208 data = getdata()
209 if decode:
209 if decode:
210 data = repo.wwritedata(name, data)
210 data = repo.wwritedata(name, data)
211 archiver.addfile(name, mode, islink, data)
211 archiver.addfile(name, mode, islink, data)
212
212
213 if kind not in archivers:
213 if kind not in archivers:
214 raise util.Abort(_("unknown archive type '%s'") % kind)
214 raise util.Abort(_("unknown archive type '%s'") % kind)
215
215
216 ctx = repo[node]
216 ctx = repo[node]
217 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
217 archiver = archivers[kind](dest, prefix, mtime or ctx.date()[0])
218
218
219 if repo.ui.configbool("ui", "archivemeta", True):
219 if repo.ui.configbool("ui", "archivemeta", True):
220 write('.hg_archival.txt', 0644, False,
220 write('.hg_archival.txt', 0644, False,
221 lambda: 'repo: %s\nnode: %s\n' % (
221 lambda: 'repo: %s\nnode: %s\n' % (
222 hex(repo.changelog.node(0)), hex(node)))
222 hex(repo.changelog.node(0)), hex(node)))
223 for f in ctx:
223 for f in ctx:
224 ff = ctx.flags(f)
224 ff = ctx.flags(f)
225 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
225 write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
226 archiver.done()
226 archiver.done()
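
# Editor's note: the only functional change in this file is switching tarit,
# tellable, zipit and fileit from old-style to new-style classes by deriving
# from object. Under Python 2, which this code targets, the difference is
# observable; the tiny demo below is illustrative and Python 2 only:
class OldStyle:            # old-style: type(instance) is the generic 'instance' type
    pass

class NewStyle(object):    # new-style: a real type, works with super(), properties,
    pass                   # descriptors and __slots__

assert type(NewStyle()) is NewStyle
assert type(OldStyle()) is not OldStyle   # it is types.InstanceType instead
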
@@ -1,230 +1,230
1 # changelog.py - changelog class for mercurial
1 # changelog.py - changelog class for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import bin, hex, nullid
8 from node import bin, hex, nullid
9 from i18n import _
9 from i18n import _
10 import util, error, revlog, encoding
10 import util, error, revlog, encoding
11
11
12 def _string_escape(text):
12 def _string_escape(text):
13 """
13 """
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
14 >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
15 >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d
16 >>> s
16 >>> s
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
17 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n'
18 >>> res = _string_escape(s)
18 >>> res = _string_escape(s)
19 >>> s == res.decode('string_escape')
19 >>> s == res.decode('string_escape')
20 True
20 True
21 """
21 """
22 # subset of the string_escape codec
22 # subset of the string_escape codec
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
23 text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r')
24 return text.replace('\0', '\\0')
24 return text.replace('\0', '\\0')
25
25
26 def decodeextra(text):
26 def decodeextra(text):
27 extra = {}
27 extra = {}
28 for l in text.split('\0'):
28 for l in text.split('\0'):
29 if l:
29 if l:
30 k, v = l.decode('string_escape').split(':', 1)
30 k, v = l.decode('string_escape').split(':', 1)
31 extra[k] = v
31 extra[k] = v
32 return extra
32 return extra
33
33
34 def encodeextra(d):
34 def encodeextra(d):
35 # keys must be sorted to produce a deterministic changelog entry
35 # keys must be sorted to produce a deterministic changelog entry
36 items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)]
36 items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)]
37 return "\0".join(items)
37 return "\0".join(items)
38
38
39 class appender:
39 class appender(object):
40 '''the changelog index must be updated last on disk, so we use this class
40 '''the changelog index must be updated last on disk, so we use this class
41 to delay writes to it'''
41 to delay writes to it'''
42 def __init__(self, fp, buf):
42 def __init__(self, fp, buf):
43 self.data = buf
43 self.data = buf
44 self.fp = fp
44 self.fp = fp
45 self.offset = fp.tell()
45 self.offset = fp.tell()
46 self.size = util.fstat(fp).st_size
46 self.size = util.fstat(fp).st_size
47
47
48 def end(self):
48 def end(self):
49 return self.size + len("".join(self.data))
49 return self.size + len("".join(self.data))
50 def tell(self):
50 def tell(self):
51 return self.offset
51 return self.offset
52 def flush(self):
52 def flush(self):
53 pass
53 pass
54 def close(self):
54 def close(self):
55 self.fp.close()
55 self.fp.close()
56
56
57 def seek(self, offset, whence=0):
57 def seek(self, offset, whence=0):
58 '''virtual file offset spans real file and data'''
58 '''virtual file offset spans real file and data'''
59 if whence == 0:
59 if whence == 0:
60 self.offset = offset
60 self.offset = offset
61 elif whence == 1:
61 elif whence == 1:
62 self.offset += offset
62 self.offset += offset
63 elif whence == 2:
63 elif whence == 2:
64 self.offset = self.end() + offset
64 self.offset = self.end() + offset
65 if self.offset < self.size:
65 if self.offset < self.size:
66 self.fp.seek(self.offset)
66 self.fp.seek(self.offset)
67
67
68 def read(self, count=-1):
68 def read(self, count=-1):
69 '''only trick here is reads that span real file and data'''
69 '''only trick here is reads that span real file and data'''
70 ret = ""
70 ret = ""
71 if self.offset < self.size:
71 if self.offset < self.size:
72 s = self.fp.read(count)
72 s = self.fp.read(count)
73 ret = s
73 ret = s
74 self.offset += len(s)
74 self.offset += len(s)
75 if count > 0:
75 if count > 0:
76 count -= len(s)
76 count -= len(s)
77 if count != 0:
77 if count != 0:
78 doff = self.offset - self.size
78 doff = self.offset - self.size
79 self.data.insert(0, "".join(self.data))
79 self.data.insert(0, "".join(self.data))
80 del self.data[1:]
80 del self.data[1:]
81 s = self.data[0][doff:doff+count]
81 s = self.data[0][doff:doff+count]
82 self.offset += len(s)
82 self.offset += len(s)
83 ret += s
83 ret += s
84 return ret
84 return ret
85
85
86 def write(self, s):
86 def write(self, s):
87 self.data.append(str(s))
87 self.data.append(str(s))
88 self.offset += len(s)
88 self.offset += len(s)
89
89
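
# Editor's note: appender keeps the not-yet-final tail of the changelog index in
# memory while a transaction is open; reads below the real file's size come from
# disk, reads past it come from the buffer. A reduced sketch of that split
# (names and the BytesIO stand-in are illustrative):
import io

real = io.BytesIO(b'on-disk-part')      # what is already on disk
pending = [b'pending-part']             # what appender would buffer in memory
size = len(real.getvalue())

def read_at(offset, count):
    if offset < size:                   # falls inside the real file
        real.seek(offset)
        return real.read(min(count, size - offset))
    data = b''.join(pending)            # otherwise serve from the buffer
    return data[offset - size:offset - size + count]

assert read_at(0, 7) == b'on-disk'
assert read_at(size, 7) == b'pending'
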
90 class changelog(revlog.revlog):
90 class changelog(revlog.revlog):
91 def __init__(self, opener):
91 def __init__(self, opener):
92 self._realopener = opener
92 self._realopener = opener
93 self._delayed = False
93 self._delayed = False
94 revlog.revlog.__init__(self, self._delayopener, "00changelog.i")
94 revlog.revlog.__init__(self, self._delayopener, "00changelog.i")
95
95
96 def delayupdate(self):
96 def delayupdate(self):
97 "delay visibility of index updates to other readers"
97 "delay visibility of index updates to other readers"
98 self._delayed = True
98 self._delayed = True
99 self._delaycount = len(self)
99 self._delaycount = len(self)
100 self._delaybuf = []
100 self._delaybuf = []
101 self._delayname = None
101 self._delayname = None
102
102
103 def finalize(self, tr):
103 def finalize(self, tr):
104 "finalize index updates"
104 "finalize index updates"
105 self._delayed = False
105 self._delayed = False
106 # move redirected index data back into place
106 # move redirected index data back into place
107 if self._delayname:
107 if self._delayname:
108 util.rename(self._delayname + ".a", self._delayname)
108 util.rename(self._delayname + ".a", self._delayname)
109 elif self._delaybuf:
109 elif self._delaybuf:
110 fp = self.opener(self.indexfile, 'a')
110 fp = self.opener(self.indexfile, 'a')
111 fp.write("".join(self._delaybuf))
111 fp.write("".join(self._delaybuf))
112 fp.close()
112 fp.close()
113 self._delaybuf = []
113 self._delaybuf = []
114 # split when we're done
114 # split when we're done
115 self.checkinlinesize(tr)
115 self.checkinlinesize(tr)
116
116
117 def _delayopener(self, name, mode='r'):
117 def _delayopener(self, name, mode='r'):
118 fp = self._realopener(name, mode)
118 fp = self._realopener(name, mode)
119 # only divert the index
119 # only divert the index
120 if not self._delayed or not name == self.indexfile:
120 if not self._delayed or not name == self.indexfile:
121 return fp
121 return fp
122 # if we're doing an initial clone, divert to another file
122 # if we're doing an initial clone, divert to another file
123 if self._delaycount == 0:
123 if self._delaycount == 0:
124 self._delayname = fp.name
124 self._delayname = fp.name
125 if not len(self):
125 if not len(self):
126 # make sure to truncate the file
126 # make sure to truncate the file
127 mode = mode.replace('a', 'w')
127 mode = mode.replace('a', 'w')
128 return self._realopener(name + ".a", mode)
128 return self._realopener(name + ".a", mode)
129 # otherwise, divert to memory
129 # otherwise, divert to memory
130 return appender(fp, self._delaybuf)
130 return appender(fp, self._delaybuf)
131
131
132 def readpending(self, file):
132 def readpending(self, file):
133 r = revlog.revlog(self.opener, file)
133 r = revlog.revlog(self.opener, file)
134 self.index = r.index
134 self.index = r.index
135 self.nodemap = r.nodemap
135 self.nodemap = r.nodemap
136 self._chunkcache = r._chunkcache
136 self._chunkcache = r._chunkcache
137
137
138 def writepending(self):
138 def writepending(self):
139 "create a file containing the unfinalized state for pretxnchangegroup"
139 "create a file containing the unfinalized state for pretxnchangegroup"
140 if self._delaybuf:
140 if self._delaybuf:
141 # make a temporary copy of the index
141 # make a temporary copy of the index
142 fp1 = self._realopener(self.indexfile)
142 fp1 = self._realopener(self.indexfile)
143 fp2 = self._realopener(self.indexfile + ".a", "w")
143 fp2 = self._realopener(self.indexfile + ".a", "w")
144 fp2.write(fp1.read())
144 fp2.write(fp1.read())
145 # add pending data
145 # add pending data
146 fp2.write("".join(self._delaybuf))
146 fp2.write("".join(self._delaybuf))
147 fp2.close()
147 fp2.close()
148 # switch modes so finalize can simply rename
148 # switch modes so finalize can simply rename
149 self._delaybuf = []
149 self._delaybuf = []
150 self._delayname = fp1.name
150 self._delayname = fp1.name
151
151
152 if self._delayname:
152 if self._delayname:
153 return True
153 return True
154
154
155 return False
155 return False
156
156
157 def checkinlinesize(self, tr, fp=None):
157 def checkinlinesize(self, tr, fp=None):
158 if self.opener == self._delayopener:
158 if self.opener == self._delayopener:
159 return
159 return
160 return revlog.revlog.checkinlinesize(self, tr, fp)
160 return revlog.revlog.checkinlinesize(self, tr, fp)
161
161
162 def read(self, node):
162 def read(self, node):
163 """
163 """
164 format used:
164 format used:
165 nodeid\n : manifest node in ascii
165 nodeid\n : manifest node in ascii
166 user\n : user, no \n or \r allowed
166 user\n : user, no \n or \r allowed
167 time tz extra\n : date (time is int or float, timezone is int)
167 time tz extra\n : date (time is int or float, timezone is int)
168 : extra is metadatas, encoded and separated by '\0'
168 : extra is metadatas, encoded and separated by '\0'
169 : older versions ignore it
169 : older versions ignore it
170 files\n\n : files modified by the cset, no \n or \r allowed
170 files\n\n : files modified by the cset, no \n or \r allowed
171 (.*) : comment (free text, ideally utf-8)
171 (.*) : comment (free text, ideally utf-8)
172
172
173 changelog v0 doesn't use extra
173 changelog v0 doesn't use extra
174 """
174 """
175 text = self.revision(node)
175 text = self.revision(node)
176 if not text:
176 if not text:
177 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
177 return (nullid, "", (0, 0), [], "", {'branch': 'default'})
178 last = text.index("\n\n")
178 last = text.index("\n\n")
179 desc = encoding.tolocal(text[last + 2:])
179 desc = encoding.tolocal(text[last + 2:])
180 l = text[:last].split('\n')
180 l = text[:last].split('\n')
181 manifest = bin(l[0])
181 manifest = bin(l[0])
182 user = encoding.tolocal(l[1])
182 user = encoding.tolocal(l[1])
183
183
184 extra_data = l[2].split(' ', 2)
184 extra_data = l[2].split(' ', 2)
185 if len(extra_data) != 3:
185 if len(extra_data) != 3:
186 time = float(extra_data.pop(0))
186 time = float(extra_data.pop(0))
187 try:
187 try:
188 # various tools did silly things with the time zone field.
188 # various tools did silly things with the time zone field.
189 timezone = int(extra_data[0])
189 timezone = int(extra_data[0])
190 except:
190 except:
191 timezone = 0
191 timezone = 0
192 extra = {}
192 extra = {}
193 else:
193 else:
194 time, timezone, extra = extra_data
194 time, timezone, extra = extra_data
195 time, timezone = float(time), int(timezone)
195 time, timezone = float(time), int(timezone)
196 extra = decodeextra(extra)
196 extra = decodeextra(extra)
197 if not extra.get('branch'):
197 if not extra.get('branch'):
198 extra['branch'] = 'default'
198 extra['branch'] = 'default'
199 files = l[3:]
199 files = l[3:]
200 return (manifest, user, (time, timezone), files, desc, extra)
200 return (manifest, user, (time, timezone), files, desc, extra)
201
201
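
# Editor's note: a worked example of the changeset text layout that read()
# above documents and parses. All values are made up for illustration.
sample = (
    "0123456789abcdef0123456789abcdef01234567\n"   # manifest node, hex
    "Jane Doe <jane@example.com>\n"                 # user
    "1242000000 0\n"                                # time, timezone (no extra here)
    "dir/a.txt\n"                                   # touched files, one per line
    "dir/b.txt\n"
    "\n"
    "the commit message, free text"
)
last = sample.index("\n\n")
header, desc = sample[:last].split("\n"), sample[last + 2:]
manifest_hex, user, datefield = header[0], header[1], header[2]
files = header[3:]
assert files == ['dir/a.txt', 'dir/b.txt'] and desc.startswith('the commit')
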
202 def add(self, manifest, files, desc, transaction, p1, p2,
202 def add(self, manifest, files, desc, transaction, p1, p2,
203 user, date=None, extra={}):
203 user, date=None, extra={}):
204 user = user.strip()
204 user = user.strip()
205 # An empty username or a username with a "\n" will make the
205 # An empty username or a username with a "\n" will make the
206 # revision text contain two "\n\n" sequences -> corrupt
206 # revision text contain two "\n\n" sequences -> corrupt
207 # repository since read cannot unpack the revision.
207 # repository since read cannot unpack the revision.
208 if not user:
208 if not user:
209 raise error.RevlogError(_("empty username"))
209 raise error.RevlogError(_("empty username"))
210 if "\n" in user:
210 if "\n" in user:
211 raise error.RevlogError(_("username %s contains a newline")
211 raise error.RevlogError(_("username %s contains a newline")
212 % repr(user))
212 % repr(user))
213
213
214 # strip trailing whitespace and leading and trailing empty lines
214 # strip trailing whitespace and leading and trailing empty lines
215 desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
215 desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n')
216
216
217 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
217 user, desc = encoding.fromlocal(user), encoding.fromlocal(desc)
218
218
219 if date:
219 if date:
220 parseddate = "%d %d" % util.parsedate(date)
220 parseddate = "%d %d" % util.parsedate(date)
221 else:
221 else:
222 parseddate = "%d %d" % util.makedate()
222 parseddate = "%d %d" % util.makedate()
223 if extra and extra.get("branch") in ("default", ""):
223 if extra and extra.get("branch") in ("default", ""):
224 del extra["branch"]
224 del extra["branch"]
225 if extra:
225 if extra:
226 extra = encodeextra(extra)
226 extra = encodeextra(extra)
227 parseddate = "%s %s" % (parseddate, extra)
227 parseddate = "%s %s" % (parseddate, extra)
228 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
228 l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc]
229 text = "\n".join(l)
229 text = "\n".join(l)
230 return self.addrevision(text, transaction, len(self), p1, p2)
230 return self.addrevision(text, transaction, len(self), p1, p2)
@@ -1,1237 +1,1237
1 # cmdutil.py - help for command processing in mercurial
1 # cmdutil.py - help for command processing in mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from i18n import _
9 from i18n import _
10 import os, sys, errno, re, glob
10 import os, sys, errno, re, glob
11 import mdiff, bdiff, util, templater, patch, error, encoding
11 import mdiff, bdiff, util, templater, patch, error, encoding
12 import match as _match
12 import match as _match
13
13
14 revrangesep = ':'
14 revrangesep = ':'
15
15
16 def findpossible(cmd, table, strict=False):
16 def findpossible(cmd, table, strict=False):
17 """
17 """
18 Return cmd -> (aliases, command table entry)
18 Return cmd -> (aliases, command table entry)
19 for each matching command.
19 for each matching command.
20 Return debug commands (or their aliases) only if no normal command matches.
20 Return debug commands (or their aliases) only if no normal command matches.
21 """
21 """
22 choice = {}
22 choice = {}
23 debugchoice = {}
23 debugchoice = {}
24 for e in table.keys():
24 for e in table.keys():
25 aliases = e.lstrip("^").split("|")
25 aliases = e.lstrip("^").split("|")
26 found = None
26 found = None
27 if cmd in aliases:
27 if cmd in aliases:
28 found = cmd
28 found = cmd
29 elif not strict:
29 elif not strict:
30 for a in aliases:
30 for a in aliases:
31 if a.startswith(cmd):
31 if a.startswith(cmd):
32 found = a
32 found = a
33 break
33 break
34 if found is not None:
34 if found is not None:
35 if aliases[0].startswith("debug") or found.startswith("debug"):
35 if aliases[0].startswith("debug") or found.startswith("debug"):
36 debugchoice[found] = (aliases, table[e])
36 debugchoice[found] = (aliases, table[e])
37 else:
37 else:
38 choice[found] = (aliases, table[e])
38 choice[found] = (aliases, table[e])
39
39
40 if not choice and debugchoice:
40 if not choice and debugchoice:
41 choice = debugchoice
41 choice = debugchoice
42
42
43 return choice
43 return choice
44
44
45 def findcmd(cmd, table, strict=True):
45 def findcmd(cmd, table, strict=True):
46 """Return (aliases, command table entry) for command string."""
46 """Return (aliases, command table entry) for command string."""
47 choice = findpossible(cmd, table, strict)
47 choice = findpossible(cmd, table, strict)
48
48
49 if cmd in choice:
49 if cmd in choice:
50 return choice[cmd]
50 return choice[cmd]
51
51
52 if len(choice) > 1:
52 if len(choice) > 1:
53 clist = choice.keys()
53 clist = choice.keys()
54 clist.sort()
54 clist.sort()
55 raise error.AmbiguousCommand(cmd, clist)
55 raise error.AmbiguousCommand(cmd, clist)
56
56
57 if choice:
57 if choice:
58 return choice.values()[0]
58 return choice.values()[0]
59
59
60 raise error.UnknownCommand(cmd)
60 raise error.UnknownCommand(cmd)
61
61
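
# Editor's note: findpossible()/findcmd() above implement hg's command-name
# abbreviation: an exact alias wins, any unambiguous prefix is accepted, and
# several prefix hits are an error. A stripped-down, self-contained version of
# the same idea (the toy name table is illustrative):
def resolve(cmd, names):
    if cmd in names:
        return cmd
    hits = [n for n in names if n.startswith(cmd)]
    if len(hits) > 1:
        raise ValueError('ambiguous command: %s' % ', '.join(sorted(hits)))
    if not hits:
        raise ValueError('unknown command: %s' % cmd)
    return hits[0]

names = ['status', 'summary', 'serve', 'log']
assert resolve('l', names) == 'log'
# resolve('s', names) would raise: ambiguous command: serve, status, summary
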
62 def bail_if_changed(repo):
62 def bail_if_changed(repo):
63 if repo.dirstate.parents()[1] != nullid:
63 if repo.dirstate.parents()[1] != nullid:
64 raise util.Abort(_('outstanding uncommitted merge'))
64 raise util.Abort(_('outstanding uncommitted merge'))
65 modified, added, removed, deleted = repo.status()[:4]
65 modified, added, removed, deleted = repo.status()[:4]
66 if modified or added or removed or deleted:
66 if modified or added or removed or deleted:
67 raise util.Abort(_("outstanding uncommitted changes"))
67 raise util.Abort(_("outstanding uncommitted changes"))
68
68
69 def logmessage(opts):
69 def logmessage(opts):
70 """ get the log message according to -m and -l option """
70 """ get the log message according to -m and -l option """
71 message = opts.get('message')
71 message = opts.get('message')
72 logfile = opts.get('logfile')
72 logfile = opts.get('logfile')
73
73
74 if message and logfile:
74 if message and logfile:
75 raise util.Abort(_('options --message and --logfile are mutually '
75 raise util.Abort(_('options --message and --logfile are mutually '
76 'exclusive'))
76 'exclusive'))
77 if not message and logfile:
77 if not message and logfile:
78 try:
78 try:
79 if logfile == '-':
79 if logfile == '-':
80 message = sys.stdin.read()
80 message = sys.stdin.read()
81 else:
81 else:
82 message = open(logfile).read()
82 message = open(logfile).read()
83 except IOError, inst:
83 except IOError, inst:
84 raise util.Abort(_("can't read commit message '%s': %s") %
84 raise util.Abort(_("can't read commit message '%s': %s") %
85 (logfile, inst.strerror))
85 (logfile, inst.strerror))
86 return message
86 return message
87
87
88 def loglimit(opts):
88 def loglimit(opts):
89 """get the log limit according to option -l/--limit"""
89 """get the log limit according to option -l/--limit"""
90 limit = opts.get('limit')
90 limit = opts.get('limit')
91 if limit:
91 if limit:
92 try:
92 try:
93 limit = int(limit)
93 limit = int(limit)
94 except ValueError:
94 except ValueError:
95 raise util.Abort(_('limit must be a positive integer'))
95 raise util.Abort(_('limit must be a positive integer'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
96 if limit <= 0: raise util.Abort(_('limit must be positive'))
97 else:
97 else:
98 limit = sys.maxint
98 limit = sys.maxint
99 return limit
99 return limit
100
100
101 def remoteui(src, opts):
101 def remoteui(src, opts):
102 'build a remote ui from ui or repo and opts'
102 'build a remote ui from ui or repo and opts'
103 if hasattr(src, 'baseui'): # looks like a repository
103 if hasattr(src, 'baseui'): # looks like a repository
104 dst = src.baseui # drop repo-specific config
104 dst = src.baseui # drop repo-specific config
105 src = src.ui # copy target options from repo
105 src = src.ui # copy target options from repo
106 else: # assume it's a global ui object
106 else: # assume it's a global ui object
107 dst = src # keep all global options
107 dst = src # keep all global options
108
108
109 # copy ssh-specific options
109 # copy ssh-specific options
110 for o in 'ssh', 'remotecmd':
110 for o in 'ssh', 'remotecmd':
111 v = opts.get(o) or src.config('ui', o)
111 v = opts.get(o) or src.config('ui', o)
112 if v:
112 if v:
113 dst.setconfig("ui", o, v)
113 dst.setconfig("ui", o, v)
114 # copy bundle-specific options
114 # copy bundle-specific options
115 r = src.config('bundle', 'mainreporoot')
115 r = src.config('bundle', 'mainreporoot')
116 if r:
116 if r:
117 dst.setconfig('bundle', 'mainreporoot', r)
117 dst.setconfig('bundle', 'mainreporoot', r)
118
118
119 return dst
119 return dst
120
120
121 def revpair(repo, revs):
121 def revpair(repo, revs):
122 '''return pair of nodes, given list of revisions. second item can
122 '''return pair of nodes, given list of revisions. second item can
123 be None, meaning use working dir.'''
123 be None, meaning use working dir.'''
124
124
125 def revfix(repo, val, defval):
125 def revfix(repo, val, defval):
126 if not val and val != 0 and defval is not None:
126 if not val and val != 0 and defval is not None:
127 val = defval
127 val = defval
128 return repo.lookup(val)
128 return repo.lookup(val)
129
129
130 if not revs:
130 if not revs:
131 return repo.dirstate.parents()[0], None
131 return repo.dirstate.parents()[0], None
132 end = None
132 end = None
133 if len(revs) == 1:
133 if len(revs) == 1:
134 if revrangesep in revs[0]:
134 if revrangesep in revs[0]:
135 start, end = revs[0].split(revrangesep, 1)
135 start, end = revs[0].split(revrangesep, 1)
136 start = revfix(repo, start, 0)
136 start = revfix(repo, start, 0)
137 end = revfix(repo, end, len(repo) - 1)
137 end = revfix(repo, end, len(repo) - 1)
138 else:
138 else:
139 start = revfix(repo, revs[0], None)
139 start = revfix(repo, revs[0], None)
140 elif len(revs) == 2:
140 elif len(revs) == 2:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
141 if revrangesep in revs[0] or revrangesep in revs[1]:
142 raise util.Abort(_('too many revisions specified'))
142 raise util.Abort(_('too many revisions specified'))
143 start = revfix(repo, revs[0], None)
143 start = revfix(repo, revs[0], None)
144 end = revfix(repo, revs[1], None)
144 end = revfix(repo, revs[1], None)
145 else:
145 else:
146 raise util.Abort(_('too many revisions specified'))
146 raise util.Abort(_('too many revisions specified'))
147 return start, end
147 return start, end
148
148
149 def revrange(repo, revs):
149 def revrange(repo, revs):
150 """Yield revision as strings from a list of revision specifications."""
150 """Yield revision as strings from a list of revision specifications."""
151
151
152 def revfix(repo, val, defval):
152 def revfix(repo, val, defval):
153 if not val and val != 0 and defval is not None:
153 if not val and val != 0 and defval is not None:
154 return defval
154 return defval
155 return repo.changelog.rev(repo.lookup(val))
155 return repo.changelog.rev(repo.lookup(val))
156
156
157 seen, l = set(), []
157 seen, l = set(), []
158 for spec in revs:
158 for spec in revs:
159 if revrangesep in spec:
159 if revrangesep in spec:
160 start, end = spec.split(revrangesep, 1)
160 start, end = spec.split(revrangesep, 1)
161 start = revfix(repo, start, 0)
161 start = revfix(repo, start, 0)
162 end = revfix(repo, end, len(repo) - 1)
162 end = revfix(repo, end, len(repo) - 1)
163 step = start > end and -1 or 1
163 step = start > end and -1 or 1
164 for rev in xrange(start, end+step, step):
164 for rev in xrange(start, end+step, step):
165 if rev in seen:
165 if rev in seen:
166 continue
166 continue
167 seen.add(rev)
167 seen.add(rev)
168 l.append(rev)
168 l.append(rev)
169 else:
169 else:
170 rev = revfix(repo, spec, None)
170 rev = revfix(repo, spec, None)
171 if rev in seen:
171 if rev in seen:
172 continue
172 continue
173 seen.add(rev)
173 seen.add(rev)
174 l.append(rev)
174 l.append(rev)
175
175
176 return l
176 return l
177
177
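
# Editor's note: revrange() above turns specs like "3" or "1:4" into a
# deduplicated list of revision numbers, walking backwards when start > end.
# A simplified sketch over plain integers (repo.lookup() is replaced by int()
# purely for illustration):
def toy_revrange(specs):
    seen, out = set(), []
    for spec in specs:
        if ':' in spec:
            start, end = [int(x) for x in spec.split(':', 1)]
            step = 1 if start <= end else -1
            revs = range(start, end + step, step)
        else:
            revs = [int(spec)]
        for rev in revs:
            if rev not in seen:
                seen.add(rev)
                out.append(rev)
    return out

assert toy_revrange(['2', '5:3', '1:2']) == [2, 5, 4, 3, 1]
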
178 def make_filename(repo, pat, node,
178 def make_filename(repo, pat, node,
179 total=None, seqno=None, revwidth=None, pathname=None):
179 total=None, seqno=None, revwidth=None, pathname=None):
180 node_expander = {
180 node_expander = {
181 'H': lambda: hex(node),
181 'H': lambda: hex(node),
182 'R': lambda: str(repo.changelog.rev(node)),
182 'R': lambda: str(repo.changelog.rev(node)),
183 'h': lambda: short(node),
183 'h': lambda: short(node),
184 }
184 }
185 expander = {
185 expander = {
186 '%': lambda: '%',
186 '%': lambda: '%',
187 'b': lambda: os.path.basename(repo.root),
187 'b': lambda: os.path.basename(repo.root),
188 }
188 }
189
189
190 try:
190 try:
191 if node:
191 if node:
192 expander.update(node_expander)
192 expander.update(node_expander)
193 if node:
193 if node:
194 expander['r'] = (lambda:
194 expander['r'] = (lambda:
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
195 str(repo.changelog.rev(node)).zfill(revwidth or 0))
196 if total is not None:
196 if total is not None:
197 expander['N'] = lambda: str(total)
197 expander['N'] = lambda: str(total)
198 if seqno is not None:
198 if seqno is not None:
199 expander['n'] = lambda: str(seqno)
199 expander['n'] = lambda: str(seqno)
200 if total is not None and seqno is not None:
200 if total is not None and seqno is not None:
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
201 expander['n'] = lambda: str(seqno).zfill(len(str(total)))
202 if pathname is not None:
202 if pathname is not None:
203 expander['s'] = lambda: os.path.basename(pathname)
203 expander['s'] = lambda: os.path.basename(pathname)
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
204 expander['d'] = lambda: os.path.dirname(pathname) or '.'
205 expander['p'] = lambda: pathname
205 expander['p'] = lambda: pathname
206
206
207 newname = []
207 newname = []
208 patlen = len(pat)
208 patlen = len(pat)
209 i = 0
209 i = 0
210 while i < patlen:
210 while i < patlen:
211 c = pat[i]
211 c = pat[i]
212 if c == '%':
212 if c == '%':
213 i += 1
213 i += 1
214 c = pat[i]
214 c = pat[i]
215 c = expander[c]()
215 c = expander[c]()
216 newname.append(c)
216 newname.append(c)
217 i += 1
217 i += 1
218 return ''.join(newname)
218 return ''.join(newname)
219 except KeyError, inst:
219 except KeyError, inst:
220 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
220 raise util.Abort(_("invalid format spec '%%%s' in output filename") %
221 inst.args[0])
221 inst.args[0])
222
222
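
# Editor's note: make_filename() above expands %-codes (%H, %R, %h, %b, %r, %N,
# %n, %s, %d, %p) through a dict of zero-argument callables. A reduced,
# runnable sketch of the same expansion loop with made-up codes and values:
def expand(pat, expander):
    out, i = [], 0
    while i < len(pat):
        c = pat[i]
        if c == '%':                 # the next character selects an expander
            i += 1
            c = expander[pat[i]]()
        out.append(c)
        i += 1
    return ''.join(out)

demo = {'%': lambda: '%', 'b': lambda: 'myrepo', 'h': lambda: 'abc123de'}
assert expand('%b-%h.patch', demo) == 'myrepo-abc123de.patch'
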
223 def make_file(repo, pat, node=None,
223 def make_file(repo, pat, node=None,
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
224 total=None, seqno=None, revwidth=None, mode='wb', pathname=None):
225
225
226 writable = 'w' in mode or 'a' in mode
226 writable = 'w' in mode or 'a' in mode
227
227
228 if not pat or pat == '-':
228 if not pat or pat == '-':
229 return writable and sys.stdout or sys.stdin
229 return writable and sys.stdout or sys.stdin
230 if hasattr(pat, 'write') and writable:
230 if hasattr(pat, 'write') and writable:
231 return pat
231 return pat
232 if hasattr(pat, 'read') and 'r' in mode:
232 if hasattr(pat, 'read') and 'r' in mode:
233 return pat
233 return pat
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
234 return open(make_filename(repo, pat, node, total, seqno, revwidth,
235 pathname),
235 pathname),
236 mode)
236 mode)
237
237
238 def expandpats(pats):
238 def expandpats(pats):
239 if not util.expandglobs:
239 if not util.expandglobs:
240 return list(pats)
240 return list(pats)
241 ret = []
241 ret = []
242 for p in pats:
242 for p in pats:
243 kind, name = _match._patsplit(p, None)
243 kind, name = _match._patsplit(p, None)
244 if kind is None:
244 if kind is None:
245 globbed = glob.glob(name)
245 globbed = glob.glob(name)
246 if globbed:
246 if globbed:
247 ret.extend(globbed)
247 ret.extend(globbed)
248 continue
248 continue
249 ret.append(p)
249 ret.append(p)
250 return ret
250 return ret
251
251
252 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
252 def match(repo, pats=[], opts={}, globbed=False, default='relpath'):
253 if not globbed and default == 'relpath':
253 if not globbed and default == 'relpath':
254 pats = expandpats(pats or [])
254 pats = expandpats(pats or [])
255 m = _match.match(repo.root, repo.getcwd(), pats,
255 m = _match.match(repo.root, repo.getcwd(), pats,
256 opts.get('include'), opts.get('exclude'), default)
256 opts.get('include'), opts.get('exclude'), default)
257 def badfn(f, msg):
257 def badfn(f, msg):
258 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
258 repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
259 m.bad = badfn
259 m.bad = badfn
260 return m
260 return m
261
261
262 def matchall(repo):
262 def matchall(repo):
263 return _match.always(repo.root, repo.getcwd())
263 return _match.always(repo.root, repo.getcwd())
264
264
265 def matchfiles(repo, files):
265 def matchfiles(repo, files):
266 return _match.exact(repo.root, repo.getcwd(), files)
266 return _match.exact(repo.root, repo.getcwd(), files)
267
267
268 def findrenames(repo, match=None, threshold=0.5):
268 def findrenames(repo, match=None, threshold=0.5):
269 '''find renamed files -- yields (before, after, score) tuples'''
269 '''find renamed files -- yields (before, after, score) tuples'''
270 added, removed = repo.status(match=match)[1:3]
270 added, removed = repo.status(match=match)[1:3]
271 ctx = repo['.']
271 ctx = repo['.']
272 for a in added:
272 for a in added:
273 aa = repo.wread(a)
273 aa = repo.wread(a)
274 bestname, bestscore = None, threshold
274 bestname, bestscore = None, threshold
275 for r in removed:
275 for r in removed:
276 rr = ctx.filectx(r).data()
276 rr = ctx.filectx(r).data()
277
277
278 # bdiff.blocks() returns blocks of matching lines
278 # bdiff.blocks() returns blocks of matching lines
279 # count the number of bytes in each
279 # count the number of bytes in each
280 equal = 0
280 equal = 0
281 alines = mdiff.splitnewlines(aa)
281 alines = mdiff.splitnewlines(aa)
282 matches = bdiff.blocks(aa, rr)
282 matches = bdiff.blocks(aa, rr)
283 for x1,x2,y1,y2 in matches:
283 for x1,x2,y1,y2 in matches:
284 for line in alines[x1:x2]:
284 for line in alines[x1:x2]:
285 equal += len(line)
285 equal += len(line)
286
286
287 lengths = len(aa) + len(rr)
287 lengths = len(aa) + len(rr)
288 if lengths:
288 if lengths:
289 myscore = equal*2.0 / lengths
289 myscore = equal*2.0 / lengths
290 if myscore >= bestscore:
290 if myscore >= bestscore:
291 bestname, bestscore = r, myscore
291 bestname, bestscore = r, myscore
292 if bestname:
292 if bestname:
293 yield bestname, a, bestscore
293 yield bestname, a, bestscore
294
294
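
# Editor's note: findrenames() above scores an (added, removed) pair as
# equal_bytes * 2.0 / (len(a) + len(r)), using Mercurial's C bdiff module to
# find the matching blocks. difflib's ratio() computes the same 2*M/T shape of
# score and makes a convenient, if slower, stand-in for experimenting:
import difflib

def toy_similarity(old_text, new_text):
    return difflib.SequenceMatcher(None, old_text, new_text).ratio()

assert toy_similarity('hello world\n', 'hello world\n') == 1.0
assert toy_similarity('hello world\n', 'hello there\n') > 0.5
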
295 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
295 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
296 if dry_run is None:
296 if dry_run is None:
297 dry_run = opts.get('dry_run')
297 dry_run = opts.get('dry_run')
298 if similarity is None:
298 if similarity is None:
299 similarity = float(opts.get('similarity') or 0)
299 similarity = float(opts.get('similarity') or 0)
300 add, remove = [], []
300 add, remove = [], []
301 audit_path = util.path_auditor(repo.root)
301 audit_path = util.path_auditor(repo.root)
302 m = match(repo, pats, opts)
302 m = match(repo, pats, opts)
303 for abs in repo.walk(m):
303 for abs in repo.walk(m):
304 target = repo.wjoin(abs)
304 target = repo.wjoin(abs)
305 good = True
305 good = True
306 try:
306 try:
307 audit_path(abs)
307 audit_path(abs)
308 except:
308 except:
309 good = False
309 good = False
310 rel = m.rel(abs)
310 rel = m.rel(abs)
311 exact = m.exact(abs)
311 exact = m.exact(abs)
312 if good and abs not in repo.dirstate:
312 if good and abs not in repo.dirstate:
313 add.append(abs)
313 add.append(abs)
314 if repo.ui.verbose or not exact:
314 if repo.ui.verbose or not exact:
315 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
315 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
316 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
316 if repo.dirstate[abs] != 'r' and (not good or not util.lexists(target)
317 or (os.path.isdir(target) and not os.path.islink(target))):
317 or (os.path.isdir(target) and not os.path.islink(target))):
318 remove.append(abs)
318 remove.append(abs)
319 if repo.ui.verbose or not exact:
319 if repo.ui.verbose or not exact:
320 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
320 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
321 if not dry_run:
321 if not dry_run:
322 repo.remove(remove)
322 repo.remove(remove)
323 repo.add(add)
323 repo.add(add)
324 if similarity > 0:
324 if similarity > 0:
325 for old, new, score in findrenames(repo, m, similarity):
325 for old, new, score in findrenames(repo, m, similarity):
326 oldexact, newexact = m.exact(old), m.exact(new)
326 oldexact, newexact = m.exact(old), m.exact(new)
327 if repo.ui.verbose or not oldexact or not newexact:
327 if repo.ui.verbose or not oldexact or not newexact:
328 oldrel, newrel = m.rel(old), m.rel(new)
328 oldrel, newrel = m.rel(old), m.rel(new)
329 repo.ui.status(_('recording removal of %s as rename to %s '
329 repo.ui.status(_('recording removal of %s as rename to %s '
330 '(%d%% similar)\n') %
330 '(%d%% similar)\n') %
331 (oldrel, newrel, score * 100))
331 (oldrel, newrel, score * 100))
332 if not dry_run:
332 if not dry_run:
333 repo.copy(old, new)
333 repo.copy(old, new)
334
334
335 def copy(ui, repo, pats, opts, rename=False):
335 def copy(ui, repo, pats, opts, rename=False):
336 # called with the repo lock held
336 # called with the repo lock held
337 #
337 #
338 # hgsep => pathname that uses "/" to separate directories
338 # hgsep => pathname that uses "/" to separate directories
339 # ossep => pathname that uses os.sep to separate directories
339 # ossep => pathname that uses os.sep to separate directories
340 cwd = repo.getcwd()
340 cwd = repo.getcwd()
341 targets = {}
341 targets = {}
342 after = opts.get("after")
342 after = opts.get("after")
343 dryrun = opts.get("dry_run")
343 dryrun = opts.get("dry_run")
344
344
345 def walkpat(pat):
345 def walkpat(pat):
346 srcs = []
346 srcs = []
347 m = match(repo, [pat], opts, globbed=True)
347 m = match(repo, [pat], opts, globbed=True)
348 for abs in repo.walk(m):
348 for abs in repo.walk(m):
349 state = repo.dirstate[abs]
349 state = repo.dirstate[abs]
350 rel = m.rel(abs)
350 rel = m.rel(abs)
351 exact = m.exact(abs)
351 exact = m.exact(abs)
352 if state in '?r':
352 if state in '?r':
353 if exact and state == '?':
353 if exact and state == '?':
354 ui.warn(_('%s: not copying - file is not managed\n') % rel)
354 ui.warn(_('%s: not copying - file is not managed\n') % rel)
355 if exact and state == 'r':
355 if exact and state == 'r':
356 ui.warn(_('%s: not copying - file has been marked for'
356 ui.warn(_('%s: not copying - file has been marked for'
357 ' remove\n') % rel)
357 ' remove\n') % rel)
358 continue
358 continue
359 # abs: hgsep
359 # abs: hgsep
360 # rel: ossep
360 # rel: ossep
361 srcs.append((abs, rel, exact))
361 srcs.append((abs, rel, exact))
362 return srcs
362 return srcs
363
363
364 # abssrc: hgsep
364 # abssrc: hgsep
365 # relsrc: ossep
365 # relsrc: ossep
366 # otarget: ossep
366 # otarget: ossep
367 def copyfile(abssrc, relsrc, otarget, exact):
367 def copyfile(abssrc, relsrc, otarget, exact):
368 abstarget = util.canonpath(repo.root, cwd, otarget)
368 abstarget = util.canonpath(repo.root, cwd, otarget)
369 reltarget = repo.pathto(abstarget, cwd)
369 reltarget = repo.pathto(abstarget, cwd)
370 target = repo.wjoin(abstarget)
370 target = repo.wjoin(abstarget)
371 src = repo.wjoin(abssrc)
371 src = repo.wjoin(abssrc)
372 state = repo.dirstate[abstarget]
372 state = repo.dirstate[abstarget]
373
373
374 # check for collisions
374 # check for collisions
375 prevsrc = targets.get(abstarget)
375 prevsrc = targets.get(abstarget)
376 if prevsrc is not None:
376 if prevsrc is not None:
377 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
377 ui.warn(_('%s: not overwriting - %s collides with %s\n') %
378 (reltarget, repo.pathto(abssrc, cwd),
378 (reltarget, repo.pathto(abssrc, cwd),
379 repo.pathto(prevsrc, cwd)))
379 repo.pathto(prevsrc, cwd)))
380 return
380 return
381
381
382 # check for overwrites
382 # check for overwrites
383 exists = os.path.exists(target)
383 exists = os.path.exists(target)
384 if not after and exists or after and state in 'mn':
384 if not after and exists or after and state in 'mn':
385 if not opts['force']:
385 if not opts['force']:
386 ui.warn(_('%s: not overwriting - file exists\n') %
386 ui.warn(_('%s: not overwriting - file exists\n') %
387 reltarget)
387 reltarget)
388 return
388 return
389
389
390 if after:
390 if after:
391 if not exists:
391 if not exists:
392 return
392 return
393 elif not dryrun:
393 elif not dryrun:
394 try:
394 try:
395 if exists:
395 if exists:
396 os.unlink(target)
396 os.unlink(target)
397 targetdir = os.path.dirname(target) or '.'
397 targetdir = os.path.dirname(target) or '.'
398 if not os.path.isdir(targetdir):
398 if not os.path.isdir(targetdir):
399 os.makedirs(targetdir)
399 os.makedirs(targetdir)
400 util.copyfile(src, target)
400 util.copyfile(src, target)
401 except IOError, inst:
401 except IOError, inst:
402 if inst.errno == errno.ENOENT:
402 if inst.errno == errno.ENOENT:
403 ui.warn(_('%s: deleted in working copy\n') % relsrc)
403 ui.warn(_('%s: deleted in working copy\n') % relsrc)
404 else:
404 else:
405 ui.warn(_('%s: cannot copy - %s\n') %
405 ui.warn(_('%s: cannot copy - %s\n') %
406 (relsrc, inst.strerror))
406 (relsrc, inst.strerror))
407 return True # report a failure
407 return True # report a failure
408
408
409 if ui.verbose or not exact:
409 if ui.verbose or not exact:
410 if rename:
410 if rename:
411 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
411 ui.status(_('moving %s to %s\n') % (relsrc, reltarget))
412 else:
412 else:
413 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
413 ui.status(_('copying %s to %s\n') % (relsrc, reltarget))
414
414
415 targets[abstarget] = abssrc
415 targets[abstarget] = abssrc
416
416
417 # fix up dirstate
417 # fix up dirstate
418 origsrc = repo.dirstate.copied(abssrc) or abssrc
418 origsrc = repo.dirstate.copied(abssrc) or abssrc
419 if abstarget == origsrc: # copying back a copy?
419 if abstarget == origsrc: # copying back a copy?
420 if state not in 'mn' and not dryrun:
420 if state not in 'mn' and not dryrun:
421 repo.dirstate.normallookup(abstarget)
421 repo.dirstate.normallookup(abstarget)
422 else:
422 else:
423 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
423 if repo.dirstate[origsrc] == 'a' and origsrc == abssrc:
424 if not ui.quiet:
424 if not ui.quiet:
425 ui.warn(_("%s has not been committed yet, so no copy "
425 ui.warn(_("%s has not been committed yet, so no copy "
426 "data will be stored for %s.\n")
426 "data will be stored for %s.\n")
427 % (repo.pathto(origsrc, cwd), reltarget))
427 % (repo.pathto(origsrc, cwd), reltarget))
428 if repo.dirstate[abstarget] in '?r' and not dryrun:
428 if repo.dirstate[abstarget] in '?r' and not dryrun:
429 repo.add([abstarget])
429 repo.add([abstarget])
430 elif not dryrun:
430 elif not dryrun:
431 repo.copy(origsrc, abstarget)
431 repo.copy(origsrc, abstarget)
432
432
433 if rename and not dryrun:
433 if rename and not dryrun:
434 repo.remove([abssrc], not after)
434 repo.remove([abssrc], not after)
435
435
436 # pat: ossep
436 # pat: ossep
437 # dest: ossep
437 # dest: ossep
438 # srcs: list of (hgsep, hgsep, ossep, bool)
438 # srcs: list of (hgsep, hgsep, ossep, bool)
439 # return: function that takes hgsep and returns ossep
439 # return: function that takes hgsep and returns ossep
440 def targetpathfn(pat, dest, srcs):
440 def targetpathfn(pat, dest, srcs):
441 if os.path.isdir(pat):
441 if os.path.isdir(pat):
442 abspfx = util.canonpath(repo.root, cwd, pat)
442 abspfx = util.canonpath(repo.root, cwd, pat)
443 abspfx = util.localpath(abspfx)
443 abspfx = util.localpath(abspfx)
444 if destdirexists:
444 if destdirexists:
445 striplen = len(os.path.split(abspfx)[0])
445 striplen = len(os.path.split(abspfx)[0])
446 else:
446 else:
447 striplen = len(abspfx)
447 striplen = len(abspfx)
448 if striplen:
448 if striplen:
449 striplen += len(os.sep)
449 striplen += len(os.sep)
450 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
450 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:])
451 elif destdirexists:
451 elif destdirexists:
452 res = lambda p: os.path.join(dest,
452 res = lambda p: os.path.join(dest,
453 os.path.basename(util.localpath(p)))
453 os.path.basename(util.localpath(p)))
454 else:
454 else:
455 res = lambda p: dest
455 res = lambda p: dest
456 return res
456 return res
457
457
458 # pat: ossep
458 # pat: ossep
459 # dest: ossep
459 # dest: ossep
460 # srcs: list of (hgsep, hgsep, ossep, bool)
460 # srcs: list of (hgsep, hgsep, ossep, bool)
461 # return: function that takes hgsep and returns ossep
461 # return: function that takes hgsep and returns ossep
462 def targetpathafterfn(pat, dest, srcs):
462 def targetpathafterfn(pat, dest, srcs):
463 if _match.patkind(pat):
463 if _match.patkind(pat):
464 # a mercurial pattern
464 # a mercurial pattern
465 res = lambda p: os.path.join(dest,
465 res = lambda p: os.path.join(dest,
466 os.path.basename(util.localpath(p)))
466 os.path.basename(util.localpath(p)))
467 else:
467 else:
468 abspfx = util.canonpath(repo.root, cwd, pat)
468 abspfx = util.canonpath(repo.root, cwd, pat)
469 if len(abspfx) < len(srcs[0][0]):
469 if len(abspfx) < len(srcs[0][0]):
470 # A directory. Either the target path contains the last
470 # A directory. Either the target path contains the last
471 # component of the source path or it does not.
471 # component of the source path or it does not.
472 def evalpath(striplen):
472 def evalpath(striplen):
473 score = 0
473 score = 0
474 for s in srcs:
474 for s in srcs:
475 t = os.path.join(dest, util.localpath(s[0])[striplen:])
475 t = os.path.join(dest, util.localpath(s[0])[striplen:])
476 if os.path.exists(t):
476 if os.path.exists(t):
477 score += 1
477 score += 1
478 return score
478 return score
479
479
480 abspfx = util.localpath(abspfx)
480 abspfx = util.localpath(abspfx)
481 striplen = len(abspfx)
481 striplen = len(abspfx)
482 if striplen:
482 if striplen:
483 striplen += len(os.sep)
483 striplen += len(os.sep)
484 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
484 if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])):
485 score = evalpath(striplen)
485 score = evalpath(striplen)
486 striplen1 = len(os.path.split(abspfx)[0])
486 striplen1 = len(os.path.split(abspfx)[0])
487 if striplen1:
487 if striplen1:
488 striplen1 += len(os.sep)
488 striplen1 += len(os.sep)
489 if evalpath(striplen1) > score:
489 if evalpath(striplen1) > score:
490 striplen = striplen1
490 striplen = striplen1
491 res = lambda p: os.path.join(dest,
491 res = lambda p: os.path.join(dest,
492 util.localpath(p)[striplen:])
492 util.localpath(p)[striplen:])
493 else:
493 else:
494 # a file
494 # a file
495 if destdirexists:
495 if destdirexists:
496 res = lambda p: os.path.join(dest,
496 res = lambda p: os.path.join(dest,
497 os.path.basename(util.localpath(p)))
497 os.path.basename(util.localpath(p)))
498 else:
498 else:
499 res = lambda p: dest
499 res = lambda p: dest
500 return res
500 return res
501
501
502
502
503 pats = expandpats(pats)
503 pats = expandpats(pats)
504 if not pats:
504 if not pats:
505 raise util.Abort(_('no source or destination specified'))
505 raise util.Abort(_('no source or destination specified'))
506 if len(pats) == 1:
506 if len(pats) == 1:
507 raise util.Abort(_('no destination specified'))
507 raise util.Abort(_('no destination specified'))
508 dest = pats.pop()
508 dest = pats.pop()
509 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
509 destdirexists = os.path.isdir(dest) and not os.path.islink(dest)
510 if not destdirexists:
510 if not destdirexists:
511 if len(pats) > 1 or _match.patkind(pats[0]):
511 if len(pats) > 1 or _match.patkind(pats[0]):
512 raise util.Abort(_('with multiple sources, destination must be an '
512 raise util.Abort(_('with multiple sources, destination must be an '
513 'existing directory'))
513 'existing directory'))
514 if util.endswithsep(dest):
514 if util.endswithsep(dest):
515 raise util.Abort(_('destination %s is not a directory') % dest)
515 raise util.Abort(_('destination %s is not a directory') % dest)
516
516
517 tfn = targetpathfn
517 tfn = targetpathfn
518 if after:
518 if after:
519 tfn = targetpathafterfn
519 tfn = targetpathafterfn
520 copylist = []
520 copylist = []
521 for pat in pats:
521 for pat in pats:
522 srcs = walkpat(pat)
522 srcs = walkpat(pat)
523 if not srcs:
523 if not srcs:
524 continue
524 continue
525 copylist.append((tfn(pat, dest, srcs), srcs))
525 copylist.append((tfn(pat, dest, srcs), srcs))
526 if not copylist:
526 if not copylist:
527 raise util.Abort(_('no files to copy'))
527 raise util.Abort(_('no files to copy'))
528
528
529 errors = 0
529 errors = 0
530 for targetpath, srcs in copylist:
530 for targetpath, srcs in copylist:
531 for abssrc, relsrc, exact in srcs:
531 for abssrc, relsrc, exact in srcs:
532 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
532 if copyfile(abssrc, relsrc, targetpath(abssrc), exact):
533 errors += 1
533 errors += 1
534
534
535 if errors:
535 if errors:
536 ui.warn(_('(consider using --after)\n'))
536 ui.warn(_('(consider using --after)\n'))
537
537
538 return errors
538 return errors
539
539
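Most of the work in copy() goes into deciding what each source should be called at the destination: copying a directory into an existing directory keeps its last path component, while copying it to a fresh name strips the whole source prefix (the striplen computations above). A simplified, standalone illustration of that mapping (POSIX-style separators assumed in the sample results):

    import os

    def map_target(srcfile, srcdir, dest, destdirexists):
        """Mimic targetpathfn for one file inside a copied directory."""
        if destdirexists:
            strip = len(os.path.split(srcdir)[0])   # keep the last component
        else:
            strip = len(srcdir)                     # map contents directly
        if strip:
            strip += len(os.sep)
        return os.path.join(dest, srcfile[strip:])

    # map_target('a/b/f.txt', 'a/b', 'dst', destdirexists=True)  -> 'dst/b/f.txt'
    # map_target('a/b/f.txt', 'a/b', 'dst', destdirexists=False) -> 'dst/f.txt'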
540 def service(opts, parentfn=None, initfn=None, runfn=None):
540 def service(opts, parentfn=None, initfn=None, runfn=None):
541 '''Run a command as a service.'''
541 '''Run a command as a service.'''
542
542
543 if opts['daemon'] and not opts['daemon_pipefds']:
543 if opts['daemon'] and not opts['daemon_pipefds']:
544 rfd, wfd = os.pipe()
544 rfd, wfd = os.pipe()
545 args = sys.argv[:]
545 args = sys.argv[:]
546 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
546 args.append('--daemon-pipefds=%d,%d' % (rfd, wfd))
547 # Don't pass --cwd to the child process, because we've already
547 # Don't pass --cwd to the child process, because we've already
548 # changed directory.
548 # changed directory.
549 for i in xrange(1,len(args)):
549 for i in xrange(1,len(args)):
550 if args[i].startswith('--cwd='):
550 if args[i].startswith('--cwd='):
551 del args[i]
551 del args[i]
552 break
552 break
553 elif args[i].startswith('--cwd'):
553 elif args[i].startswith('--cwd'):
554 del args[i:i+2]
554 del args[i:i+2]
555 break
555 break
556 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
556 pid = os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0),
557 args[0], args)
557 args[0], args)
558 os.close(wfd)
558 os.close(wfd)
559 os.read(rfd, 1)
559 os.read(rfd, 1)
560 if parentfn:
560 if parentfn:
561 return parentfn(pid)
561 return parentfn(pid)
562 else:
562 else:
563 os._exit(0)
563 os._exit(0)
564
564
565 if initfn:
565 if initfn:
566 initfn()
566 initfn()
567
567
568 if opts['pid_file']:
568 if opts['pid_file']:
569 fp = open(opts['pid_file'], 'w')
569 fp = open(opts['pid_file'], 'w')
570 fp.write(str(os.getpid()) + '\n')
570 fp.write(str(os.getpid()) + '\n')
571 fp.close()
571 fp.close()
572
572
573 if opts['daemon_pipefds']:
573 if opts['daemon_pipefds']:
574 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
574 rfd, wfd = [int(x) for x in opts['daemon_pipefds'].split(',')]
575 os.close(rfd)
575 os.close(rfd)
576 try:
576 try:
577 os.setsid()
577 os.setsid()
578 except AttributeError:
578 except AttributeError:
579 pass
579 pass
580 os.write(wfd, 'y')
580 os.write(wfd, 'y')
581 os.close(wfd)
581 os.close(wfd)
582 sys.stdout.flush()
582 sys.stdout.flush()
583 sys.stderr.flush()
583 sys.stderr.flush()
584 fd = os.open(util.nulldev, os.O_RDWR)
584 fd = os.open(util.nulldev, os.O_RDWR)
585 if fd != 0: os.dup2(fd, 0)
585 if fd != 0: os.dup2(fd, 0)
586 if fd != 1: os.dup2(fd, 1)
586 if fd != 1: os.dup2(fd, 1)
587 if fd != 2: os.dup2(fd, 2)
587 if fd != 2: os.dup2(fd, 2)
588 if fd not in (0, 1, 2): os.close(fd)
588 if fd not in (0, 1, 2): os.close(fd)
589
589
590 if runfn:
590 if runfn:
591 return runfn()
591 return runfn()
592
592
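service() detaches by re-running the command with --daemon-pipefds and blocking until the child writes a byte back, which the child only does once it has started a new session and pointed its standard descriptors at the null device. A compressed, POSIX-only sketch of the same handshake using fork() instead of the re-exec (illustration, not the code above):

    import os

    def daemonize():
        rfd, wfd = os.pipe()
        if os.fork():                 # parent: wait until the daemon is ready
            os.close(wfd)
            os.read(rfd, 1)
            os._exit(0)
        os.close(rfd)
        os.setsid()                   # new session, no controlling terminal
        os.write(wfd, b'y')           # unblock the parent
        os.close(wfd)
        null = os.open(os.devnull, os.O_RDWR)
        for fd in (0, 1, 2):          # redirect stdio to /dev/null
            os.dup2(null, fd)
        if null > 2:
            os.close(null)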
593 class changeset_printer(object):
593 class changeset_printer(object):
594 '''show changeset information when templating not requested.'''
594 '''show changeset information when templating not requested.'''
595
595
596 def __init__(self, ui, repo, patch, diffopts, buffered):
596 def __init__(self, ui, repo, patch, diffopts, buffered):
597 self.ui = ui
597 self.ui = ui
598 self.repo = repo
598 self.repo = repo
599 self.buffered = buffered
599 self.buffered = buffered
600 self.patch = patch
600 self.patch = patch
601 self.diffopts = diffopts
601 self.diffopts = diffopts
602 self.header = {}
602 self.header = {}
603 self.hunk = {}
603 self.hunk = {}
604 self.lastheader = None
604 self.lastheader = None
605
605
606 def flush(self, rev):
606 def flush(self, rev):
607 if rev in self.header:
607 if rev in self.header:
608 h = self.header[rev]
608 h = self.header[rev]
609 if h != self.lastheader:
609 if h != self.lastheader:
610 self.lastheader = h
610 self.lastheader = h
611 self.ui.write(h)
611 self.ui.write(h)
612 del self.header[rev]
612 del self.header[rev]
613 if rev in self.hunk:
613 if rev in self.hunk:
614 self.ui.write(self.hunk[rev])
614 self.ui.write(self.hunk[rev])
615 del self.hunk[rev]
615 del self.hunk[rev]
616 return 1
616 return 1
617 return 0
617 return 0
618
618
619 def show(self, ctx, copies=(), **props):
619 def show(self, ctx, copies=(), **props):
620 if self.buffered:
620 if self.buffered:
621 self.ui.pushbuffer()
621 self.ui.pushbuffer()
622 self._show(ctx, copies, props)
622 self._show(ctx, copies, props)
623 self.hunk[ctx.rev()] = self.ui.popbuffer()
623 self.hunk[ctx.rev()] = self.ui.popbuffer()
624 else:
624 else:
625 self._show(ctx, copies, props)
625 self._show(ctx, copies, props)
626
626
627 def _show(self, ctx, copies, props):
627 def _show(self, ctx, copies, props):
628 '''show a single changeset or file revision'''
628 '''show a single changeset or file revision'''
629 changenode = ctx.node()
629 changenode = ctx.node()
630 rev = ctx.rev()
630 rev = ctx.rev()
631
631
632 if self.ui.quiet:
632 if self.ui.quiet:
633 self.ui.write("%d:%s\n" % (rev, short(changenode)))
633 self.ui.write("%d:%s\n" % (rev, short(changenode)))
634 return
634 return
635
635
636 log = self.repo.changelog
636 log = self.repo.changelog
637 changes = log.read(changenode)
637 changes = log.read(changenode)
638 date = util.datestr(changes[2])
638 date = util.datestr(changes[2])
639 extra = changes[5]
639 extra = changes[5]
640 branch = extra.get("branch")
640 branch = extra.get("branch")
641
641
642 hexfunc = self.ui.debugflag and hex or short
642 hexfunc = self.ui.debugflag and hex or short
643
643
644 parents = [(p, hexfunc(log.node(p)))
644 parents = [(p, hexfunc(log.node(p)))
645 for p in self._meaningful_parentrevs(log, rev)]
645 for p in self._meaningful_parentrevs(log, rev)]
646
646
647 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
647 self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)))
648
648
649 # don't show the default branch name
649 # don't show the default branch name
650 if branch != 'default':
650 if branch != 'default':
651 branch = encoding.tolocal(branch)
651 branch = encoding.tolocal(branch)
652 self.ui.write(_("branch: %s\n") % branch)
652 self.ui.write(_("branch: %s\n") % branch)
653 for tag in self.repo.nodetags(changenode):
653 for tag in self.repo.nodetags(changenode):
654 self.ui.write(_("tag: %s\n") % tag)
654 self.ui.write(_("tag: %s\n") % tag)
655 for parent in parents:
655 for parent in parents:
656 self.ui.write(_("parent: %d:%s\n") % parent)
656 self.ui.write(_("parent: %d:%s\n") % parent)
657
657
658 if self.ui.debugflag:
658 if self.ui.debugflag:
659 self.ui.write(_("manifest: %d:%s\n") %
659 self.ui.write(_("manifest: %d:%s\n") %
660 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
660 (self.repo.manifest.rev(changes[0]), hex(changes[0])))
661 self.ui.write(_("user: %s\n") % changes[1])
661 self.ui.write(_("user: %s\n") % changes[1])
662 self.ui.write(_("date: %s\n") % date)
662 self.ui.write(_("date: %s\n") % date)
663
663
664 if self.ui.debugflag:
664 if self.ui.debugflag:
665 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
665 files = self.repo.status(log.parents(changenode)[0], changenode)[:3]
666 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
666 for key, value in zip([_("files:"), _("files+:"), _("files-:")],
667 files):
667 files):
668 if value:
668 if value:
669 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
669 self.ui.write("%-12s %s\n" % (key, " ".join(value)))
670 elif changes[3] and self.ui.verbose:
670 elif changes[3] and self.ui.verbose:
671 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
671 self.ui.write(_("files: %s\n") % " ".join(changes[3]))
672 if copies and self.ui.verbose:
672 if copies and self.ui.verbose:
673 copies = ['%s (%s)' % c for c in copies]
673 copies = ['%s (%s)' % c for c in copies]
674 self.ui.write(_("copies: %s\n") % ' '.join(copies))
674 self.ui.write(_("copies: %s\n") % ' '.join(copies))
675
675
676 if extra and self.ui.debugflag:
676 if extra and self.ui.debugflag:
677 for key, value in sorted(extra.items()):
677 for key, value in sorted(extra.items()):
678 self.ui.write(_("extra: %s=%s\n")
678 self.ui.write(_("extra: %s=%s\n")
679 % (key, value.encode('string_escape')))
679 % (key, value.encode('string_escape')))
680
680
681 description = changes[4].strip()
681 description = changes[4].strip()
682 if description:
682 if description:
683 if self.ui.verbose:
683 if self.ui.verbose:
684 self.ui.write(_("description:\n"))
684 self.ui.write(_("description:\n"))
685 self.ui.write(description)
685 self.ui.write(description)
686 self.ui.write("\n\n")
686 self.ui.write("\n\n")
687 else:
687 else:
688 self.ui.write(_("summary: %s\n") %
688 self.ui.write(_("summary: %s\n") %
689 description.splitlines()[0])
689 description.splitlines()[0])
690 self.ui.write("\n")
690 self.ui.write("\n")
691
691
692 self.showpatch(changenode)
692 self.showpatch(changenode)
693
693
694 def showpatch(self, node):
694 def showpatch(self, node):
695 if self.patch:
695 if self.patch:
696 prev = self.repo.changelog.parents(node)[0]
696 prev = self.repo.changelog.parents(node)[0]
697 chunks = patch.diff(self.repo, prev, node, match=self.patch,
697 chunks = patch.diff(self.repo, prev, node, match=self.patch,
698 opts=patch.diffopts(self.ui, self.diffopts))
698 opts=patch.diffopts(self.ui, self.diffopts))
699 for chunk in chunks:
699 for chunk in chunks:
700 self.ui.write(chunk)
700 self.ui.write(chunk)
701 self.ui.write("\n")
701 self.ui.write("\n")
702
702
703 def _meaningful_parentrevs(self, log, rev):
703 def _meaningful_parentrevs(self, log, rev):
704 """Return list of meaningful (or all if debug) parentrevs for rev.
704 """Return list of meaningful (or all if debug) parentrevs for rev.
705
705
706 For merges (two non-nullrev revisions) both parents are meaningful.
706 For merges (two non-nullrev revisions) both parents are meaningful.
707 Otherwise the first parent revision is considered meaningful if it
707 Otherwise the first parent revision is considered meaningful if it
708 is not the preceding revision.
708 is not the preceding revision.
709 """
709 """
710 parents = log.parentrevs(rev)
710 parents = log.parentrevs(rev)
711 if not self.ui.debugflag and parents[1] == nullrev:
711 if not self.ui.debugflag and parents[1] == nullrev:
712 if parents[0] >= rev - 1:
712 if parents[0] >= rev - 1:
713 parents = []
713 parents = []
714 else:
714 else:
715 parents = [parents[0]]
715 parents = [parents[0]]
716 return parents
716 return parents
717
717
718
718
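The "meaningful parents" rule keeps log output quiet for linear history: a changeset whose only parent is the immediately preceding revision shows no parent line, one that jumps further back shows its first parent, and a merge shows both. The rule in isolation (ignoring --debug, which always shows both parents):

    NULLREV = -1

    def meaningful_parents(rev, p1, p2):
        if p2 == NULLREV:                 # not a merge
            return [] if p1 >= rev - 1 else [p1]
        return [p1, p2]                   # merge: both parents matter

    # meaningful_parents(7, 6, -1) -> []      linear history, parent implied
    # meaningful_parents(7, 3, -1) -> [3]     first parent is not rev - 1
    # meaningful_parents(7, 5, 6)  -> [5, 6]  merge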
719 class changeset_templater(changeset_printer):
719 class changeset_templater(changeset_printer):
720 '''format changeset information.'''
720 '''format changeset information.'''
721
721
722 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
722 def __init__(self, ui, repo, patch, diffopts, mapfile, buffered):
723 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
723 changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered)
724 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
724 formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12])
725 self.t = templater.templater(mapfile, {'formatnode': formatnode},
725 self.t = templater.templater(mapfile, {'formatnode': formatnode},
726 cache={
726 cache={
727 'parent': '{rev}:{node|formatnode} ',
727 'parent': '{rev}:{node|formatnode} ',
728 'manifest': '{rev}:{node|formatnode}',
728 'manifest': '{rev}:{node|formatnode}',
729 'filecopy': '{name} ({source})'})
729 'filecopy': '{name} ({source})'})
730
730
731 def use_template(self, t):
731 def use_template(self, t):
732 '''set template string to use'''
732 '''set template string to use'''
733 self.t.cache['changeset'] = t
733 self.t.cache['changeset'] = t
734
734
735 def _meaningful_parentrevs(self, ctx):
735 def _meaningful_parentrevs(self, ctx):
736 """Return list of meaningful (or all if debug) parentrevs for rev.
736 """Return list of meaningful (or all if debug) parentrevs for rev.
737 """
737 """
738 parents = ctx.parents()
738 parents = ctx.parents()
739 if len(parents) > 1:
739 if len(parents) > 1:
740 return parents
740 return parents
741 if self.ui.debugflag:
741 if self.ui.debugflag:
742 return [parents[0], self.repo['null']]
742 return [parents[0], self.repo['null']]
743 if parents[0].rev() >= ctx.rev() - 1:
743 if parents[0].rev() >= ctx.rev() - 1:
744 return []
744 return []
745 return parents
745 return parents
746
746
747 def _show(self, ctx, copies, props):
747 def _show(self, ctx, copies, props):
748 '''show a single changeset or file revision'''
748 '''show a single changeset or file revision'''
749
749
750 def showlist(name, values, plural=None, **args):
750 def showlist(name, values, plural=None, **args):
751 '''expand set of values.
751 '''expand set of values.
752 name is name of key in template map.
752 name is name of key in template map.
753 values is list of strings or dicts.
753 values is list of strings or dicts.
754 plural is plural of name, if not simply name + 's'.
754 plural is plural of name, if not simply name + 's'.
755
755
756 expansion works like this, given name 'foo'.
756 expansion works like this, given name 'foo'.
757
757
758 if values is empty, expand 'no_foos'.
758 if values is empty, expand 'no_foos'.
759
759
760 if 'foo' not in template map, return values as a string,
760 if 'foo' not in template map, return values as a string,
761 joined by space.
761 joined by space.
762
762
763 expand 'start_foos'.
763 expand 'start_foos'.
764
764
765 for each value, expand 'foo'. if 'last_foo' in template
765 for each value, expand 'foo'. if 'last_foo' in template
766 map, expand it instead of 'foo' for last key.
766 map, expand it instead of 'foo' for last key.
767
767
768 expand 'end_foos'.
768 expand 'end_foos'.
769 '''
769 '''
770 if plural: names = plural
770 if plural: names = plural
771 else: names = name + 's'
771 else: names = name + 's'
772 if not values:
772 if not values:
773 noname = 'no_' + names
773 noname = 'no_' + names
774 if noname in self.t:
774 if noname in self.t:
775 yield self.t(noname, **args)
775 yield self.t(noname, **args)
776 return
776 return
777 if name not in self.t:
777 if name not in self.t:
778 if isinstance(values[0], str):
778 if isinstance(values[0], str):
779 yield ' '.join(values)
779 yield ' '.join(values)
780 else:
780 else:
781 for v in values:
781 for v in values:
782 yield dict(v, **args)
782 yield dict(v, **args)
783 return
783 return
784 startname = 'start_' + names
784 startname = 'start_' + names
785 if startname in self.t:
785 if startname in self.t:
786 yield self.t(startname, **args)
786 yield self.t(startname, **args)
787 vargs = args.copy()
787 vargs = args.copy()
788 def one(v, tag=name):
788 def one(v, tag=name):
789 try:
789 try:
790 vargs.update(v)
790 vargs.update(v)
791 except (AttributeError, ValueError):
791 except (AttributeError, ValueError):
792 try:
792 try:
793 for a, b in v:
793 for a, b in v:
794 vargs[a] = b
794 vargs[a] = b
795 except ValueError:
795 except ValueError:
796 vargs[name] = v
796 vargs[name] = v
797 return self.t(tag, **vargs)
797 return self.t(tag, **vargs)
798 lastname = 'last_' + name
798 lastname = 'last_' + name
799 if lastname in self.t:
799 if lastname in self.t:
800 last = values.pop()
800 last = values.pop()
801 else:
801 else:
802 last = None
802 last = None
803 for v in values:
803 for v in values:
804 yield one(v)
804 yield one(v)
805 if last is not None:
805 if last is not None:
806 yield one(last, tag=lastname)
806 yield one(last, tag=lastname)
807 endname = 'end_' + names
807 endname = 'end_' + names
808 if endname in self.t:
808 if endname in self.t:
809 yield self.t(endname, **args)
809 yield self.t(endname, **args)
810
810
811 def showbranches(**args):
811 def showbranches(**args):
812 branch = ctx.branch()
812 branch = ctx.branch()
813 if branch != 'default':
813 if branch != 'default':
814 branch = encoding.tolocal(branch)
814 branch = encoding.tolocal(branch)
815 return showlist('branch', [branch], plural='branches', **args)
815 return showlist('branch', [branch], plural='branches', **args)
816
816
817 def showparents(**args):
817 def showparents(**args):
818 parents = [[('rev', p.rev()), ('node', p.hex())]
818 parents = [[('rev', p.rev()), ('node', p.hex())]
819 for p in self._meaningful_parentrevs(ctx)]
819 for p in self._meaningful_parentrevs(ctx)]
820 return showlist('parent', parents, **args)
820 return showlist('parent', parents, **args)
821
821
822 def showtags(**args):
822 def showtags(**args):
823 return showlist('tag', ctx.tags(), **args)
823 return showlist('tag', ctx.tags(), **args)
824
824
825 def showextras(**args):
825 def showextras(**args):
826 for key, value in sorted(ctx.extra().items()):
826 for key, value in sorted(ctx.extra().items()):
827 args = args.copy()
827 args = args.copy()
828 args.update(dict(key=key, value=value))
828 args.update(dict(key=key, value=value))
829 yield self.t('extra', **args)
829 yield self.t('extra', **args)
830
830
831 def showcopies(**args):
831 def showcopies(**args):
832 c = [{'name': x[0], 'source': x[1]} for x in copies]
832 c = [{'name': x[0], 'source': x[1]} for x in copies]
833 return showlist('file_copy', c, plural='file_copies', **args)
833 return showlist('file_copy', c, plural='file_copies', **args)
834
834
835 files = []
835 files = []
836 def getfiles():
836 def getfiles():
837 if not files:
837 if not files:
838 files[:] = self.repo.status(ctx.parents()[0].node(),
838 files[:] = self.repo.status(ctx.parents()[0].node(),
839 ctx.node())[:3]
839 ctx.node())[:3]
840 return files
840 return files
841 def showfiles(**args):
841 def showfiles(**args):
842 return showlist('file', ctx.files(), **args)
842 return showlist('file', ctx.files(), **args)
843 def showmods(**args):
843 def showmods(**args):
844 return showlist('file_mod', getfiles()[0], **args)
844 return showlist('file_mod', getfiles()[0], **args)
845 def showadds(**args):
845 def showadds(**args):
846 return showlist('file_add', getfiles()[1], **args)
846 return showlist('file_add', getfiles()[1], **args)
847 def showdels(**args):
847 def showdels(**args):
848 return showlist('file_del', getfiles()[2], **args)
848 return showlist('file_del', getfiles()[2], **args)
849 def showmanifest(**args):
849 def showmanifest(**args):
850 args = args.copy()
850 args = args.copy()
851 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
851 args.update(dict(rev=self.repo.manifest.rev(ctx.changeset()[0]),
852 node=hex(ctx.changeset()[0])))
852 node=hex(ctx.changeset()[0])))
853 return self.t('manifest', **args)
853 return self.t('manifest', **args)
854
854
855 def showdiffstat(**args):
855 def showdiffstat(**args):
856 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
856 diff = patch.diff(self.repo, ctx.parents()[0].node(), ctx.node())
857 files, adds, removes = 0, 0, 0
857 files, adds, removes = 0, 0, 0
858 for i in patch.diffstatdata(util.iterlines(diff)):
858 for i in patch.diffstatdata(util.iterlines(diff)):
859 files += 1
859 files += 1
860 adds += i[1]
860 adds += i[1]
861 removes += i[2]
861 removes += i[2]
862 return '%s: +%s/-%s' % (files, adds, removes)
862 return '%s: +%s/-%s' % (files, adds, removes)
863
863
864 defprops = {
864 defprops = {
865 'author': ctx.user(),
865 'author': ctx.user(),
866 'branches': showbranches,
866 'branches': showbranches,
867 'date': ctx.date(),
867 'date': ctx.date(),
868 'desc': ctx.description().strip(),
868 'desc': ctx.description().strip(),
869 'file_adds': showadds,
869 'file_adds': showadds,
870 'file_dels': showdels,
870 'file_dels': showdels,
871 'file_mods': showmods,
871 'file_mods': showmods,
872 'files': showfiles,
872 'files': showfiles,
873 'file_copies': showcopies,
873 'file_copies': showcopies,
874 'manifest': showmanifest,
874 'manifest': showmanifest,
875 'node': ctx.hex(),
875 'node': ctx.hex(),
876 'parents': showparents,
876 'parents': showparents,
877 'rev': ctx.rev(),
877 'rev': ctx.rev(),
878 'tags': showtags,
878 'tags': showtags,
879 'extras': showextras,
879 'extras': showextras,
880 'diffstat': showdiffstat,
880 'diffstat': showdiffstat,
881 }
881 }
882 props = props.copy()
882 props = props.copy()
883 props.update(defprops)
883 props.update(defprops)
884
884
885 # find correct templates for current mode
885 # find correct templates for current mode
886
886
887 tmplmodes = [
887 tmplmodes = [
888 (True, None),
888 (True, None),
889 (self.ui.verbose, 'verbose'),
889 (self.ui.verbose, 'verbose'),
890 (self.ui.quiet, 'quiet'),
890 (self.ui.quiet, 'quiet'),
891 (self.ui.debugflag, 'debug'),
891 (self.ui.debugflag, 'debug'),
892 ]
892 ]
893
893
894 types = {'header': '', 'changeset': 'changeset'}
894 types = {'header': '', 'changeset': 'changeset'}
895 for mode, postfix in tmplmodes:
895 for mode, postfix in tmplmodes:
896 for type in types:
896 for type in types:
897 cur = postfix and ('%s_%s' % (type, postfix)) or type
897 cur = postfix and ('%s_%s' % (type, postfix)) or type
898 if mode and cur in self.t:
898 if mode and cur in self.t:
899 types[type] = cur
899 types[type] = cur
900
900
901 try:
901 try:
902
902
903 # write header
903 # write header
904 if types['header']:
904 if types['header']:
905 h = templater.stringify(self.t(types['header'], **props))
905 h = templater.stringify(self.t(types['header'], **props))
906 if self.buffered:
906 if self.buffered:
907 self.header[ctx.rev()] = h
907 self.header[ctx.rev()] = h
908 else:
908 else:
909 self.ui.write(h)
909 self.ui.write(h)
910
910
911 # write changeset metadata, then patch if requested
911 # write changeset metadata, then patch if requested
912 key = types['changeset']
912 key = types['changeset']
913 self.ui.write(templater.stringify(self.t(key, **props)))
913 self.ui.write(templater.stringify(self.t(key, **props)))
914 self.showpatch(ctx.node())
914 self.showpatch(ctx.node())
915
915
916 except KeyError, inst:
916 except KeyError, inst:
917 msg = _("%s: no key named '%s'")
917 msg = _("%s: no key named '%s'")
918 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
918 raise util.Abort(msg % (self.t.mapfile, inst.args[0]))
919 except SyntaxError, inst:
919 except SyntaxError, inst:
920 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
920 raise util.Abort(_('%s: %s') % (self.t.mapfile, inst.args[0]))
921
921
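showlist() encodes a small naming convention for template maps: for a key 'foo' it consults 'no_foos', 'start_foos', 'foo', 'last_foo' and 'end_foos' and emits whichever are defined. A stripped-down sketch of that expansion, with plain format strings standing in for the templater (assumed template map, illustration only):

    def showlist(name, values, templates, plural=None):
        names = plural or name + 's'
        if not values:
            if 'no_' + names in templates:
                yield templates['no_' + names]
            return
        if name not in templates:              # no per-item template
            yield ' '.join(values)
            return
        if 'start_' + names in templates:
            yield templates['start_' + names]
        last = values[-1] if 'last_' + name in templates else None
        for v in values[:-1] if last is not None else values:
            yield templates[name].format(**{name: v})
        if last is not None:
            yield templates['last_' + name].format(**{name: last})
        if 'end_' + names in templates:
            yield templates['end_' + names]

    # ''.join(showlist('tag', ['stable', 'tip'],
    #                  {'tag': '{tag}, ', 'last_tag': '{tag}\n'}))
    # -> 'stable, tip\n'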
922 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
922 def show_changeset(ui, repo, opts, buffered=False, matchfn=False):
923 """show one changeset using template or regular display.
923 """show one changeset using template or regular display.
924
924
925 Display format will be the first non-empty hit of:
925 Display format will be the first non-empty hit of:
926 1. option 'template'
926 1. option 'template'
927 2. option 'style'
927 2. option 'style'
928 3. [ui] setting 'logtemplate'
928 3. [ui] setting 'logtemplate'
929 4. [ui] setting 'style'
929 4. [ui] setting 'style'
930 If all of these values are either unset or the empty string,
930 If all of these values are either unset or the empty string,
931 regular display via changeset_printer() is done.
931 regular display via changeset_printer() is done.
932 """
932 """
933 # options
933 # options
934 patch = False
934 patch = False
935 if opts.get('patch'):
935 if opts.get('patch'):
936 patch = matchfn or matchall(repo)
936 patch = matchfn or matchall(repo)
937
937
938 tmpl = opts.get('template')
938 tmpl = opts.get('template')
939 style = None
939 style = None
940 if tmpl:
940 if tmpl:
941 tmpl = templater.parsestring(tmpl, quoted=False)
941 tmpl = templater.parsestring(tmpl, quoted=False)
942 else:
942 else:
943 style = opts.get('style')
943 style = opts.get('style')
944
944
945 # ui settings
945 # ui settings
946 if not (tmpl or style):
946 if not (tmpl or style):
947 tmpl = ui.config('ui', 'logtemplate')
947 tmpl = ui.config('ui', 'logtemplate')
948 if tmpl:
948 if tmpl:
949 tmpl = templater.parsestring(tmpl)
949 tmpl = templater.parsestring(tmpl)
950 else:
950 else:
951 style = ui.config('ui', 'style')
951 style = ui.config('ui', 'style')
952
952
953 if not (tmpl or style):
953 if not (tmpl or style):
954 return changeset_printer(ui, repo, patch, opts, buffered)
954 return changeset_printer(ui, repo, patch, opts, buffered)
955
955
956 mapfile = None
956 mapfile = None
957 if style and not tmpl:
957 if style and not tmpl:
958 mapfile = style
958 mapfile = style
959 if not os.path.split(mapfile)[0]:
959 if not os.path.split(mapfile)[0]:
960 mapname = (templater.templatepath('map-cmdline.' + mapfile)
960 mapname = (templater.templatepath('map-cmdline.' + mapfile)
961 or templater.templatepath(mapfile))
961 or templater.templatepath(mapfile))
962 if mapname: mapfile = mapname
962 if mapname: mapfile = mapname
963
963
964 try:
964 try:
965 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
965 t = changeset_templater(ui, repo, patch, opts, mapfile, buffered)
966 except SyntaxError, inst:
966 except SyntaxError, inst:
967 raise util.Abort(inst.args[0])
967 raise util.Abort(inst.args[0])
968 if tmpl: t.use_template(tmpl)
968 if tmpl: t.use_template(tmpl)
969 return t
969 return t
970
970
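In practice the --template and --style options win, and the [ui] settings are only consulted when neither option is given, logtemplate before style. For example, an hgrc such as the following (example values, not defaults) affects plain 'hg log' but is ignored as soon as a template or style is passed explicitly:

    [ui]
    # used when no --template/--style option is given
    logtemplate = {rev}:{node|short} {desc|firstline}\n
    # consulted only if logtemplate is empty as well
    style = compact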
971 def finddate(ui, repo, date):
971 def finddate(ui, repo, date):
972 """Find the tipmost changeset that matches the given date spec"""
972 """Find the tipmost changeset that matches the given date spec"""
973 df = util.matchdate(date)
973 df = util.matchdate(date)
974 get = util.cachefunc(lambda r: repo[r].changeset())
974 get = util.cachefunc(lambda r: repo[r].changeset())
975 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
975 changeiter, matchfn = walkchangerevs(ui, repo, [], get, {'rev':None})
976 results = {}
976 results = {}
977 for st, rev, fns in changeiter:
977 for st, rev, fns in changeiter:
978 if st == 'add':
978 if st == 'add':
979 d = get(rev)[2]
979 d = get(rev)[2]
980 if df(d[0]):
980 if df(d[0]):
981 results[rev] = d
981 results[rev] = d
982 elif st == 'iter':
982 elif st == 'iter':
983 if rev in results:
983 if rev in results:
984 ui.status(_("Found revision %s from %s\n") %
984 ui.status(_("Found revision %s from %s\n") %
985 (rev, util.datestr(results[rev])))
985 (rev, util.datestr(results[rev])))
986 return str(rev)
986 return str(rev)
987
987
988 raise util.Abort(_("revision matching date not found"))
988 raise util.Abort(_("revision matching date not found"))
989
989
990 def walkchangerevs(ui, repo, pats, change, opts):
990 def walkchangerevs(ui, repo, pats, change, opts):
991 '''Iterate over files and the revs in which they changed.
991 '''Iterate over files and the revs in which they changed.
992
992
993 Callers most commonly need to iterate backwards over the history
993 Callers most commonly need to iterate backwards over the history
994 in which they are interested. Doing so has awful (quadratic-looking)
994 in which they are interested. Doing so has awful (quadratic-looking)
995 performance, so we use iterators in a "windowed" way.
995 performance, so we use iterators in a "windowed" way.
996
996
997 We walk a window of revisions in the desired order. Within the
997 We walk a window of revisions in the desired order. Within the
998 window, we first walk forwards to gather data, then in the desired
998 window, we first walk forwards to gather data, then in the desired
999 order (usually backwards) to display it.
999 order (usually backwards) to display it.
1000
1000
1001 This function returns an (iterator, matchfn) tuple. The iterator
1001 This function returns an (iterator, matchfn) tuple. The iterator
1002 yields 3-tuples. They will be of one of the following forms:
1002 yields 3-tuples. They will be of one of the following forms:
1003
1003
1004 "window", incrementing, lastrev: stepping through a window,
1004 "window", incrementing, lastrev: stepping through a window,
1005 positive if walking forwards through revs, last rev in the
1005 positive if walking forwards through revs, last rev in the
1006 sequence iterated over - use to reset state for the current window
1006 sequence iterated over - use to reset state for the current window
1007
1007
1008 "add", rev, fns: out-of-order traversal of the given filenames
1008 "add", rev, fns: out-of-order traversal of the given filenames
1009 fns, which changed during revision rev - use to gather data for
1009 fns, which changed during revision rev - use to gather data for
1010 possible display
1010 possible display
1011
1011
1012 "iter", rev, None: in-order traversal of the revs earlier iterated
1012 "iter", rev, None: in-order traversal of the revs earlier iterated
1013 over with "add" - use to display data'''
1013 over with "add" - use to display data'''
1014
1014
1015 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1015 def increasing_windows(start, end, windowsize=8, sizelimit=512):
1016 if start < end:
1016 if start < end:
1017 while start < end:
1017 while start < end:
1018 yield start, min(windowsize, end-start)
1018 yield start, min(windowsize, end-start)
1019 start += windowsize
1019 start += windowsize
1020 if windowsize < sizelimit:
1020 if windowsize < sizelimit:
1021 windowsize *= 2
1021 windowsize *= 2
1022 else:
1022 else:
1023 while start > end:
1023 while start > end:
1024 yield start, min(windowsize, start-end-1)
1024 yield start, min(windowsize, start-end-1)
1025 start -= windowsize
1025 start -= windowsize
1026 if windowsize < sizelimit:
1026 if windowsize < sizelimit:
1027 windowsize *= 2
1027 windowsize *= 2
1028
1028
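The increasing_windows() helper grows its batches geometrically, starting at 8 revisions and doubling up to the 512 cap, so even a long history is covered in a handful of windows. Its forward half, run on its own, shows the progression:

    def increasing_windows(start, end, windowsize=8, sizelimit=512):
        while start < end:
            yield start, min(windowsize, end - start)
            start += windowsize
            if windowsize < sizelimit:
                windowsize *= 2

    print(list(increasing_windows(0, 100)))
    # [(0, 8), (8, 16), (24, 32), (56, 44)]
    # iterate() below slices revs[0:8], revs[8:24], revs[24:56], revs[56:100]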
1029 m = match(repo, pats, opts)
1029 m = match(repo, pats, opts)
1030 follow = opts.get('follow') or opts.get('follow_first')
1030 follow = opts.get('follow') or opts.get('follow_first')
1031
1031
1032 if not len(repo):
1032 if not len(repo):
1033 return [], m
1033 return [], m
1034
1034
1035 if follow:
1035 if follow:
1036 defrange = '%s:0' % repo['.'].rev()
1036 defrange = '%s:0' % repo['.'].rev()
1037 else:
1037 else:
1038 defrange = '-1:0'
1038 defrange = '-1:0'
1039 revs = revrange(repo, opts['rev'] or [defrange])
1039 revs = revrange(repo, opts['rev'] or [defrange])
1040 wanted = set()
1040 wanted = set()
1041 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1041 slowpath = m.anypats() or (m.files() and opts.get('removed'))
1042 fncache = {}
1042 fncache = {}
1043
1043
1044 if not slowpath and not m.files():
1044 if not slowpath and not m.files():
1045 # No files, no patterns. Display all revs.
1045 # No files, no patterns. Display all revs.
1046 wanted = set(revs)
1046 wanted = set(revs)
1047 copies = []
1047 copies = []
1048 if not slowpath:
1048 if not slowpath:
1049 # Only files, no patterns. Check the history of each file.
1049 # Only files, no patterns. Check the history of each file.
1050 def filerevgen(filelog, node):
1050 def filerevgen(filelog, node):
1051 cl_count = len(repo)
1051 cl_count = len(repo)
1052 if node is None:
1052 if node is None:
1053 last = len(filelog) - 1
1053 last = len(filelog) - 1
1054 else:
1054 else:
1055 last = filelog.rev(node)
1055 last = filelog.rev(node)
1056 for i, window in increasing_windows(last, nullrev):
1056 for i, window in increasing_windows(last, nullrev):
1057 revs = []
1057 revs = []
1058 for j in xrange(i - window, i + 1):
1058 for j in xrange(i - window, i + 1):
1059 n = filelog.node(j)
1059 n = filelog.node(j)
1060 revs.append((filelog.linkrev(j),
1060 revs.append((filelog.linkrev(j),
1061 follow and filelog.renamed(n)))
1061 follow and filelog.renamed(n)))
1062 for rev in reversed(revs):
1062 for rev in reversed(revs):
1063 # only yield rev for which we have the changelog, it can
1063 # only yield rev for which we have the changelog, it can
1064 # happen while doing "hg log" during a pull or commit
1064 # happen while doing "hg log" during a pull or commit
1065 if rev[0] < cl_count:
1065 if rev[0] < cl_count:
1066 yield rev
1066 yield rev
1067 def iterfiles():
1067 def iterfiles():
1068 for filename in m.files():
1068 for filename in m.files():
1069 yield filename, None
1069 yield filename, None
1070 for filename_node in copies:
1070 for filename_node in copies:
1071 yield filename_node
1071 yield filename_node
1072 minrev, maxrev = min(revs), max(revs)
1072 minrev, maxrev = min(revs), max(revs)
1073 for file_, node in iterfiles():
1073 for file_, node in iterfiles():
1074 filelog = repo.file(file_)
1074 filelog = repo.file(file_)
1075 if not len(filelog):
1075 if not len(filelog):
1076 if node is None:
1076 if node is None:
1077 # A zero count may be a directory or deleted file, so
1077 # A zero count may be a directory or deleted file, so
1078 # try to find matching entries on the slow path.
1078 # try to find matching entries on the slow path.
1079 if follow:
1079 if follow:
1080 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1080 raise util.Abort(_('cannot follow nonexistent file: "%s"') % file_)
1081 slowpath = True
1081 slowpath = True
1082 break
1082 break
1083 else:
1083 else:
1084 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1084 ui.warn(_('%s:%s copy source revision cannot be found!\n')
1085 % (file_, short(node)))
1085 % (file_, short(node)))
1086 continue
1086 continue
1087 for rev, copied in filerevgen(filelog, node):
1087 for rev, copied in filerevgen(filelog, node):
1088 if rev <= maxrev:
1088 if rev <= maxrev:
1089 if rev < minrev:
1089 if rev < minrev:
1090 break
1090 break
1091 fncache.setdefault(rev, [])
1091 fncache.setdefault(rev, [])
1092 fncache[rev].append(file_)
1092 fncache[rev].append(file_)
1093 wanted.add(rev)
1093 wanted.add(rev)
1094 if follow and copied:
1094 if follow and copied:
1095 copies.append(copied)
1095 copies.append(copied)
1096 if slowpath:
1096 if slowpath:
1097 if follow:
1097 if follow:
1098 raise util.Abort(_('can only follow copies/renames for explicit '
1098 raise util.Abort(_('can only follow copies/renames for explicit '
1099 'filenames'))
1099 'filenames'))
1100
1100
1101 # The slow path checks files modified in every changeset.
1101 # The slow path checks files modified in every changeset.
1102 def changerevgen():
1102 def changerevgen():
1103 for i, window in increasing_windows(len(repo) - 1, nullrev):
1103 for i, window in increasing_windows(len(repo) - 1, nullrev):
1104 for j in xrange(i - window, i + 1):
1104 for j in xrange(i - window, i + 1):
1105 yield j, change(j)[3]
1105 yield j, change(j)[3]
1106
1106
1107 for rev, changefiles in changerevgen():
1107 for rev, changefiles in changerevgen():
1108 matches = filter(m, changefiles)
1108 matches = filter(m, changefiles)
1109 if matches:
1109 if matches:
1110 fncache[rev] = matches
1110 fncache[rev] = matches
1111 wanted.add(rev)
1111 wanted.add(rev)
1112
1112
1113 class followfilter:
1113 class followfilter(object):
1114 def __init__(self, onlyfirst=False):
1114 def __init__(self, onlyfirst=False):
1115 self.startrev = nullrev
1115 self.startrev = nullrev
1116 self.roots = []
1116 self.roots = []
1117 self.onlyfirst = onlyfirst
1117 self.onlyfirst = onlyfirst
1118
1118
1119 def match(self, rev):
1119 def match(self, rev):
1120 def realparents(rev):
1120 def realparents(rev):
1121 if self.onlyfirst:
1121 if self.onlyfirst:
1122 return repo.changelog.parentrevs(rev)[0:1]
1122 return repo.changelog.parentrevs(rev)[0:1]
1123 else:
1123 else:
1124 return filter(lambda x: x != nullrev,
1124 return filter(lambda x: x != nullrev,
1125 repo.changelog.parentrevs(rev))
1125 repo.changelog.parentrevs(rev))
1126
1126
1127 if self.startrev == nullrev:
1127 if self.startrev == nullrev:
1128 self.startrev = rev
1128 self.startrev = rev
1129 return True
1129 return True
1130
1130
1131 if rev > self.startrev:
1131 if rev > self.startrev:
1132 # forward: all descendants
1132 # forward: all descendants
1133 if not self.roots:
1133 if not self.roots:
1134 self.roots.append(self.startrev)
1134 self.roots.append(self.startrev)
1135 for parent in realparents(rev):
1135 for parent in realparents(rev):
1136 if parent in self.roots:
1136 if parent in self.roots:
1137 self.roots.append(rev)
1137 self.roots.append(rev)
1138 return True
1138 return True
1139 else:
1139 else:
1140 # backwards: all parents
1140 # backwards: all parents
1141 if not self.roots:
1141 if not self.roots:
1142 self.roots.extend(realparents(self.startrev))
1142 self.roots.extend(realparents(self.startrev))
1143 if rev in self.roots:
1143 if rev in self.roots:
1144 self.roots.remove(rev)
1144 self.roots.remove(rev)
1145 self.roots.extend(realparents(rev))
1145 self.roots.extend(realparents(rev))
1146 return True
1146 return True
1147
1147
1148 return False
1148 return False
1149
1149
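Going forward, followfilter admits a revision only if one of its real parents has already been admitted, i.e. if it descends from startrev; going backward it admits a revision only if it is in the current root set, and then replaces it there with its parents. A forward-only reimplementation over a toy revision graph (hypothetical data, same semantics):

    NULLREV = -1

    def follow_forward(startrev, parentrevs):
        """Yield revs above startrev that descend from it."""
        roots = {startrev}
        for rev in sorted(r for r in parentrevs if r > startrev):
            if any(p in roots for p in parentrevs[rev] if p != NULLREV):
                roots.add(rev)
                yield rev

    # graph = {4: (2, -1), 5: (4, -1), 6: (3, -1), 7: (5, 6)}
    # list(follow_forward(4, graph)) -> [5, 7]   (rev 6 branches off rev 3)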
1150 # it might be worthwhile to do this in the iterator if the rev range
1150 # it might be worthwhile to do this in the iterator if the rev range
1151 # is descending and the prune args are all within that range
1151 # is descending and the prune args are all within that range
1152 for rev in opts.get('prune', ()):
1152 for rev in opts.get('prune', ()):
1153 rev = repo.changelog.rev(repo.lookup(rev))
1153 rev = repo.changelog.rev(repo.lookup(rev))
1154 ff = followfilter()
1154 ff = followfilter()
1155 stop = min(revs[0], revs[-1])
1155 stop = min(revs[0], revs[-1])
1156 for x in xrange(rev, stop-1, -1):
1156 for x in xrange(rev, stop-1, -1):
1157 if ff.match(x):
1157 if ff.match(x):
1158 wanted.discard(x)
1158 wanted.discard(x)
1159
1159
1160 def iterate():
1160 def iterate():
1161 if follow and not m.files():
1161 if follow and not m.files():
1162 ff = followfilter(onlyfirst=opts.get('follow_first'))
1162 ff = followfilter(onlyfirst=opts.get('follow_first'))
1163 def want(rev):
1163 def want(rev):
1164 return ff.match(rev) and rev in wanted
1164 return ff.match(rev) and rev in wanted
1165 else:
1165 else:
1166 def want(rev):
1166 def want(rev):
1167 return rev in wanted
1167 return rev in wanted
1168
1168
1169 for i, window in increasing_windows(0, len(revs)):
1169 for i, window in increasing_windows(0, len(revs)):
1170 yield 'window', revs[0] < revs[-1], revs[-1]
1170 yield 'window', revs[0] < revs[-1], revs[-1]
1171 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1171 nrevs = [rev for rev in revs[i:i+window] if want(rev)]
1172 for rev in sorted(nrevs):
1172 for rev in sorted(nrevs):
1173 fns = fncache.get(rev)
1173 fns = fncache.get(rev)
1174 if not fns:
1174 if not fns:
1175 def fns_generator():
1175 def fns_generator():
1176 for f in change(rev)[3]:
1176 for f in change(rev)[3]:
1177 if m(f):
1177 if m(f):
1178 yield f
1178 yield f
1179 fns = fns_generator()
1179 fns = fns_generator()
1180 yield 'add', rev, fns
1180 yield 'add', rev, fns
1181 for rev in nrevs:
1181 for rev in nrevs:
1182 yield 'iter', rev, None
1182 yield 'iter', rev, None
1183 return iterate(), m
1183 return iterate(), m
1184
1184
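Callers drive the returned iterator in two passes per window, exactly as finddate() does above: collect data on the out-of-order 'add' events, then produce output on the in-order 'iter' events. Schematically (repo, prepare and display are placeholders, not real API):

    changeiter, matchfn = walkchangerevs(ui, repo, pats, change, opts)
    seen = {}
    for st, rev, fns in changeiter:
        if st == 'window':
            seen.clear()                    # reset state for the new window
        elif st == 'add':
            seen[rev] = prepare(rev, fns)   # gather data, possibly out of order
        elif st == 'iter':
            if rev in seen:
                display(seen[rev])          # emit in the requested order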
1185 def commit(ui, repo, commitfunc, pats, opts):
1185 def commit(ui, repo, commitfunc, pats, opts):
1186 '''commit the specified files or all outstanding changes'''
1186 '''commit the specified files or all outstanding changes'''
1187 date = opts.get('date')
1187 date = opts.get('date')
1188 if date:
1188 if date:
1189 opts['date'] = util.parsedate(date)
1189 opts['date'] = util.parsedate(date)
1190 message = logmessage(opts)
1190 message = logmessage(opts)
1191
1191
1192 # extract addremove carefully -- this function can be called from a command
1192 # extract addremove carefully -- this function can be called from a command
1193 # that doesn't support addremove
1193 # that doesn't support addremove
1194 if opts.get('addremove'):
1194 if opts.get('addremove'):
1195 addremove(repo, pats, opts)
1195 addremove(repo, pats, opts)
1196
1196
1197 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1197 return commitfunc(ui, repo, message, match(repo, pats, opts), opts)
1198
1198
1199 def commiteditor(repo, ctx):
1199 def commiteditor(repo, ctx):
1200 if ctx.description():
1200 if ctx.description():
1201 return ctx.description()
1201 return ctx.description()
1202 return commitforceeditor(repo, ctx)
1202 return commitforceeditor(repo, ctx)
1203
1203
1204 def commitforceeditor(repo, ctx):
1204 def commitforceeditor(repo, ctx):
1205 edittext = []
1205 edittext = []
1206 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1206 modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
1207 if ctx.description():
1207 if ctx.description():
1208 edittext.append(ctx.description())
1208 edittext.append(ctx.description())
1209 edittext.append("")
1209 edittext.append("")
1210 edittext.append("") # Empty line between message and comments.
1210 edittext.append("") # Empty line between message and comments.
1211 edittext.append(_("HG: Enter commit message."
1211 edittext.append(_("HG: Enter commit message."
1212 " Lines beginning with 'HG:' are removed."))
1212 " Lines beginning with 'HG:' are removed."))
1213 edittext.append(_("HG: Leave message empty to abort commit."))
1213 edittext.append(_("HG: Leave message empty to abort commit."))
1214 edittext.append("HG: --")
1214 edittext.append("HG: --")
1215 edittext.append(_("HG: user: %s") % ctx.user())
1215 edittext.append(_("HG: user: %s") % ctx.user())
1216 if ctx.p2():
1216 if ctx.p2():
1217 edittext.append(_("HG: branch merge"))
1217 edittext.append(_("HG: branch merge"))
1218 if ctx.branch():
1218 if ctx.branch():
1219 edittext.append(_("HG: branch '%s'")
1219 edittext.append(_("HG: branch '%s'")
1220 % encoding.tolocal(ctx.branch()))
1220 % encoding.tolocal(ctx.branch()))
1221 edittext.extend([_("HG: added %s") % f for f in added])
1221 edittext.extend([_("HG: added %s") % f for f in added])
1222 edittext.extend([_("HG: changed %s") % f for f in modified])
1222 edittext.extend([_("HG: changed %s") % f for f in modified])
1223 edittext.extend([_("HG: removed %s") % f for f in removed])
1223 edittext.extend([_("HG: removed %s") % f for f in removed])
1224 if not added and not modified and not removed:
1224 if not added and not modified and not removed:
1225 edittext.append(_("HG: no files changed"))
1225 edittext.append(_("HG: no files changed"))
1226 edittext.append("")
1226 edittext.append("")
1227 # run editor in the repository root
1227 # run editor in the repository root
1228 olddir = os.getcwd()
1228 olddir = os.getcwd()
1229 os.chdir(repo.root)
1229 os.chdir(repo.root)
1230 text = repo.ui.edit("\n".join(edittext), ctx.user())
1230 text = repo.ui.edit("\n".join(edittext), ctx.user())
1231 text = re.sub("(?m)^HG:.*\n", "", text)
1231 text = re.sub("(?m)^HG:.*\n", "", text)
1232 os.chdir(olddir)
1232 os.chdir(olddir)
1233
1233
1234 if not text.strip():
1234 if not text.strip():
1235 raise util.Abort(_("empty commit message"))
1235 raise util.Abort(_("empty commit message"))
1236
1236
1237 return text
1237 return text
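The editor round trip above builds a template of "HG:" comment lines, hands it to the user's editor, strips every line that still starts with "HG:" and aborts if nothing but whitespace is left. The stripping step on its own (ValueError standing in for util.Abort):

    import re

    def strip_hg_comments(text):
        text = re.sub("(?m)^HG:.*\n", "", text)
        if not text.strip():
            raise ValueError("empty commit message")
        return text

    # strip_hg_comments("fix bug\n\nHG: Enter commit message.\nHG: user: alice\n")
    # -> 'fix bug\n\n'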
@@ -1,3467 +1,3467
1 # commands.py - command processing for mercurial
1 # commands.py - command processing for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from node import hex, nullid, nullrev, short
8 from node import hex, nullid, nullrev, short
9 from lock import release
9 from lock import release
10 from i18n import _, gettext
10 from i18n import _, gettext
11 import os, re, sys, textwrap, subprocess, difflib, time
11 import os, re, sys, textwrap, subprocess, difflib, time
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
12 import hg, util, revlog, bundlerepo, extensions, copies, context, error
13 import patch, help, mdiff, tempfile, url, encoding
13 import patch, help, mdiff, tempfile, url, encoding
14 import archival, changegroup, cmdutil, sshserver, hbisect
14 import archival, changegroup, cmdutil, sshserver, hbisect
15 from hgweb import server
15 from hgweb import server
16 import merge as merge_
16 import merge as merge_
17
17
18 # Commands start here, listed alphabetically
18 # Commands start here, listed alphabetically
19
19
20 def add(ui, repo, *pats, **opts):
20 def add(ui, repo, *pats, **opts):
21 """add the specified files on the next commit
21 """add the specified files on the next commit
22
22
23 Schedule files to be version controlled and added to the
23 Schedule files to be version controlled and added to the
24 repository.
24 repository.
25
25
26 The files will be added to the repository at the next commit. To
26 The files will be added to the repository at the next commit. To
27 undo an add before that, see hg revert.
27 undo an add before that, see hg revert.
28
28
29 If no names are given, add all files to the repository.
29 If no names are given, add all files to the repository.
30 """
30 """
31
31
32 bad = []
32 bad = []
33 exacts = {}
33 exacts = {}
34 names = []
34 names = []
35 m = cmdutil.match(repo, pats, opts)
35 m = cmdutil.match(repo, pats, opts)
36 oldbad = m.bad
36 oldbad = m.bad
37 m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
37 m.bad = lambda x,y: bad.append(x) or oldbad(x,y)
38
38
39 for f in repo.walk(m):
39 for f in repo.walk(m):
40 exact = m.exact(f)
40 exact = m.exact(f)
41 if exact or f not in repo.dirstate:
41 if exact or f not in repo.dirstate:
42 names.append(f)
42 names.append(f)
43 if ui.verbose or not exact:
43 if ui.verbose or not exact:
44 ui.status(_('adding %s\n') % m.rel(f))
44 ui.status(_('adding %s\n') % m.rel(f))
45 if not opts.get('dry_run'):
45 if not opts.get('dry_run'):
46 bad += [f for f in repo.add(names) if f in m.files()]
46 bad += [f for f in repo.add(names) if f in m.files()]
47 return bad and 1 or 0
47 return bad and 1 or 0
48
48
49 def addremove(ui, repo, *pats, **opts):
49 def addremove(ui, repo, *pats, **opts):
50 """add all new files, delete all missing files
50 """add all new files, delete all missing files
51
51
52 Add all new files and remove all missing files from the
52 Add all new files and remove all missing files from the
53 repository.
53 repository.
54
54
55 New files are ignored if they match any of the patterns in
55 New files are ignored if they match any of the patterns in
56 .hgignore. As with add, these changes take effect at the next
56 .hgignore. As with add, these changes take effect at the next
57 commit.
57 commit.
58
58
59 Use the -s/--similarity option to detect renamed files. With a
59 Use the -s/--similarity option to detect renamed files. With a
60 parameter > 0, this compares every removed file with every added
60 parameter > 0, this compares every removed file with every added
61 file and records those similar enough as renames. This option
61 file and records those similar enough as renames. This option
62 takes a percentage between 0 (disabled) and 100 (files must be
62 takes a percentage between 0 (disabled) and 100 (files must be
63 identical) as its parameter. Detecting renamed files this way can
63 identical) as its parameter. Detecting renamed files this way can
64 be expensive.
64 be expensive.
65 """
65 """
66 try:
66 try:
67 sim = float(opts.get('similarity') or 0)
67 sim = float(opts.get('similarity') or 0)
68 except ValueError:
68 except ValueError:
69 raise util.Abort(_('similarity must be a number'))
69 raise util.Abort(_('similarity must be a number'))
70 if sim < 0 or sim > 100:
70 if sim < 0 or sim > 100:
71 raise util.Abort(_('similarity must be between 0 and 100'))
71 raise util.Abort(_('similarity must be between 0 and 100'))
72 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
72 return cmdutil.addremove(repo, pats, opts, similarity=sim/100.)
73
73
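The --similarity handling above accepts a percentage, rejects non-numeric or out-of-range input, and hands the value on as a fraction. A minimal sketch of just that validation step (parse_similarity is a name invented here, not part of Mercurial):

def parse_similarity(value):
    # Empty input counts as 0; anything non-numeric or outside 0-100 is
    # rejected; the result is the 0.0-1.0 fraction passed along above.
    try:
        sim = float(value or 0)
    except ValueError:
        raise ValueError('similarity must be a number')
    if sim < 0 or sim > 100:
        raise ValueError('similarity must be between 0 and 100')
    return sim / 100.0

print(parse_similarity('75'))  # 0.75
print(parse_similarity(None))  # 0.0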
74 def annotate(ui, repo, *pats, **opts):
74 def annotate(ui, repo, *pats, **opts):
75 """show changeset information per file line
75 """show changeset information per file line
76
76
77 List changes in files, showing the revision id responsible for
77 List changes in files, showing the revision id responsible for
78 each line
78 each line
79
79
80 This command is useful to discover who did a change or when a
80 This command is useful to discover who did a change or when a
81 change took place.
81 change took place.
82
82
83 Without the -a/--text option, annotate will avoid processing files
83 Without the -a/--text option, annotate will avoid processing files
84 it detects as binary. With -a, annotate will generate an
84 it detects as binary. With -a, annotate will generate an
85 annotation anyway, probably with undesirable results.
85 annotation anyway, probably with undesirable results.
86 """
86 """
87 datefunc = ui.quiet and util.shortdate or util.datestr
87 datefunc = ui.quiet and util.shortdate or util.datestr
88 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
88 getdate = util.cachefunc(lambda x: datefunc(x[0].date()))
89
89
90 if not pats:
90 if not pats:
91 raise util.Abort(_('at least one filename or pattern is required'))
91 raise util.Abort(_('at least one filename or pattern is required'))
92
92
93 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
93 opmap = [('user', lambda x: ui.shortuser(x[0].user())),
94 ('number', lambda x: str(x[0].rev())),
94 ('number', lambda x: str(x[0].rev())),
95 ('changeset', lambda x: short(x[0].node())),
95 ('changeset', lambda x: short(x[0].node())),
96 ('date', getdate),
96 ('date', getdate),
97 ('follow', lambda x: x[0].path()),
97 ('follow', lambda x: x[0].path()),
98 ]
98 ]
99
99
100 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
100 if (not opts.get('user') and not opts.get('changeset') and not opts.get('date')
101 and not opts.get('follow')):
101 and not opts.get('follow')):
102 opts['number'] = 1
102 opts['number'] = 1
103
103
104 linenumber = opts.get('line_number') is not None
104 linenumber = opts.get('line_number') is not None
105 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
105 if (linenumber and (not opts.get('changeset')) and (not opts.get('number'))):
106 raise util.Abort(_('at least one of -n/-c is required for -l'))
106 raise util.Abort(_('at least one of -n/-c is required for -l'))
107
107
108 funcmap = [func for op, func in opmap if opts.get(op)]
108 funcmap = [func for op, func in opmap if opts.get(op)]
109 if linenumber:
109 if linenumber:
110 lastfunc = funcmap[-1]
110 lastfunc = funcmap[-1]
111 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
111 funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1])
112
112
113 ctx = repo[opts.get('rev')]
113 ctx = repo[opts.get('rev')]
114
114
115 m = cmdutil.match(repo, pats, opts)
115 m = cmdutil.match(repo, pats, opts)
116 for abs in ctx.walk(m):
116 for abs in ctx.walk(m):
117 fctx = ctx[abs]
117 fctx = ctx[abs]
118 if not opts.get('text') and util.binary(fctx.data()):
118 if not opts.get('text') and util.binary(fctx.data()):
119 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
119 ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
120 continue
120 continue
121
121
122 lines = fctx.annotate(follow=opts.get('follow'),
122 lines = fctx.annotate(follow=opts.get('follow'),
123 linenumber=linenumber)
123 linenumber=linenumber)
124 pieces = []
124 pieces = []
125
125
126 for f in funcmap:
126 for f in funcmap:
127 l = [f(n) for n, dummy in lines]
127 l = [f(n) for n, dummy in lines]
128 if l:
128 if l:
129 ml = max(map(len, l))
129 ml = max(map(len, l))
130 pieces.append(["%*s" % (ml, x) for x in l])
130 pieces.append(["%*s" % (ml, x) for x in l])
131
131
132 if pieces:
132 if pieces:
133 for p, l in zip(zip(*pieces), lines):
133 for p, l in zip(zip(*pieces), lines):
134 ui.write("%s: %s" % (" ".join(p), l[1]))
134 ui.write("%s: %s" % (" ".join(p), l[1]))
135
135
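Each annotation field above is right-justified to the widest value in its column ("%*s" % (width, value)) before the columns are zipped back into per-line output. A toy version of that alignment over made-up user and revision columns:

def align_columns(columns):
    # Pad every value to the widest entry of its own column, then walk the
    # rows with zip(*...) and join the fields, as the loop above does.
    padded = []
    for col in columns:
        width = max(len(value) for value in col)
        padded.append(["%*s" % (width, value) for value in col])
    return [" ".join(row) for row in zip(*padded)]

for line in align_columns([['alice', 'bob', 'carol'], ['12', '7', '104']]):
    print(line)
# alice  12
#   bob   7
# carol 104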
136 def archive(ui, repo, dest, **opts):
136 def archive(ui, repo, dest, **opts):
137 '''create unversioned archive of a repository revision
137 '''create unversioned archive of a repository revision
138
138
139 By default, the revision used is the parent of the working
139 By default, the revision used is the parent of the working
140 directory; use -r/--rev to specify a different revision.
140 directory; use -r/--rev to specify a different revision.
141
141
142 To specify the type of archive to create, use -t/--type. Valid
142 To specify the type of archive to create, use -t/--type. Valid
143 types are:
143 types are:
144
144
145 "files" (default): a directory full of files
145 "files" (default): a directory full of files
146 "tar": tar archive, uncompressed
146 "tar": tar archive, uncompressed
147 "tbz2": tar archive, compressed using bzip2
147 "tbz2": tar archive, compressed using bzip2
148 "tgz": tar archive, compressed using gzip
148 "tgz": tar archive, compressed using gzip
149 "uzip": zip archive, uncompressed
149 "uzip": zip archive, uncompressed
150 "zip": zip archive, compressed using deflate
150 "zip": zip archive, compressed using deflate
151
151
152 The exact name of the destination archive or directory is given
152 The exact name of the destination archive or directory is given
153 using a format string; see 'hg help export' for details.
153 using a format string; see 'hg help export' for details.
154
154
155 Each member added to an archive file has a directory prefix
155 Each member added to an archive file has a directory prefix
156 prepended. Use -p/--prefix to specify a format string for the
156 prepended. Use -p/--prefix to specify a format string for the
157 prefix. The default is the basename of the archive, with suffixes
157 prefix. The default is the basename of the archive, with suffixes
158 removed.
158 removed.
159 '''
159 '''
160
160
161 ctx = repo[opts.get('rev')]
161 ctx = repo[opts.get('rev')]
162 if not ctx:
162 if not ctx:
163 raise util.Abort(_('no working directory: please specify a revision'))
163 raise util.Abort(_('no working directory: please specify a revision'))
164 node = ctx.node()
164 node = ctx.node()
165 dest = cmdutil.make_filename(repo, dest, node)
165 dest = cmdutil.make_filename(repo, dest, node)
166 if os.path.realpath(dest) == repo.root:
166 if os.path.realpath(dest) == repo.root:
167 raise util.Abort(_('repository root cannot be destination'))
167 raise util.Abort(_('repository root cannot be destination'))
168 matchfn = cmdutil.match(repo, [], opts)
168 matchfn = cmdutil.match(repo, [], opts)
169 kind = opts.get('type') or 'files'
169 kind = opts.get('type') or 'files'
170 prefix = opts.get('prefix')
170 prefix = opts.get('prefix')
171 if dest == '-':
171 if dest == '-':
172 if kind == 'files':
172 if kind == 'files':
173 raise util.Abort(_('cannot archive plain files to stdout'))
173 raise util.Abort(_('cannot archive plain files to stdout'))
174 dest = sys.stdout
174 dest = sys.stdout
175 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
175 if not prefix: prefix = os.path.basename(repo.root) + '-%h'
176 prefix = cmdutil.make_filename(repo, prefix, node)
176 prefix = cmdutil.make_filename(repo, prefix, node)
177 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
177 archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
178 matchfn, prefix)
178 matchfn, prefix)
179
179
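The archive command above defaults to a plain directory of files, refuses to stream that format to stdout, and falls back to "<basename>-%h" as the member prefix. A small sketch of those choices; pick_archive_settings is invented for illustration and the kind list is taken from the help text above:

import os

ARCHIVE_KINDS = set(['files', 'tar', 'tbz2', 'tgz', 'uzip', 'zip'])

def pick_archive_settings(root, kind=None, prefix=None, to_stdout=False):
    # Apply the same defaults and sanity checks as the command above.
    kind = kind or 'files'
    if kind not in ARCHIVE_KINDS:
        raise ValueError('unknown archive type: %s' % kind)
    if to_stdout and kind == 'files':
        raise ValueError('cannot archive plain files to stdout')
    if not prefix:
        prefix = os.path.basename(root) + '-%h'
    return kind, prefix

print(pick_archive_settings('/work/myrepo', kind='tgz'))  # ('tgz', 'myrepo-%h')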
180 def backout(ui, repo, node=None, rev=None, **opts):
180 def backout(ui, repo, node=None, rev=None, **opts):
181 '''reverse effect of earlier changeset
181 '''reverse effect of earlier changeset
182
182
183 Commit the backed out changes as a new changeset. The new
183 Commit the backed out changes as a new changeset. The new
184 changeset is a child of the backed out changeset.
184 changeset is a child of the backed out changeset.
185
185
186 If you back out a changeset other than the tip, a new head is
186 If you back out a changeset other than the tip, a new head is
187 created. This head will be the new tip and you should merge this
187 created. This head will be the new tip and you should merge this
188 backout changeset with another head (current one by default).
188 backout changeset with another head (current one by default).
189
189
190 The --merge option remembers the parent of the working directory
190 The --merge option remembers the parent of the working directory
191 before starting the backout, then merges the new head with that
191 before starting the backout, then merges the new head with that
192 changeset afterwards. This saves you from doing the merge by hand.
192 changeset afterwards. This saves you from doing the merge by hand.
193 The result of this merge is not committed, as with a normal merge.
193 The result of this merge is not committed, as with a normal merge.
194
194
195 See 'hg help dates' for a list of formats valid for -d/--date.
195 See 'hg help dates' for a list of formats valid for -d/--date.
196 '''
196 '''
197 if rev and node:
197 if rev and node:
198 raise util.Abort(_("please specify just one revision"))
198 raise util.Abort(_("please specify just one revision"))
199
199
200 if not rev:
200 if not rev:
201 rev = node
201 rev = node
202
202
203 if not rev:
203 if not rev:
204 raise util.Abort(_("please specify a revision to backout"))
204 raise util.Abort(_("please specify a revision to backout"))
205
205
206 date = opts.get('date')
206 date = opts.get('date')
207 if date:
207 if date:
208 opts['date'] = util.parsedate(date)
208 opts['date'] = util.parsedate(date)
209
209
210 cmdutil.bail_if_changed(repo)
210 cmdutil.bail_if_changed(repo)
211 node = repo.lookup(rev)
211 node = repo.lookup(rev)
212
212
213 op1, op2 = repo.dirstate.parents()
213 op1, op2 = repo.dirstate.parents()
214 a = repo.changelog.ancestor(op1, node)
214 a = repo.changelog.ancestor(op1, node)
215 if a != node:
215 if a != node:
216 raise util.Abort(_('cannot back out change on a different branch'))
216 raise util.Abort(_('cannot back out change on a different branch'))
217
217
218 p1, p2 = repo.changelog.parents(node)
218 p1, p2 = repo.changelog.parents(node)
219 if p1 == nullid:
219 if p1 == nullid:
220 raise util.Abort(_('cannot back out a change with no parents'))
220 raise util.Abort(_('cannot back out a change with no parents'))
221 if p2 != nullid:
221 if p2 != nullid:
222 if not opts.get('parent'):
222 if not opts.get('parent'):
223 raise util.Abort(_('cannot back out a merge changeset without '
223 raise util.Abort(_('cannot back out a merge changeset without '
224 '--parent'))
224 '--parent'))
225 p = repo.lookup(opts['parent'])
225 p = repo.lookup(opts['parent'])
226 if p not in (p1, p2):
226 if p not in (p1, p2):
227 raise util.Abort(_('%s is not a parent of %s') %
227 raise util.Abort(_('%s is not a parent of %s') %
228 (short(p), short(node)))
228 (short(p), short(node)))
229 parent = p
229 parent = p
230 else:
230 else:
231 if opts.get('parent'):
231 if opts.get('parent'):
232 raise util.Abort(_('cannot use --parent on non-merge changeset'))
232 raise util.Abort(_('cannot use --parent on non-merge changeset'))
233 parent = p1
233 parent = p1
234
234
235 # the backout should appear on the same branch
235 # the backout should appear on the same branch
236 branch = repo.dirstate.branch()
236 branch = repo.dirstate.branch()
237 hg.clean(repo, node, show_stats=False)
237 hg.clean(repo, node, show_stats=False)
238 repo.dirstate.setbranch(branch)
238 repo.dirstate.setbranch(branch)
239 revert_opts = opts.copy()
239 revert_opts = opts.copy()
240 revert_opts['date'] = None
240 revert_opts['date'] = None
241 revert_opts['all'] = True
241 revert_opts['all'] = True
242 revert_opts['rev'] = hex(parent)
242 revert_opts['rev'] = hex(parent)
243 revert_opts['no_backup'] = None
243 revert_opts['no_backup'] = None
244 revert(ui, repo, **revert_opts)
244 revert(ui, repo, **revert_opts)
245 commit_opts = opts.copy()
245 commit_opts = opts.copy()
246 commit_opts['addremove'] = False
246 commit_opts['addremove'] = False
247 if not commit_opts['message'] and not commit_opts['logfile']:
247 if not commit_opts['message'] and not commit_opts['logfile']:
248 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
248 commit_opts['message'] = _("Backed out changeset %s") % (short(node))
249 commit_opts['force_editor'] = True
249 commit_opts['force_editor'] = True
250 commit(ui, repo, **commit_opts)
250 commit(ui, repo, **commit_opts)
251 def nice(node):
251 def nice(node):
252 return '%d:%s' % (repo.changelog.rev(node), short(node))
252 return '%d:%s' % (repo.changelog.rev(node), short(node))
253 ui.status(_('changeset %s backs out changeset %s\n') %
253 ui.status(_('changeset %s backs out changeset %s\n') %
254 (nice(repo.changelog.tip()), nice(node)))
254 (nice(repo.changelog.tip()), nice(node)))
255 if op1 != node:
255 if op1 != node:
256 hg.clean(repo, op1, show_stats=False)
256 hg.clean(repo, op1, show_stats=False)
257 if opts.get('merge'):
257 if opts.get('merge'):
258 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
258 ui.status(_('merging with changeset %s\n') % nice(repo.changelog.tip()))
259 hg.merge(repo, hex(repo.changelog.tip()))
259 hg.merge(repo, hex(repo.changelog.tip()))
260 else:
260 else:
261 ui.status(_('the backout changeset is a new head - '
261 ui.status(_('the backout changeset is a new head - '
262 'do not forget to merge\n'))
262 'do not forget to merge\n'))
263 ui.status(_('(use "backout --merge" '
263 ui.status(_('(use "backout --merge" '
264 'if you want to auto-merge)\n'))
264 'if you want to auto-merge)\n'))
265
265
266 def bisect(ui, repo, rev=None, extra=None, command=None,
266 def bisect(ui, repo, rev=None, extra=None, command=None,
267 reset=None, good=None, bad=None, skip=None, noupdate=None):
267 reset=None, good=None, bad=None, skip=None, noupdate=None):
268 """subdivision search of changesets
268 """subdivision search of changesets
269
269
270 This command helps to find changesets which introduce problems. To
270 This command helps to find changesets which introduce problems. To
271 use, mark the earliest changeset you know exhibits the problem as
271 use, mark the earliest changeset you know exhibits the problem as
272 bad, then mark the latest changeset which is free from the problem
272 bad, then mark the latest changeset which is free from the problem
273 as good. Bisect will update your working directory to a revision
273 as good. Bisect will update your working directory to a revision
274 for testing (unless the -U/--noupdate option is specified). Once
274 for testing (unless the -U/--noupdate option is specified). Once
275 you have performed tests, mark the working directory as bad or
275 you have performed tests, mark the working directory as bad or
276 good and bisect will either update to another candidate changeset
276 good and bisect will either update to another candidate changeset
277 or announce that it has found the bad revision.
277 or announce that it has found the bad revision.
278
278
279 As a shortcut, you can also use the revision argument to mark a
279 As a shortcut, you can also use the revision argument to mark a
280 revision as good or bad without checking it out first.
280 revision as good or bad without checking it out first.
281
281
282 If you supply a command it will be used for automatic bisection.
282 If you supply a command it will be used for automatic bisection.
283 Its exit status will be used as flag to mark revision as bad or
283 Its exit status will be used as flag to mark revision as bad or
284 good. In case exit status is 0 the revision is marked as good, 125
284 good. In case exit status is 0 the revision is marked as good, 125
285 - skipped, 127 (command not found) - bisection will be aborted;
285 - skipped, 127 (command not found) - bisection will be aborted;
286 any other status bigger than 0 will mark revision as bad.
286 any other status bigger than 0 will mark revision as bad.
287 """
287 """
288 def print_result(nodes, good):
288 def print_result(nodes, good):
289 displayer = cmdutil.show_changeset(ui, repo, {})
289 displayer = cmdutil.show_changeset(ui, repo, {})
290 if len(nodes) == 1:
290 if len(nodes) == 1:
291 # narrowed it down to a single revision
291 # narrowed it down to a single revision
292 if good:
292 if good:
293 ui.write(_("The first good revision is:\n"))
293 ui.write(_("The first good revision is:\n"))
294 else:
294 else:
295 ui.write(_("The first bad revision is:\n"))
295 ui.write(_("The first bad revision is:\n"))
296 displayer.show(repo[nodes[0]])
296 displayer.show(repo[nodes[0]])
297 else:
297 else:
298 # multiple possible revisions
298 # multiple possible revisions
299 if good:
299 if good:
300 ui.write(_("Due to skipped revisions, the first "
300 ui.write(_("Due to skipped revisions, the first "
301 "good revision could be any of:\n"))
301 "good revision could be any of:\n"))
302 else:
302 else:
303 ui.write(_("Due to skipped revisions, the first "
303 ui.write(_("Due to skipped revisions, the first "
304 "bad revision could be any of:\n"))
304 "bad revision could be any of:\n"))
305 for n in nodes:
305 for n in nodes:
306 displayer.show(repo[n])
306 displayer.show(repo[n])
307
307
308 def check_state(state, interactive=True):
308 def check_state(state, interactive=True):
309 if not state['good'] or not state['bad']:
309 if not state['good'] or not state['bad']:
310 if (good or bad or skip or reset) and interactive:
310 if (good or bad or skip or reset) and interactive:
311 return
311 return
312 if not state['good']:
312 if not state['good']:
313 raise util.Abort(_('cannot bisect (no known good revisions)'))
313 raise util.Abort(_('cannot bisect (no known good revisions)'))
314 else:
314 else:
315 raise util.Abort(_('cannot bisect (no known bad revisions)'))
315 raise util.Abort(_('cannot bisect (no known bad revisions)'))
316 return True
316 return True
317
317
318 # backward compatibility
318 # backward compatibility
319 if rev in "good bad reset init".split():
319 if rev in "good bad reset init".split():
320 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
320 ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
321 cmd, rev, extra = rev, extra, None
321 cmd, rev, extra = rev, extra, None
322 if cmd == "good":
322 if cmd == "good":
323 good = True
323 good = True
324 elif cmd == "bad":
324 elif cmd == "bad":
325 bad = True
325 bad = True
326 else:
326 else:
327 reset = True
327 reset = True
328 elif extra or good + bad + skip + reset + bool(command) > 1:
328 elif extra or good + bad + skip + reset + bool(command) > 1:
329 raise util.Abort(_('incompatible arguments'))
329 raise util.Abort(_('incompatible arguments'))
330
330
331 if reset:
331 if reset:
332 p = repo.join("bisect.state")
332 p = repo.join("bisect.state")
333 if os.path.exists(p):
333 if os.path.exists(p):
334 os.unlink(p)
334 os.unlink(p)
335 return
335 return
336
336
337 state = hbisect.load_state(repo)
337 state = hbisect.load_state(repo)
338
338
339 if command:
339 if command:
340 commandpath = util.find_exe(command)
340 commandpath = util.find_exe(command)
341 changesets = 1
341 changesets = 1
342 try:
342 try:
343 while changesets:
343 while changesets:
344 # update state
344 # update state
345 status = subprocess.call([commandpath])
345 status = subprocess.call([commandpath])
346 if status == 125:
346 if status == 125:
347 transition = "skip"
347 transition = "skip"
348 elif status == 0:
348 elif status == 0:
349 transition = "good"
349 transition = "good"
350 # status < 0 means process was killed
350 # status < 0 means process was killed
351 elif status == 127:
351 elif status == 127:
352 raise util.Abort(_("failed to execute %s") % command)
352 raise util.Abort(_("failed to execute %s") % command)
353 elif status < 0:
353 elif status < 0:
354 raise util.Abort(_("%s killed") % command)
354 raise util.Abort(_("%s killed") % command)
355 else:
355 else:
356 transition = "bad"
356 transition = "bad"
357 node = repo.lookup(rev or '.')
357 node = repo.lookup(rev or '.')
358 state[transition].append(node)
358 state[transition].append(node)
359 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
359 ui.note(_('Changeset %s: %s\n') % (short(node), transition))
360 check_state(state, interactive=False)
360 check_state(state, interactive=False)
361 # bisect
361 # bisect
362 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
362 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
363 # update to next check
363 # update to next check
364 cmdutil.bail_if_changed(repo)
364 cmdutil.bail_if_changed(repo)
365 hg.clean(repo, nodes[0], show_stats=False)
365 hg.clean(repo, nodes[0], show_stats=False)
366 finally:
366 finally:
367 hbisect.save_state(repo, state)
367 hbisect.save_state(repo, state)
368 return print_result(nodes, not status)
368 return print_result(nodes, not status)
369
369
370 # update state
370 # update state
371 node = repo.lookup(rev or '.')
371 node = repo.lookup(rev or '.')
372 if good:
372 if good:
373 state['good'].append(node)
373 state['good'].append(node)
374 elif bad:
374 elif bad:
375 state['bad'].append(node)
375 state['bad'].append(node)
376 elif skip:
376 elif skip:
377 state['skip'].append(node)
377 state['skip'].append(node)
378
378
379 hbisect.save_state(repo, state)
379 hbisect.save_state(repo, state)
380
380
381 if not check_state(state):
381 if not check_state(state):
382 return
382 return
383
383
384 # actually bisect
384 # actually bisect
385 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
385 nodes, changesets, good = hbisect.bisect(repo.changelog, state)
386 if changesets == 0:
386 if changesets == 0:
387 print_result(nodes, good)
387 print_result(nodes, good)
388 else:
388 else:
389 assert len(nodes) == 1 # only a single node can be tested next
389 assert len(nodes) == 1 # only a single node can be tested next
390 node = nodes[0]
390 node = nodes[0]
391 # compute the approximate number of remaining tests
391 # compute the approximate number of remaining tests
392 tests, size = 0, 2
392 tests, size = 0, 2
393 while size <= changesets:
393 while size <= changesets:
394 tests, size = tests + 1, size * 2
394 tests, size = tests + 1, size * 2
395 rev = repo.changelog.rev(node)
395 rev = repo.changelog.rev(node)
396 ui.write(_("Testing changeset %s:%s "
396 ui.write(_("Testing changeset %s:%s "
397 "(%s changesets remaining, ~%s tests)\n")
397 "(%s changesets remaining, ~%s tests)\n")
398 % (rev, short(node), changesets, tests))
398 % (rev, short(node), changesets, tests))
399 if not noupdate:
399 if not noupdate:
400 cmdutil.bail_if_changed(repo)
400 cmdutil.bail_if_changed(repo)
401 return hg.clean(repo, node)
401 return hg.clean(repo, node)
402
402
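Two of the rules explained above are easy to isolate: the mapping from a test command's exit status onto a bisect state, and the doubling loop that estimates how many tests remain. A self-contained sketch of both (the function names are invented for this example):

def classify_exit_status(status):
    # 0 -> good, 125 -> skip, 127 or a negative status (killed) aborts,
    # any other non-zero status -> bad, as documented above.
    if status == 125:
        return 'skip'
    if status == 0:
        return 'good'
    if status == 127 or status < 0:
        raise RuntimeError('test command failed to run or was killed')
    return 'bad'

def remaining_tests(changesets):
    # Same doubling loop as above: roughly log2 of the candidates left.
    tests, size = 0, 2
    while size <= changesets:
        tests, size = tests + 1, size * 2
    return tests

print(classify_exit_status(1))  # bad
print(remaining_tests(100))     # 6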
403 def branch(ui, repo, label=None, **opts):
403 def branch(ui, repo, label=None, **opts):
404 """set or show the current branch name
404 """set or show the current branch name
405
405
406 With no argument, show the current branch name. With one argument,
406 With no argument, show the current branch name. With one argument,
407 set the working directory branch name (the branch does not exist
407 set the working directory branch name (the branch does not exist
408 in the repository until the next commit). It is recommended to use
408 in the repository until the next commit). It is recommended to use
409 the 'default' branch as your primary development branch.
409 the 'default' branch as your primary development branch.
410
410
411 Unless -f/--force is specified, branch will not let you set a
411 Unless -f/--force is specified, branch will not let you set a
412 branch name that shadows an existing branch.
412 branch name that shadows an existing branch.
413
413
414 Use -C/--clean to reset the working directory branch to that of
414 Use -C/--clean to reset the working directory branch to that of
415 the parent of the working directory, negating a previous branch
415 the parent of the working directory, negating a previous branch
416 change.
416 change.
417
417
418 Use the command 'hg update' to switch to an existing branch.
418 Use the command 'hg update' to switch to an existing branch.
419 """
419 """
420
420
421 if opts.get('clean'):
421 if opts.get('clean'):
422 label = repo[None].parents()[0].branch()
422 label = repo[None].parents()[0].branch()
423 repo.dirstate.setbranch(label)
423 repo.dirstate.setbranch(label)
424 ui.status(_('reset working directory to branch %s\n') % label)
424 ui.status(_('reset working directory to branch %s\n') % label)
425 elif label:
425 elif label:
426 if not opts.get('force') and label in repo.branchtags():
426 if not opts.get('force') and label in repo.branchtags():
427 if label not in [p.branch() for p in repo.parents()]:
427 if label not in [p.branch() for p in repo.parents()]:
428 raise util.Abort(_('a branch of the same name already exists'
428 raise util.Abort(_('a branch of the same name already exists'
429 ' (use --force to override)'))
429 ' (use --force to override)'))
430 repo.dirstate.setbranch(encoding.fromlocal(label))
430 repo.dirstate.setbranch(encoding.fromlocal(label))
431 ui.status(_('marked working directory as branch %s\n') % label)
431 ui.status(_('marked working directory as branch %s\n') % label)
432 else:
432 else:
433 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
433 ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch()))
434
434
435 def branches(ui, repo, active=False):
435 def branches(ui, repo, active=False):
436 """list repository named branches
436 """list repository named branches
437
437
438 List the repository's named branches, indicating which ones are
438 List the repository's named branches, indicating which ones are
439 inactive. If active is specified, only show active branches.
439 inactive. If active is specified, only show active branches.
440
440
441 A branch is considered active if it contains repository heads.
441 A branch is considered active if it contains repository heads.
442
442
443 Use the command 'hg update' to switch to an existing branch.
443 Use the command 'hg update' to switch to an existing branch.
444 """
444 """
445 hexfunc = ui.debugflag and hex or short
445 hexfunc = ui.debugflag and hex or short
446 activebranches = [encoding.tolocal(repo[n].branch())
446 activebranches = [encoding.tolocal(repo[n].branch())
447 for n in repo.heads(closed=False)]
447 for n in repo.heads(closed=False)]
448 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
448 branches = sorted([(tag in activebranches, repo.changelog.rev(node), tag)
449 for tag, node in repo.branchtags().items()],
449 for tag, node in repo.branchtags().items()],
450 reverse=True)
450 reverse=True)
451
451
452 for isactive, node, tag in branches:
452 for isactive, node, tag in branches:
453 if (not active) or isactive:
453 if (not active) or isactive:
454 if ui.quiet:
454 if ui.quiet:
455 ui.write("%s\n" % tag)
455 ui.write("%s\n" % tag)
456 else:
456 else:
457 hn = repo.lookup(node)
457 hn = repo.lookup(node)
458 if isactive:
458 if isactive:
459 notice = ''
459 notice = ''
460 elif hn not in repo.branchheads(tag, closed=False):
460 elif hn not in repo.branchheads(tag, closed=False):
461 notice = ' (closed)'
461 notice = ' (closed)'
462 else:
462 else:
463 notice = ' (inactive)'
463 notice = ' (inactive)'
464 rev = str(node).rjust(31 - encoding.colwidth(tag))
464 rev = str(node).rjust(31 - encoding.colwidth(tag))
465 data = tag, rev, hexfunc(hn), notice
465 data = tag, rev, hexfunc(hn), notice
466 ui.write("%s %s:%s%s\n" % data)
466 ui.write("%s %s:%s%s\n" % data)
467
467
468 def bundle(ui, repo, fname, dest=None, **opts):
468 def bundle(ui, repo, fname, dest=None, **opts):
469 """create a changegroup file
469 """create a changegroup file
470
470
471 Generate a compressed changegroup file collecting changesets not
471 Generate a compressed changegroup file collecting changesets not
472 known to be in another repository.
472 known to be in another repository.
473
473
474 If no destination repository is specified the destination is
474 If no destination repository is specified the destination is
475 assumed to have all the nodes specified by one or more --base
475 assumed to have all the nodes specified by one or more --base
476 parameters. To create a bundle containing all changesets, use
476 parameters. To create a bundle containing all changesets, use
477 -a/--all (or --base null). To change the compression method
477 -a/--all (or --base null). To change the compression method
478 applied, use the -t/--type option (by default, bundles are
478 applied, use the -t/--type option (by default, bundles are
479 compressed using bz2).
479 compressed using bz2).
480
480
481 The bundle file can then be transferred using conventional means
481 The bundle file can then be transferred using conventional means
482 and applied to another repository with the unbundle or pull
482 and applied to another repository with the unbundle or pull
483 command. This is useful when direct push and pull are not
483 command. This is useful when direct push and pull are not
484 available or when exporting an entire repository is undesirable.
484 available or when exporting an entire repository is undesirable.
485
485
486 Applying bundles preserves all changeset contents including
486 Applying bundles preserves all changeset contents including
487 permissions, copy/rename information, and revision history.
487 permissions, copy/rename information, and revision history.
488 """
488 """
489 revs = opts.get('rev') or None
489 revs = opts.get('rev') or None
490 if revs:
490 if revs:
491 revs = [repo.lookup(rev) for rev in revs]
491 revs = [repo.lookup(rev) for rev in revs]
492 if opts.get('all'):
492 if opts.get('all'):
493 base = ['null']
493 base = ['null']
494 else:
494 else:
495 base = opts.get('base')
495 base = opts.get('base')
496 if base:
496 if base:
497 if dest:
497 if dest:
498 raise util.Abort(_("--base is incompatible with specifying "
498 raise util.Abort(_("--base is incompatible with specifying "
499 "a destination"))
499 "a destination"))
500 base = [repo.lookup(rev) for rev in base]
500 base = [repo.lookup(rev) for rev in base]
501 # create the right base
501 # create the right base
502 # XXX: nodesbetween / changegroup* should be "fixed" instead
502 # XXX: nodesbetween / changegroup* should be "fixed" instead
503 o = []
503 o = []
504 has = set((nullid,))
504 has = set((nullid,))
505 for n in base:
505 for n in base:
506 has.update(repo.changelog.reachable(n))
506 has.update(repo.changelog.reachable(n))
507 if revs:
507 if revs:
508 visit = list(revs)
508 visit = list(revs)
509 else:
509 else:
510 visit = repo.changelog.heads()
510 visit = repo.changelog.heads()
511 seen = {}
511 seen = {}
512 while visit:
512 while visit:
513 n = visit.pop(0)
513 n = visit.pop(0)
514 parents = [p for p in repo.changelog.parents(n) if p not in has]
514 parents = [p for p in repo.changelog.parents(n) if p not in has]
515 if len(parents) == 0:
515 if len(parents) == 0:
516 o.insert(0, n)
516 o.insert(0, n)
517 else:
517 else:
518 for p in parents:
518 for p in parents:
519 if p not in seen:
519 if p not in seen:
520 seen[p] = 1
520 seen[p] = 1
521 visit.append(p)
521 visit.append(p)
522 else:
522 else:
523 dest, revs, checkout = hg.parseurl(
523 dest, revs, checkout = hg.parseurl(
524 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
524 ui.expandpath(dest or 'default-push', dest or 'default'), revs)
525 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
525 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
526 o = repo.findoutgoing(other, force=opts.get('force'))
526 o = repo.findoutgoing(other, force=opts.get('force'))
527
527
528 if revs:
528 if revs:
529 cg = repo.changegroupsubset(o, revs, 'bundle')
529 cg = repo.changegroupsubset(o, revs, 'bundle')
530 else:
530 else:
531 cg = repo.changegroup(o, 'bundle')
531 cg = repo.changegroup(o, 'bundle')
532
532
533 bundletype = opts.get('type', 'bzip2').lower()
533 bundletype = opts.get('type', 'bzip2').lower()
534 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
534 btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
535 bundletype = btypes.get(bundletype)
535 bundletype = btypes.get(bundletype)
536 if bundletype not in changegroup.bundletypes:
536 if bundletype not in changegroup.bundletypes:
537 raise util.Abort(_('unknown bundle type specified with --type'))
537 raise util.Abort(_('unknown bundle type specified with --type'))
538
538
539 changegroup.writebundle(cg, fname, bundletype)
539 changegroup.writebundle(cg, fname, bundletype)
540
540
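The --type handling above maps the user-facing compression names onto the on-disk bundle headers and rejects anything outside that table. A minimal sketch of that lookup (bundle_header is a name invented here):

BUNDLE_HEADERS = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}

def bundle_header(opt_type='bzip2'):
    # Lower-case the option and translate it, defaulting to bzip2 as above.
    header = BUNDLE_HEADERS.get(opt_type.lower())
    if header is None:
        raise ValueError('unknown bundle type specified with --type')
    return header

print(bundle_header())        # HG10BZ
print(bundle_header('GZIP'))  # HG10GZ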
541 def cat(ui, repo, file1, *pats, **opts):
541 def cat(ui, repo, file1, *pats, **opts):
542 """output the current or given revision of files
542 """output the current or given revision of files
543
543
544 Print the specified files as they were at the given revision. If
544 Print the specified files as they were at the given revision. If
545 no revision is given, the parent of the working directory is used,
545 no revision is given, the parent of the working directory is used,
546 or tip if no revision is checked out.
546 or tip if no revision is checked out.
547
547
548 Output may be to a file, in which case the name of the file is
548 Output may be to a file, in which case the name of the file is
549 given using a format string. The formatting rules are the same as
549 given using a format string. The formatting rules are the same as
550 for the export command, with the following additions:
550 for the export command, with the following additions:
551
551
552 %s basename of file being printed
552 %s basename of file being printed
553 %d dirname of file being printed, or '.' if in repository root
553 %d dirname of file being printed, or '.' if in repository root
554 %p root-relative path name of file being printed
554 %p root-relative path name of file being printed
555 """
555 """
556 ctx = repo[opts.get('rev')]
556 ctx = repo[opts.get('rev')]
557 err = 1
557 err = 1
558 m = cmdutil.match(repo, (file1,) + pats, opts)
558 m = cmdutil.match(repo, (file1,) + pats, opts)
559 for abs in ctx.walk(m):
559 for abs in ctx.walk(m):
560 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
560 fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs)
561 data = ctx[abs].data()
561 data = ctx[abs].data()
562 if opts.get('decode'):
562 if opts.get('decode'):
563 data = repo.wwritedata(abs, data)
563 data = repo.wwritedata(abs, data)
564 fp.write(data)
564 fp.write(data)
565 err = 0
565 err = 0
566 return err
566 return err
567
567
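The help text above describes three extra placeholders for the output file name: %s (basename), %d (dirname or '.'), and %p (root-relative path). A rough sketch of how such an expansion could look for a single path; this is an illustration only and ignores the export-style placeholders and escaping handled by the real command:

import os

def expand_output_name(fmt, relpath):
    # Expand only the three placeholders listed in the help text above.
    basename = os.path.basename(relpath)
    dirname = os.path.dirname(relpath) or '.'
    return (fmt.replace('%p', relpath)
               .replace('%d', dirname)
               .replace('%s', basename))

print(expand_output_name('out/%d/%s.orig', 'src/util.py'))  # out/src/util.py.orig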
568 def clone(ui, source, dest=None, **opts):
568 def clone(ui, source, dest=None, **opts):
569 """make a copy of an existing repository
569 """make a copy of an existing repository
570
570
571 Create a copy of an existing repository in a new directory.
571 Create a copy of an existing repository in a new directory.
572
572
573 If no destination directory name is specified, it defaults to the
573 If no destination directory name is specified, it defaults to the
574 basename of the source.
574 basename of the source.
575
575
576 The location of the source is added to the new repository's
576 The location of the source is added to the new repository's
577 .hg/hgrc file, as the default to be used for future pulls.
577 .hg/hgrc file, as the default to be used for future pulls.
578
578
579 If you use the -r/--rev option to clone up to a specific revision,
579 If you use the -r/--rev option to clone up to a specific revision,
580 no subsequent revisions (including subsequent tags) will be
580 no subsequent revisions (including subsequent tags) will be
581 present in the cloned repository. This option implies --pull, even
581 present in the cloned repository. This option implies --pull, even
582 on local repositories.
582 on local repositories.
583
583
584 By default, clone will check out the head of the 'default' branch.
584 By default, clone will check out the head of the 'default' branch.
585 If the -U/--noupdate option is used, the new clone will contain
585 If the -U/--noupdate option is used, the new clone will contain
586 only a repository (.hg) and no working copy (the working copy
586 only a repository (.hg) and no working copy (the working copy
587 parent is the null revision).
587 parent is the null revision).
588
588
589 See 'hg help urls' for valid source format details.
589 See 'hg help urls' for valid source format details.
590
590
591 It is possible to specify an ssh:// URL as the destination, but no
591 It is possible to specify an ssh:// URL as the destination, but no
592 .hg/hgrc and working directory will be created on the remote side.
592 .hg/hgrc and working directory will be created on the remote side.
593 Look at the help text for URLs for important details about ssh://
593 Look at the help text for URLs for important details about ssh://
594 URLs.
594 URLs.
595
595
596 For efficiency, hardlinks are used for cloning whenever the source
596 For efficiency, hardlinks are used for cloning whenever the source
597 and destination are on the same filesystem (note this applies only
597 and destination are on the same filesystem (note this applies only
598 to the repository data, not to the checked out files). Some
598 to the repository data, not to the checked out files). Some
599 filesystems, such as AFS, implement hardlinking incorrectly, but
599 filesystems, such as AFS, implement hardlinking incorrectly, but
600 do not report errors. In these cases, use the --pull option to
600 do not report errors. In these cases, use the --pull option to
601 avoid hardlinking.
601 avoid hardlinking.
602
602
603 In some cases, you can clone repositories and checked out files
603 In some cases, you can clone repositories and checked out files
604 using full hardlinks with
604 using full hardlinks with
605
605
606 $ cp -al REPO REPOCLONE
606 $ cp -al REPO REPOCLONE
607
607
608 This is the fastest way to clone, but it is not always safe. The
608 This is the fastest way to clone, but it is not always safe. The
609 operation is not atomic (making sure REPO is not modified during
609 operation is not atomic (making sure REPO is not modified during
610 the operation is up to you) and you have to make sure your editor
610 the operation is up to you) and you have to make sure your editor
611 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
611 breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
612 this is not compatible with certain extensions that place their
612 this is not compatible with certain extensions that place their
613 metadata under the .hg directory, such as mq.
613 metadata under the .hg directory, such as mq.
614
614
615 """
615 """
616 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
616 hg.clone(cmdutil.remoteui(ui, opts), source, dest,
617 pull=opts.get('pull'),
617 pull=opts.get('pull'),
618 stream=opts.get('uncompressed'),
618 stream=opts.get('uncompressed'),
619 rev=opts.get('rev'),
619 rev=opts.get('rev'),
620 update=not opts.get('noupdate'))
620 update=not opts.get('noupdate'))
621
621
622 def commit(ui, repo, *pats, **opts):
622 def commit(ui, repo, *pats, **opts):
623 """commit the specified files or all outstanding changes
623 """commit the specified files or all outstanding changes
624
624
625 Commit changes to the given files into the repository. Unlike a
625 Commit changes to the given files into the repository. Unlike a
626 centralized RCS, this operation is a local operation. See hg push
626 centralized RCS, this operation is a local operation. See hg push
627 for means to actively distribute your changes.
627 for means to actively distribute your changes.
628
628
629 If a list of files is omitted, all changes reported by "hg status"
629 If a list of files is omitted, all changes reported by "hg status"
630 will be committed.
630 will be committed.
631
631
632 If you are committing the result of a merge, do not provide any
632 If you are committing the result of a merge, do not provide any
633 filenames or -I/-X filters.
633 filenames or -I/-X filters.
634
634
635 If no commit message is specified, the configured editor is
635 If no commit message is specified, the configured editor is
636 started to prompt you for a message.
636 started to prompt you for a message.
637
637
638 See 'hg help dates' for a list of formats valid for -d/--date.
638 See 'hg help dates' for a list of formats valid for -d/--date.
639 """
639 """
640 extra = {}
640 extra = {}
641 if opts.get('close_branch'):
641 if opts.get('close_branch'):
642 extra['close'] = 1
642 extra['close'] = 1
643 e = cmdutil.commiteditor
643 e = cmdutil.commiteditor
644 if opts.get('force_editor'):
644 if opts.get('force_editor'):
645 e = cmdutil.commitforceeditor
645 e = cmdutil.commitforceeditor
646
646
647 def commitfunc(ui, repo, message, match, opts):
647 def commitfunc(ui, repo, message, match, opts):
648 return repo.commit(message, opts.get('user'), opts.get('date'), match,
648 return repo.commit(message, opts.get('user'), opts.get('date'), match,
649 editor=e, extra=extra)
649 editor=e, extra=extra)
650
650
651 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
651 node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
652 if not node:
652 if not node:
653 return
653 return
654 cl = repo.changelog
654 cl = repo.changelog
655 rev = cl.rev(node)
655 rev = cl.rev(node)
656 parents = cl.parentrevs(rev)
656 parents = cl.parentrevs(rev)
657 if rev - 1 in parents:
657 if rev - 1 in parents:
658 # one of the parents was the old tip
658 # one of the parents was the old tip
659 pass
659 pass
660 elif (parents == (nullrev, nullrev) or
660 elif (parents == (nullrev, nullrev) or
661 len(cl.heads(cl.node(parents[0]))) > 1 and
661 len(cl.heads(cl.node(parents[0]))) > 1 and
662 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
662 (parents[1] == nullrev or len(cl.heads(cl.node(parents[1]))) > 1)):
663 ui.status(_('created new head\n'))
663 ui.status(_('created new head\n'))
664
664
665 if ui.debugflag:
665 if ui.debugflag:
666 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
666 ui.write(_('committed changeset %d:%s\n') % (rev,hex(node)))
667 elif ui.verbose:
667 elif ui.verbose:
668 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
668 ui.write(_('committed changeset %d:%s\n') % (rev,short(node)))
669
669
670 def copy(ui, repo, *pats, **opts):
670 def copy(ui, repo, *pats, **opts):
671 """mark files as copied for the next commit
671 """mark files as copied for the next commit
672
672
673 Mark dest as having copies of source files. If dest is a
673 Mark dest as having copies of source files. If dest is a
674 directory, copies are put in that directory. If dest is a file,
674 directory, copies are put in that directory. If dest is a file,
675 the source must be a single file.
675 the source must be a single file.
676
676
677 By default, this command copies the contents of files as they
677 By default, this command copies the contents of files as they
678 stand in the working directory. If invoked with -A/--after, the
678 stand in the working directory. If invoked with -A/--after, the
679 operation is recorded, but no copying is performed.
679 operation is recorded, but no copying is performed.
680
680
681 This command takes effect with the next commit. To undo a copy
681 This command takes effect with the next commit. To undo a copy
682 before that, see hg revert.
682 before that, see hg revert.
683 """
683 """
684 wlock = repo.wlock(False)
684 wlock = repo.wlock(False)
685 try:
685 try:
686 return cmdutil.copy(ui, repo, pats, opts)
686 return cmdutil.copy(ui, repo, pats, opts)
687 finally:
687 finally:
688 wlock.release()
688 wlock.release()
689
689
690 def debugancestor(ui, repo, *args):
690 def debugancestor(ui, repo, *args):
691 """find the ancestor revision of two revisions in a given index"""
691 """find the ancestor revision of two revisions in a given index"""
692 if len(args) == 3:
692 if len(args) == 3:
693 index, rev1, rev2 = args
693 index, rev1, rev2 = args
694 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
694 r = revlog.revlog(util.opener(os.getcwd(), audit=False), index)
695 lookup = r.lookup
695 lookup = r.lookup
696 elif len(args) == 2:
696 elif len(args) == 2:
697 if not repo:
697 if not repo:
698 raise util.Abort(_("There is no Mercurial repository here "
698 raise util.Abort(_("There is no Mercurial repository here "
699 "(.hg not found)"))
699 "(.hg not found)"))
700 rev1, rev2 = args
700 rev1, rev2 = args
701 r = repo.changelog
701 r = repo.changelog
702 lookup = repo.lookup
702 lookup = repo.lookup
703 else:
703 else:
704 raise util.Abort(_('either two or three arguments required'))
704 raise util.Abort(_('either two or three arguments required'))
705 a = r.ancestor(lookup(rev1), lookup(rev2))
705 a = r.ancestor(lookup(rev1), lookup(rev2))
706 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
706 ui.write("%d:%s\n" % (r.rev(a), hex(a)))
707
707
708 def debugcommands(ui, cmd='', *args):
708 def debugcommands(ui, cmd='', *args):
709 for cmd, vals in sorted(table.iteritems()):
709 for cmd, vals in sorted(table.iteritems()):
710 cmd = cmd.split('|')[0].strip('^')
710 cmd = cmd.split('|')[0].strip('^')
711 opts = ', '.join([i[1] for i in vals[1]])
711 opts = ', '.join([i[1] for i in vals[1]])
712 ui.write('%s: %s\n' % (cmd, opts))
712 ui.write('%s: %s\n' % (cmd, opts))
713
713
714 def debugcomplete(ui, cmd='', **opts):
714 def debugcomplete(ui, cmd='', **opts):
715 """returns the completion list associated with the given command"""
715 """returns the completion list associated with the given command"""
716
716
717 if opts.get('options'):
717 if opts.get('options'):
718 options = []
718 options = []
719 otables = [globalopts]
719 otables = [globalopts]
720 if cmd:
720 if cmd:
721 aliases, entry = cmdutil.findcmd(cmd, table, False)
721 aliases, entry = cmdutil.findcmd(cmd, table, False)
722 otables.append(entry[1])
722 otables.append(entry[1])
723 for t in otables:
723 for t in otables:
724 for o in t:
724 for o in t:
725 if o[0]:
725 if o[0]:
726 options.append('-%s' % o[0])
726 options.append('-%s' % o[0])
727 options.append('--%s' % o[1])
727 options.append('--%s' % o[1])
728 ui.write("%s\n" % "\n".join(options))
728 ui.write("%s\n" % "\n".join(options))
729 return
729 return
730
730
731 cmdlist = cmdutil.findpossible(cmd, table)
731 cmdlist = cmdutil.findpossible(cmd, table)
732 if ui.verbose:
732 if ui.verbose:
733 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
733 cmdlist = [' '.join(c[0]) for c in cmdlist.values()]
734 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
734 ui.write("%s\n" % "\n".join(sorted(cmdlist)))
735
735
736 def debugfsinfo(ui, path = "."):
736 def debugfsinfo(ui, path = "."):
737 file('.debugfsinfo', 'w').write('')
737 file('.debugfsinfo', 'w').write('')
738 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
738 ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no'))
739 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
739 ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no'))
740 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
740 ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo')
741 and 'yes' or 'no'))
741 and 'yes' or 'no'))
742 os.unlink('.debugfsinfo')
742 os.unlink('.debugfsinfo')
743
743
744 def debugrebuildstate(ui, repo, rev="tip"):
744 def debugrebuildstate(ui, repo, rev="tip"):
745 """rebuild the dirstate as it would look like for the given revision"""
745 """rebuild the dirstate as it would look like for the given revision"""
746 ctx = repo[rev]
746 ctx = repo[rev]
747 wlock = repo.wlock()
747 wlock = repo.wlock()
748 try:
748 try:
749 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
749 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
750 finally:
750 finally:
751 wlock.release()
751 wlock.release()
752
752
753 def debugcheckstate(ui, repo):
753 def debugcheckstate(ui, repo):
754 """validate the correctness of the current dirstate"""
754 """validate the correctness of the current dirstate"""
755 parent1, parent2 = repo.dirstate.parents()
755 parent1, parent2 = repo.dirstate.parents()
756 m1 = repo[parent1].manifest()
756 m1 = repo[parent1].manifest()
757 m2 = repo[parent2].manifest()
757 m2 = repo[parent2].manifest()
758 errors = 0
758 errors = 0
759 for f in repo.dirstate:
759 for f in repo.dirstate:
760 state = repo.dirstate[f]
760 state = repo.dirstate[f]
761 if state in "nr" and f not in m1:
761 if state in "nr" and f not in m1:
762 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
762 ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state))
763 errors += 1
763 errors += 1
764 if state in "a" and f in m1:
764 if state in "a" and f in m1:
765 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
765 ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state))
766 errors += 1
766 errors += 1
767 if state in "m" and f not in m1 and f not in m2:
767 if state in "m" and f not in m1 and f not in m2:
768 ui.warn(_("%s in state %s, but not in either manifest\n") %
768 ui.warn(_("%s in state %s, but not in either manifest\n") %
769 (f, state))
769 (f, state))
770 errors += 1
770 errors += 1
771 for f in m1:
771 for f in m1:
772 state = repo.dirstate[f]
772 state = repo.dirstate[f]
773 if state not in "nrm":
773 if state not in "nrm":
774 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
774 ui.warn(_("%s in manifest1, but listed as state %s") % (f, state))
775 errors += 1
775 errors += 1
776 if errors:
776 if errors:
777 error = _(".hg/dirstate inconsistent with current parent's manifest")
777 error = _(".hg/dirstate inconsistent with current parent's manifest")
778 raise util.Abort(error)
778 raise util.Abort(error)
779
779
780 def showconfig(ui, repo, *values, **opts):
780 def showconfig(ui, repo, *values, **opts):
781 """show combined config settings from all hgrc files
781 """show combined config settings from all hgrc files
782
782
783 With no arguments, print names and values of all config items.
783 With no arguments, print names and values of all config items.
784
784
785 With one argument of the form section.name, print just the value
785 With one argument of the form section.name, print just the value
786 of that config item.
786 of that config item.
787
787
788 With multiple arguments, print names and values of all config
788 With multiple arguments, print names and values of all config
789 items with matching section names.
789 items with matching section names.
790
790
791 With the --debug flag, the source (filename and line number) is
791 With the --debug flag, the source (filename and line number) is
792 printed for each config item.
792 printed for each config item.
793 """
793 """
794
794
795 untrusted = bool(opts.get('untrusted'))
795 untrusted = bool(opts.get('untrusted'))
796 if values:
796 if values:
797 if len([v for v in values if '.' in v]) > 1:
797 if len([v for v in values if '.' in v]) > 1:
798 raise util.Abort(_('only one config item permitted'))
798 raise util.Abort(_('only one config item permitted'))
799 for section, name, value in ui.walkconfig(untrusted=untrusted):
799 for section, name, value in ui.walkconfig(untrusted=untrusted):
800 sectname = section + '.' + name
800 sectname = section + '.' + name
801 if values:
801 if values:
802 for v in values:
802 for v in values:
803 if v == section:
803 if v == section:
804 ui.debug('%s: ' %
804 ui.debug('%s: ' %
805 ui.configsource(section, name, untrusted))
805 ui.configsource(section, name, untrusted))
806 ui.write('%s=%s\n' % (sectname, value))
806 ui.write('%s=%s\n' % (sectname, value))
807 elif v == sectname:
807 elif v == sectname:
808 ui.debug('%s: ' %
808 ui.debug('%s: ' %
809 ui.configsource(section, name, untrusted))
809 ui.configsource(section, name, untrusted))
810 ui.write(value, '\n')
810 ui.write(value, '\n')
811 else:
811 else:
812 ui.debug('%s: ' %
812 ui.debug('%s: ' %
813 ui.configsource(section, name, untrusted))
813 ui.configsource(section, name, untrusted))
814 ui.write('%s=%s\n' % (sectname, value))
814 ui.write('%s=%s\n' % (sectname, value))
815
815
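The filtering described above accepts either bare section names or fully qualified section.name keys. A small sketch of that matching over a made-up list of config items (match_config is invented for this example):

def match_config(items, patterns):
    # items: iterable of (section, name, value) triples; with no patterns
    # everything matches, otherwise a pattern may name a whole section
    # or one exact section.name key, as in the help text above.
    for section, name, value in items:
        key = section + '.' + name
        if not patterns or section in patterns or key in patterns:
            yield key, value

items = [('ui', 'username', 'Jane Doe <jane@example.com>'),
         ('diff', 'git', 'True')]
for key, value in match_config(items, ['ui']):
    print('%s=%s' % (key, value))  # ui.username=Jane Doe <jane@example.com>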
816 def debugsetparents(ui, repo, rev1, rev2=None):
816 def debugsetparents(ui, repo, rev1, rev2=None):
817 """manually set the parents of the current working directory
817 """manually set the parents of the current working directory
818
818
819 This is useful for writing repository conversion tools, but should
819 This is useful for writing repository conversion tools, but should
820 be used with care.
820 be used with care.
821 """
821 """
822
822
823 if not rev2:
823 if not rev2:
824 rev2 = hex(nullid)
824 rev2 = hex(nullid)
825
825
826 wlock = repo.wlock()
826 wlock = repo.wlock()
827 try:
827 try:
828 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
828 repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2))
829 finally:
829 finally:
830 wlock.release()
830 wlock.release()
831
831
832 def debugstate(ui, repo, nodates=None):
832 def debugstate(ui, repo, nodates=None):
833 """show the contents of the current dirstate"""
833 """show the contents of the current dirstate"""
834 timestr = ""
834 timestr = ""
835 showdate = not nodates
835 showdate = not nodates
836 for file_, ent in sorted(repo.dirstate._map.iteritems()):
836 for file_, ent in sorted(repo.dirstate._map.iteritems()):
837 if showdate:
837 if showdate:
838 if ent[3] == -1:
838 if ent[3] == -1:
839 # Pad or slice to locale representation
839 # Pad or slice to locale representation
840 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
840 locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(0)))
841 timestr = 'unset'
841 timestr = 'unset'
842 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
842 timestr = timestr[:locale_len] + ' '*(locale_len - len(timestr))
843 else:
843 else:
844 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
844 timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3]))
845 if ent[1] & 020000:
845 if ent[1] & 020000:
846 mode = 'lnk'
846 mode = 'lnk'
847 else:
847 else:
848 mode = '%3o' % (ent[1] & 0777)
848 mode = '%3o' % (ent[1] & 0777)
849 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
849 ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
850 for f in repo.dirstate.copies():
850 for f in repo.dirstate.copies():
851 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
851 ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
852
852
853 def debugdata(ui, file_, rev):
853 def debugdata(ui, file_, rev):
854 """dump the contents of a data file revision"""
854 """dump the contents of a data file revision"""
855 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
855 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i")
856 try:
856 try:
857 ui.write(r.revision(r.lookup(rev)))
857 ui.write(r.revision(r.lookup(rev)))
858 except KeyError:
858 except KeyError:
859 raise util.Abort(_('invalid revision identifier %s') % rev)
859 raise util.Abort(_('invalid revision identifier %s') % rev)
860
860
861 def debugdate(ui, date, range=None, **opts):
861 def debugdate(ui, date, range=None, **opts):
862 """parse and display a date"""
862 """parse and display a date"""
863 if opts["extended"]:
863 if opts["extended"]:
864 d = util.parsedate(date, util.extendeddateformats)
864 d = util.parsedate(date, util.extendeddateformats)
865 else:
865 else:
866 d = util.parsedate(date)
866 d = util.parsedate(date)
867 ui.write("internal: %s %s\n" % d)
867 ui.write("internal: %s %s\n" % d)
868 ui.write("standard: %s\n" % util.datestr(d))
868 ui.write("standard: %s\n" % util.datestr(d))
869 if range:
869 if range:
870 m = util.matchdate(range)
870 m = util.matchdate(range)
871 ui.write("match: %s\n" % m(d[0]))
871 ui.write("match: %s\n" % m(d[0]))
872
872
873 def debugindex(ui, file_):
873 def debugindex(ui, file_):
874 """dump the contents of an index file"""
874 """dump the contents of an index file"""
875 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
875 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
876 ui.write(" rev offset length base linkrev"
876 ui.write(" rev offset length base linkrev"
877 " nodeid p1 p2\n")
877 " nodeid p1 p2\n")
878 for i in r:
878 for i in r:
879 node = r.node(i)
879 node = r.node(i)
880 try:
880 try:
881 pp = r.parents(node)
881 pp = r.parents(node)
882 except:
882 except:
883 pp = [nullid, nullid]
883 pp = [nullid, nullid]
884 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
884 ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
885 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
885 i, r.start(i), r.length(i), r.base(i), r.linkrev(i),
886 short(node), short(pp[0]), short(pp[1])))
886 short(node), short(pp[0]), short(pp[1])))
887
887
888 def debugindexdot(ui, file_):
888 def debugindexdot(ui, file_):
889 """dump an index DAG as a .dot file"""
889 """dump an index DAG as a .dot file"""
890 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
890 r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_)
891 ui.write("digraph G {\n")
891 ui.write("digraph G {\n")
892 for i in r:
892 for i in r:
893 node = r.node(i)
893 node = r.node(i)
894 pp = r.parents(node)
894 pp = r.parents(node)
895 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
895 ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i))
896 if pp[1] != nullid:
896 if pp[1] != nullid:
897 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
897 ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i))
898 ui.write("}\n")
898 ui.write("}\n")
899
899
900 def debuginstall(ui):
900 def debuginstall(ui):
901 '''test Mercurial installation'''
901 '''test Mercurial installation'''
902
902
903 def writetemp(contents):
903 def writetemp(contents):
904 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
904 (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-")
905 f = os.fdopen(fd, "wb")
905 f = os.fdopen(fd, "wb")
906 f.write(contents)
906 f.write(contents)
907 f.close()
907 f.close()
908 return name
908 return name
909
909
910 problems = 0
910 problems = 0
911
911
912 # encoding
912 # encoding
913 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
913 ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
914 try:
914 try:
915 encoding.fromlocal("test")
915 encoding.fromlocal("test")
916 except util.Abort, inst:
916 except util.Abort, inst:
917 ui.write(" %s\n" % inst)
917 ui.write(" %s\n" % inst)
918 ui.write(_(" (check that your locale is properly set)\n"))
918 ui.write(_(" (check that your locale is properly set)\n"))
919 problems += 1
919 problems += 1
920
920
921 # compiled modules
921 # compiled modules
922 ui.status(_("Checking extensions...\n"))
922 ui.status(_("Checking extensions...\n"))
923 try:
923 try:
924 import bdiff, mpatch, base85
924 import bdiff, mpatch, base85
925 except Exception, inst:
925 except Exception, inst:
926 ui.write(" %s\n" % inst)
926 ui.write(" %s\n" % inst)
927 ui.write(_(" One or more extensions could not be found"))
927 ui.write(_(" One or more extensions could not be found"))
928 ui.write(_(" (check that you compiled the extensions)\n"))
928 ui.write(_(" (check that you compiled the extensions)\n"))
929 problems += 1
929 problems += 1
930
930
931 # templates
931 # templates
932 ui.status(_("Checking templates...\n"))
932 ui.status(_("Checking templates...\n"))
933 try:
933 try:
934 import templater
934 import templater
935 templater.templater(templater.templatepath("map-cmdline.default"))
935 templater.templater(templater.templatepath("map-cmdline.default"))
936 except Exception, inst:
936 except Exception, inst:
937 ui.write(" %s\n" % inst)
937 ui.write(" %s\n" % inst)
938 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
938 ui.write(_(" (templates seem to have been installed incorrectly)\n"))
939 problems += 1
939 problems += 1
940
940
941 # patch
941 # patch
942 ui.status(_("Checking patch...\n"))
942 ui.status(_("Checking patch...\n"))
943 patchproblems = 0
943 patchproblems = 0
944 a = "1\n2\n3\n4\n"
944 a = "1\n2\n3\n4\n"
945 b = "1\n2\n3\ninsert\n4\n"
945 b = "1\n2\n3\ninsert\n4\n"
946 fa = writetemp(a)
946 fa = writetemp(a)
947 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
947 d = mdiff.unidiff(a, None, b, None, os.path.basename(fa),
948 os.path.basename(fa))
948 os.path.basename(fa))
949 fd = writetemp(d)
949 fd = writetemp(d)
950
950
951 files = {}
951 files = {}
952 try:
952 try:
953 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
953 patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files)
954 except util.Abort, e:
954 except util.Abort, e:
955 ui.write(_(" patch call failed:\n"))
955 ui.write(_(" patch call failed:\n"))
956 ui.write(" " + str(e) + "\n")
956 ui.write(" " + str(e) + "\n")
957 patchproblems += 1
957 patchproblems += 1
958 else:
958 else:
959 if list(files) != [os.path.basename(fa)]:
959 if list(files) != [os.path.basename(fa)]:
960 ui.write(_(" unexpected patch output!\n"))
960 ui.write(_(" unexpected patch output!\n"))
961 patchproblems += 1
961 patchproblems += 1
962 a = file(fa).read()
962 a = file(fa).read()
963 if a != b:
963 if a != b:
964 ui.write(_(" patch test failed!\n"))
964 ui.write(_(" patch test failed!\n"))
965 patchproblems += 1
965 patchproblems += 1
966
966
967 if patchproblems:
967 if patchproblems:
968 if ui.config('ui', 'patch'):
968 if ui.config('ui', 'patch'):
969 ui.write(_(" (Current patch tool may be incompatible with patch,"
969 ui.write(_(" (Current patch tool may be incompatible with patch,"
970 " or misconfigured. Please check your .hgrc file)\n"))
970 " or misconfigured. Please check your .hgrc file)\n"))
971 else:
971 else:
972 ui.write(_(" Internal patcher failure, please report this error"
972 ui.write(_(" Internal patcher failure, please report this error"
973 " to http://www.selenic.com/mercurial/bts\n"))
973 " to http://www.selenic.com/mercurial/bts\n"))
974 problems += patchproblems
974 problems += patchproblems
975
975
976 os.unlink(fa)
976 os.unlink(fa)
977 os.unlink(fd)
977 os.unlink(fd)
978
978
979 # editor
979 # editor
980 ui.status(_("Checking commit editor...\n"))
980 ui.status(_("Checking commit editor...\n"))
981 editor = ui.geteditor()
981 editor = ui.geteditor()
982 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
982 cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0])
983 if not cmdpath:
983 if not cmdpath:
984 if editor == 'vi':
984 if editor == 'vi':
985 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
985 ui.write(_(" No commit editor set and can't find vi in PATH\n"))
986 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
986 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
987 else:
987 else:
988 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
988 ui.write(_(" Can't find editor '%s' in PATH\n") % editor)
989 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
989 ui.write(_(" (specify a commit editor in your .hgrc file)\n"))
990 problems += 1
990 problems += 1
991
991
992 # check username
992 # check username
993 ui.status(_("Checking username...\n"))
993 ui.status(_("Checking username...\n"))
994 user = os.environ.get("HGUSER")
994 user = os.environ.get("HGUSER")
995 if user is None:
995 if user is None:
996 user = ui.config("ui", "username")
996 user = ui.config("ui", "username")
997 if user is None:
997 if user is None:
998 user = os.environ.get("EMAIL")
998 user = os.environ.get("EMAIL")
999 if not user:
999 if not user:
1000 ui.warn(" ")
1000 ui.warn(" ")
1001 ui.username()
1001 ui.username()
1002 ui.write(_(" (specify a username in your .hgrc file)\n"))
1002 ui.write(_(" (specify a username in your .hgrc file)\n"))
1003
1003
1004 if not problems:
1004 if not problems:
1005 ui.status(_("No problems detected\n"))
1005 ui.status(_("No problems detected\n"))
1006 else:
1006 else:
1007 ui.write(_("%s problems detected,"
1007 ui.write(_("%s problems detected,"
1008 " please check your install!\n") % problems)
1008 " please check your install!\n") % problems)
1009
1009
1010 return problems
1010 return problems
1011
1011
1012 def debugrename(ui, repo, file1, *pats, **opts):
1012 def debugrename(ui, repo, file1, *pats, **opts):
1013 """dump rename information"""
1013 """dump rename information"""
1014
1014
1015 ctx = repo[opts.get('rev')]
1015 ctx = repo[opts.get('rev')]
1016 m = cmdutil.match(repo, (file1,) + pats, opts)
1016 m = cmdutil.match(repo, (file1,) + pats, opts)
1017 for abs in ctx.walk(m):
1017 for abs in ctx.walk(m):
1018 fctx = ctx[abs]
1018 fctx = ctx[abs]
1019 o = fctx.filelog().renamed(fctx.filenode())
1019 o = fctx.filelog().renamed(fctx.filenode())
1020 rel = m.rel(abs)
1020 rel = m.rel(abs)
1021 if o:
1021 if o:
1022 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1022 ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1])))
1023 else:
1023 else:
1024 ui.write(_("%s not renamed\n") % rel)
1024 ui.write(_("%s not renamed\n") % rel)
1025
1025
1026 def debugwalk(ui, repo, *pats, **opts):
1026 def debugwalk(ui, repo, *pats, **opts):
1027 """show how files match on given patterns"""
1027 """show how files match on given patterns"""
1028 m = cmdutil.match(repo, pats, opts)
1028 m = cmdutil.match(repo, pats, opts)
1029 items = list(repo.walk(m))
1029 items = list(repo.walk(m))
1030 if not items:
1030 if not items:
1031 return
1031 return
1032 fmt = 'f %%-%ds %%-%ds %%s' % (
1032 fmt = 'f %%-%ds %%-%ds %%s' % (
1033 max([len(abs) for abs in items]),
1033 max([len(abs) for abs in items]),
1034 max([len(m.rel(abs)) for abs in items]))
1034 max([len(m.rel(abs)) for abs in items]))
1035 for abs in items:
1035 for abs in items:
1036 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1036 line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
1037 ui.write("%s\n" % line.rstrip())
1037 ui.write("%s\n" % line.rstrip())
1038
1038
1039 def diff(ui, repo, *pats, **opts):
1039 def diff(ui, repo, *pats, **opts):
1040 """diff repository (or selected files)
1040 """diff repository (or selected files)
1041
1041
1042 Show differences between revisions for the specified files.
1042 Show differences between revisions for the specified files.
1043
1043
1044 Differences between files are shown using the unified diff format.
1044 Differences between files are shown using the unified diff format.
1045
1045
1046 NOTE: diff may generate unexpected results for merges, as it will
1046 NOTE: diff may generate unexpected results for merges, as it will
1047 default to comparing against the working directory's first parent
1047 default to comparing against the working directory's first parent
1048 changeset if no revisions are specified.
1048 changeset if no revisions are specified.
1049
1049
1050 When two revision arguments are given, then changes are shown
1050 When two revision arguments are given, then changes are shown
1051 between those revisions. If only one revision is specified then
1051 between those revisions. If only one revision is specified then
1052 that revision is compared to the working directory, and, when no
1052 that revision is compared to the working directory, and, when no
1053 revisions are specified, the working directory files are compared
1053 revisions are specified, the working directory files are compared
1054 to its parent.
1054 to its parent.
1055
1055
1056 Without the -a/--text option, diff will avoid generating diffs of
1056 Without the -a/--text option, diff will avoid generating diffs of
1057 files it detects as binary. With -a, diff will generate a diff
1057 files it detects as binary. With -a, diff will generate a diff
1058 anyway, probably with undesirable results.
1058 anyway, probably with undesirable results.
1059
1059
1060 Use the -g/--git option to generate diffs in the git extended diff
1060 Use the -g/--git option to generate diffs in the git extended diff
1061 format. For more information, read 'hg help diffs'.
1061 format. For more information, read 'hg help diffs'.
1062 """
1062 """
1063
1063
1064 revs = opts.get('rev')
1064 revs = opts.get('rev')
1065 change = opts.get('change')
1065 change = opts.get('change')
1066
1066
1067 if revs and change:
1067 if revs and change:
1068 msg = _('cannot specify --rev and --change at the same time')
1068 msg = _('cannot specify --rev and --change at the same time')
1069 raise util.Abort(msg)
1069 raise util.Abort(msg)
1070 elif change:
1070 elif change:
1071 node2 = repo.lookup(change)
1071 node2 = repo.lookup(change)
1072 node1 = repo[node2].parents()[0].node()
1072 node1 = repo[node2].parents()[0].node()
1073 else:
1073 else:
1074 node1, node2 = cmdutil.revpair(repo, revs)
1074 node1, node2 = cmdutil.revpair(repo, revs)
1075
1075
1076 m = cmdutil.match(repo, pats, opts)
1076 m = cmdutil.match(repo, pats, opts)
1077 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1077 it = patch.diff(repo, node1, node2, match=m, opts=patch.diffopts(ui, opts))
1078 for chunk in it:
1078 for chunk in it:
1079 ui.write(chunk)
1079 ui.write(chunk)
1080
1080
1081 def export(ui, repo, *changesets, **opts):
1081 def export(ui, repo, *changesets, **opts):
1082 """dump the header and diffs for one or more changesets
1082 """dump the header and diffs for one or more changesets
1083
1083
1084 Print the changeset header and diffs for one or more revisions.
1084 Print the changeset header and diffs for one or more revisions.
1085
1085
1086 The information shown in the changeset header is: author,
1086 The information shown in the changeset header is: author,
1087 changeset hash, parent(s) and commit comment.
1087 changeset hash, parent(s) and commit comment.
1088
1088
1089 NOTE: export may generate unexpected diff output for merge
1089 NOTE: export may generate unexpected diff output for merge
1090 changesets, as it will compare the merge changeset against its
1090 changesets, as it will compare the merge changeset against its
1091 first parent only.
1091 first parent only.
1092
1092
1093 Output may be to a file, in which case the name of the file is
1093 Output may be to a file, in which case the name of the file is
1094 given using a format string. The formatting rules are as follows:
1094 given using a format string. The formatting rules are as follows:
1095
1095
1096 %% literal "%" character
1096 %% literal "%" character
1097 %H changeset hash (40 bytes of hexadecimal)
1097 %H changeset hash (40 bytes of hexadecimal)
1098 %N number of patches being generated
1098 %N number of patches being generated
1099 %R changeset revision number
1099 %R changeset revision number
1100 %b basename of the exporting repository
1100 %b basename of the exporting repository
1101 %h short-form changeset hash (12 bytes of hexadecimal)
1101 %h short-form changeset hash (12 bytes of hexadecimal)
1102 %n zero-padded sequence number, starting at 1
1102 %n zero-padded sequence number, starting at 1
1103 %r zero-padded changeset revision number
1103 %r zero-padded changeset revision number
1104
1104
1105 Without the -a/--text option, export will avoid generating diffs
1105 Without the -a/--text option, export will avoid generating diffs
1106 of files it detects as binary. With -a, export will generate a
1106 of files it detects as binary. With -a, export will generate a
1107 diff anyway, probably with undesirable results.
1107 diff anyway, probably with undesirable results.
1108
1108
1109 Use the -g/--git option to generate diffs in the git extended diff
1109 Use the -g/--git option to generate diffs in the git extended diff
1110 format. Read the diffs help topic for more information.
1110 format. Read the diffs help topic for more information.
1111
1111
1112 With the --switch-parent option, the diff will be against the
1112 With the --switch-parent option, the diff will be against the
1113 second parent. It can be useful to review a merge.
1113 second parent. It can be useful to review a merge.
1114 """
1114 """
1115 if not changesets:
1115 if not changesets:
1116 raise util.Abort(_("export requires at least one changeset"))
1116 raise util.Abort(_("export requires at least one changeset"))
1117 revs = cmdutil.revrange(repo, changesets)
1117 revs = cmdutil.revrange(repo, changesets)
1118 if len(revs) > 1:
1118 if len(revs) > 1:
1119 ui.note(_('exporting patches:\n'))
1119 ui.note(_('exporting patches:\n'))
1120 else:
1120 else:
1121 ui.note(_('exporting patch:\n'))
1121 ui.note(_('exporting patch:\n'))
1122 patch.export(repo, revs, template=opts.get('output'),
1122 patch.export(repo, revs, template=opts.get('output'),
1123 switch_parent=opts.get('switch_parent'),
1123 switch_parent=opts.get('switch_parent'),
1124 opts=patch.diffopts(ui, opts))
1124 opts=patch.diffopts(ui, opts))
1125
1125
1126 def grep(ui, repo, pattern, *pats, **opts):
1126 def grep(ui, repo, pattern, *pats, **opts):
1127 """search for a pattern in specified files and revisions
1127 """search for a pattern in specified files and revisions
1128
1128
1129 Search revisions of files for a regular expression.
1129 Search revisions of files for a regular expression.
1130
1130
1131 This command behaves differently than Unix grep. It only accepts
1131 This command behaves differently than Unix grep. It only accepts
1132 Python/Perl regexps. It searches repository history, not the
1132 Python/Perl regexps. It searches repository history, not the
1133 working directory. It always prints the revision number in which a
1133 working directory. It always prints the revision number in which a
1134 match appears.
1134 match appears.
1135
1135
1136 By default, grep only prints output for the first revision of a
1136 By default, grep only prints output for the first revision of a
1137 file in which it finds a match. To get it to print every revision
1137 file in which it finds a match. To get it to print every revision
1138 that contains a change in match status ("-" for a match that
1138 that contains a change in match status ("-" for a match that
1139 becomes a non-match, or "+" for a non-match that becomes a match),
1139 becomes a non-match, or "+" for a non-match that becomes a match),
1140 use the --all flag.
1140 use the --all flag.
1141 """
1141 """
1142 reflags = 0
1142 reflags = 0
1143 if opts.get('ignore_case'):
1143 if opts.get('ignore_case'):
1144 reflags |= re.I
1144 reflags |= re.I
1145 try:
1145 try:
1146 regexp = re.compile(pattern, reflags)
1146 regexp = re.compile(pattern, reflags)
1147 except Exception, inst:
1147 except Exception, inst:
1148 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1148 ui.warn(_("grep: invalid match pattern: %s\n") % inst)
1149 return None
1149 return None
1150 sep, eol = ':', '\n'
1150 sep, eol = ':', '\n'
1151 if opts.get('print0'):
1151 if opts.get('print0'):
1152 sep = eol = '\0'
1152 sep = eol = '\0'
1153
1153
1154 fcache = {}
1154 fcache = {}
1155 forder = []
1155 forder = []
1156 def getfile(fn):
1156 def getfile(fn):
1157 if fn not in fcache:
1157 if fn not in fcache:
1158 if len(fcache) > 20:
1158 if len(fcache) > 20:
1159 del fcache[forder.pop(0)]
1159 del fcache[forder.pop(0)]
1160 fcache[fn] = repo.file(fn)
1160 fcache[fn] = repo.file(fn)
1161 else:
1161 else:
1162 forder.remove(fn)
1162 forder.remove(fn)
1163
1163
1164 forder.append(fn)
1164 forder.append(fn)
1165 return fcache[fn]
1165 return fcache[fn]
1166
1166
1167 def matchlines(body):
1167 def matchlines(body):
1168 begin = 0
1168 begin = 0
1169 linenum = 0
1169 linenum = 0
1170 while True:
1170 while True:
1171 match = regexp.search(body, begin)
1171 match = regexp.search(body, begin)
1172 if not match:
1172 if not match:
1173 break
1173 break
1174 mstart, mend = match.span()
1174 mstart, mend = match.span()
1175 linenum += body.count('\n', begin, mstart) + 1
1175 linenum += body.count('\n', begin, mstart) + 1
1176 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1176 lstart = body.rfind('\n', begin, mstart) + 1 or begin
1177 begin = body.find('\n', mend) + 1 or len(body)
1177 begin = body.find('\n', mend) + 1 or len(body)
1178 lend = begin - 1
1178 lend = begin - 1
1179 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1179 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
1180
1180
1181 class linestate(object):
1181 class linestate(object):
1182 def __init__(self, line, linenum, colstart, colend):
1182 def __init__(self, line, linenum, colstart, colend):
1183 self.line = line
1183 self.line = line
1184 self.linenum = linenum
1184 self.linenum = linenum
1185 self.colstart = colstart
1185 self.colstart = colstart
1186 self.colend = colend
1186 self.colend = colend
1187
1187
1188 def __hash__(self):
1188 def __hash__(self):
1189 return hash((self.linenum, self.line))
1189 return hash((self.linenum, self.line))
1190
1190
1191 def __eq__(self, other):
1191 def __eq__(self, other):
1192 return self.line == other.line
1192 return self.line == other.line
1193
1193
1194 matches = {}
1194 matches = {}
1195 copies = {}
1195 copies = {}
1196 def grepbody(fn, rev, body):
1196 def grepbody(fn, rev, body):
1197 matches[rev].setdefault(fn, [])
1197 matches[rev].setdefault(fn, [])
1198 m = matches[rev][fn]
1198 m = matches[rev][fn]
1199 for lnum, cstart, cend, line in matchlines(body):
1199 for lnum, cstart, cend, line in matchlines(body):
1200 s = linestate(line, lnum, cstart, cend)
1200 s = linestate(line, lnum, cstart, cend)
1201 m.append(s)
1201 m.append(s)
1202
1202
1203 def difflinestates(a, b):
1203 def difflinestates(a, b):
1204 sm = difflib.SequenceMatcher(None, a, b)
1204 sm = difflib.SequenceMatcher(None, a, b)
1205 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1205 for tag, alo, ahi, blo, bhi in sm.get_opcodes():
1206 if tag == 'insert':
1206 if tag == 'insert':
1207 for i in xrange(blo, bhi):
1207 for i in xrange(blo, bhi):
1208 yield ('+', b[i])
1208 yield ('+', b[i])
1209 elif tag == 'delete':
1209 elif tag == 'delete':
1210 for i in xrange(alo, ahi):
1210 for i in xrange(alo, ahi):
1211 yield ('-', a[i])
1211 yield ('-', a[i])
1212 elif tag == 'replace':
1212 elif tag == 'replace':
1213 for i in xrange(alo, ahi):
1213 for i in xrange(alo, ahi):
1214 yield ('-', a[i])
1214 yield ('-', a[i])
1215 for i in xrange(blo, bhi):
1215 for i in xrange(blo, bhi):
1216 yield ('+', b[i])
1216 yield ('+', b[i])
1217
1217
1218 prev = {}
1218 prev = {}
1219 def display(fn, rev, states, prevstates):
1219 def display(fn, rev, states, prevstates):
1220 datefunc = ui.quiet and util.shortdate or util.datestr
1220 datefunc = ui.quiet and util.shortdate or util.datestr
1221 found = False
1221 found = False
1222 filerevmatches = {}
1222 filerevmatches = {}
1223 r = prev.get(fn, -1)
1223 r = prev.get(fn, -1)
1224 if opts.get('all'):
1224 if opts.get('all'):
1225 iter = difflinestates(states, prevstates)
1225 iter = difflinestates(states, prevstates)
1226 else:
1226 else:
1227 iter = [('', l) for l in prevstates]
1227 iter = [('', l) for l in prevstates]
1228 for change, l in iter:
1228 for change, l in iter:
1229 cols = [fn, str(r)]
1229 cols = [fn, str(r)]
1230 if opts.get('line_number'):
1230 if opts.get('line_number'):
1231 cols.append(str(l.linenum))
1231 cols.append(str(l.linenum))
1232 if opts.get('all'):
1232 if opts.get('all'):
1233 cols.append(change)
1233 cols.append(change)
1234 if opts.get('user'):
1234 if opts.get('user'):
1235 cols.append(ui.shortuser(get(r)[1]))
1235 cols.append(ui.shortuser(get(r)[1]))
1236 if opts.get('date'):
1236 if opts.get('date'):
1237 cols.append(datefunc(get(r)[2]))
1237 cols.append(datefunc(get(r)[2]))
1238 if opts.get('files_with_matches'):
1238 if opts.get('files_with_matches'):
1239 c = (fn, r)
1239 c = (fn, r)
1240 if c in filerevmatches:
1240 if c in filerevmatches:
1241 continue
1241 continue
1242 filerevmatches[c] = 1
1242 filerevmatches[c] = 1
1243 else:
1243 else:
1244 cols.append(l.line)
1244 cols.append(l.line)
1245 ui.write(sep.join(cols), eol)
1245 ui.write(sep.join(cols), eol)
1246 found = True
1246 found = True
1247 return found
1247 return found
1248
1248
1249 fstate = {}
1249 fstate = {}
1250 skip = {}
1250 skip = {}
1251 get = util.cachefunc(lambda r: repo[r].changeset())
1251 get = util.cachefunc(lambda r: repo[r].changeset())
1252 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1252 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1253 found = False
1253 found = False
1254 follow = opts.get('follow')
1254 follow = opts.get('follow')
1255 for st, rev, fns in changeiter:
1255 for st, rev, fns in changeiter:
1256 if st == 'window':
1256 if st == 'window':
1257 matches.clear()
1257 matches.clear()
1258 elif st == 'add':
1258 elif st == 'add':
1259 ctx = repo[rev]
1259 ctx = repo[rev]
1260 matches[rev] = {}
1260 matches[rev] = {}
1261 for fn in fns:
1261 for fn in fns:
1262 if fn in skip:
1262 if fn in skip:
1263 continue
1263 continue
1264 try:
1264 try:
1265 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1265 grepbody(fn, rev, getfile(fn).read(ctx.filenode(fn)))
1266 fstate.setdefault(fn, [])
1266 fstate.setdefault(fn, [])
1267 if follow:
1267 if follow:
1268 copied = getfile(fn).renamed(ctx.filenode(fn))
1268 copied = getfile(fn).renamed(ctx.filenode(fn))
1269 if copied:
1269 if copied:
1270 copies.setdefault(rev, {})[fn] = copied[0]
1270 copies.setdefault(rev, {})[fn] = copied[0]
1271 except error.LookupError:
1271 except error.LookupError:
1272 pass
1272 pass
1273 elif st == 'iter':
1273 elif st == 'iter':
1274 for fn, m in sorted(matches[rev].items()):
1274 for fn, m in sorted(matches[rev].items()):
1275 copy = copies.get(rev, {}).get(fn)
1275 copy = copies.get(rev, {}).get(fn)
1276 if fn in skip:
1276 if fn in skip:
1277 if copy:
1277 if copy:
1278 skip[copy] = True
1278 skip[copy] = True
1279 continue
1279 continue
1280 if fn in prev or fstate[fn]:
1280 if fn in prev or fstate[fn]:
1281 r = display(fn, rev, m, fstate[fn])
1281 r = display(fn, rev, m, fstate[fn])
1282 found = found or r
1282 found = found or r
1283 if r and not opts.get('all'):
1283 if r and not opts.get('all'):
1284 skip[fn] = True
1284 skip[fn] = True
1285 if copy:
1285 if copy:
1286 skip[copy] = True
1286 skip[copy] = True
1287 fstate[fn] = m
1287 fstate[fn] = m
1288 if copy:
1288 if copy:
1289 fstate[copy] = m
1289 fstate[copy] = m
1290 prev[fn] = rev
1290 prev[fn] = rev
1291
1291
1292 for fn, state in sorted(fstate.items()):
1292 for fn, state in sorted(fstate.items()):
1293 if fn in skip:
1293 if fn in skip:
1294 continue
1294 continue
1295 if fn not in copies.get(prev[fn], {}):
1295 if fn not in copies.get(prev[fn], {}):
1296 found = display(fn, rev, {}, state) or found
1296 found = display(fn, rev, {}, state) or found
1297 return (not found and 1) or 0
1297 return (not found and 1) or 0
1298
1298
1299 def heads(ui, repo, *branchrevs, **opts):
1299 def heads(ui, repo, *branchrevs, **opts):
1300 """show current repository heads or show branch heads
1300 """show current repository heads or show branch heads
1301
1301
1302 With no arguments, show all repository head changesets.
1302 With no arguments, show all repository head changesets.
1303
1303
1304 If branch or revisions names are given this will show the heads of
1304 If branch or revisions names are given this will show the heads of
1305 the specified branches or the branches those revisions are tagged
1305 the specified branches or the branches those revisions are tagged
1306 with.
1306 with.
1307
1307
1308 Repository "heads" are changesets that don't have child
1308 Repository "heads" are changesets that don't have child
1309 changesets. They are where development generally takes place and
1309 changesets. They are where development generally takes place and
1310 are the usual targets for update and merge operations.
1310 are the usual targets for update and merge operations.
1311
1311
1312 Branch heads are changesets that have a given branch tag, but have
1312 Branch heads are changesets that have a given branch tag, but have
1313 no child changesets with that tag. They are usually where
1313 no child changesets with that tag. They are usually where
1314 development on the given branch takes place.
1314 development on the given branch takes place.
1315 """
1315 """
1316 if opts.get('rev'):
1316 if opts.get('rev'):
1317 start = repo.lookup(opts['rev'])
1317 start = repo.lookup(opts['rev'])
1318 else:
1318 else:
1319 start = None
1319 start = None
1320 closed = opts.get('closed')
1320 closed = opts.get('closed')
1321 hideinactive, _heads = opts.get('active'), None
1321 hideinactive, _heads = opts.get('active'), None
1322 if not branchrevs:
1322 if not branchrevs:
1323 # Assume we're looking repo-wide heads if no revs were specified.
1323 # Assume we're looking repo-wide heads if no revs were specified.
1324 heads = repo.heads(start, closed=closed)
1324 heads = repo.heads(start, closed=closed)
1325 else:
1325 else:
1326 if hideinactive:
1326 if hideinactive:
1327 _heads = repo.heads(start, closed=closed)
1327 _heads = repo.heads(start, closed=closed)
1328 heads = []
1328 heads = []
1329 visitedset = set()
1329 visitedset = set()
1330 for branchrev in branchrevs:
1330 for branchrev in branchrevs:
1331 branch = repo[branchrev].branch()
1331 branch = repo[branchrev].branch()
1332 if branch in visitedset:
1332 if branch in visitedset:
1333 continue
1333 continue
1334 visitedset.add(branch)
1334 visitedset.add(branch)
1335 bheads = repo.branchheads(branch, start, closed=closed)
1335 bheads = repo.branchheads(branch, start, closed=closed)
1336 if not bheads:
1336 if not bheads:
1337 if branch != branchrev:
1337 if branch != branchrev:
1338 ui.warn(_("no changes on branch %s containing %s are "
1338 ui.warn(_("no changes on branch %s containing %s are "
1339 "reachable from %s\n")
1339 "reachable from %s\n")
1340 % (branch, branchrev, opts.get('rev')))
1340 % (branch, branchrev, opts.get('rev')))
1341 else:
1341 else:
1342 ui.warn(_("no changes on branch %s are reachable from %s\n")
1342 ui.warn(_("no changes on branch %s are reachable from %s\n")
1343 % (branch, opts.get('rev')))
1343 % (branch, opts.get('rev')))
1344 if hideinactive:
1344 if hideinactive:
1345 bheads = [bhead for bhead in bheads if bhead in _heads]
1345 bheads = [bhead for bhead in bheads if bhead in _heads]
1346 heads.extend(bheads)
1346 heads.extend(bheads)
1347 if not heads:
1347 if not heads:
1348 return 1
1348 return 1
1349 displayer = cmdutil.show_changeset(ui, repo, opts)
1349 displayer = cmdutil.show_changeset(ui, repo, opts)
1350 for n in heads:
1350 for n in heads:
1351 displayer.show(repo[n])
1351 displayer.show(repo[n])
1352
1352
1353 def help_(ui, name=None, with_version=False):
1353 def help_(ui, name=None, with_version=False):
1354 """show help for a given topic or a help overview
1354 """show help for a given topic or a help overview
1355
1355
1356 With no arguments, print a list of commands and short help.
1356 With no arguments, print a list of commands and short help.
1357
1357
1358 Given a topic, extension, or command name, print help for that
1358 Given a topic, extension, or command name, print help for that
1359 topic."""
1359 topic."""
1360 option_lists = []
1360 option_lists = []
1361
1361
1362 def addglobalopts(aliases):
1362 def addglobalopts(aliases):
1363 if ui.verbose:
1363 if ui.verbose:
1364 option_lists.append((_("global options:"), globalopts))
1364 option_lists.append((_("global options:"), globalopts))
1365 if name == 'shortlist':
1365 if name == 'shortlist':
1366 option_lists.append((_('use "hg help" for the full list '
1366 option_lists.append((_('use "hg help" for the full list '
1367 'of commands'), ()))
1367 'of commands'), ()))
1368 else:
1368 else:
1369 if name == 'shortlist':
1369 if name == 'shortlist':
1370 msg = _('use "hg help" for the full list of commands '
1370 msg = _('use "hg help" for the full list of commands '
1371 'or "hg -v" for details')
1371 'or "hg -v" for details')
1372 elif aliases:
1372 elif aliases:
1373 msg = _('use "hg -v help%s" to show aliases and '
1373 msg = _('use "hg -v help%s" to show aliases and '
1374 'global options') % (name and " " + name or "")
1374 'global options') % (name and " " + name or "")
1375 else:
1375 else:
1376 msg = _('use "hg -v help %s" to show global options') % name
1376 msg = _('use "hg -v help %s" to show global options') % name
1377 option_lists.append((msg, ()))
1377 option_lists.append((msg, ()))
1378
1378
1379 def helpcmd(name):
1379 def helpcmd(name):
1380 if with_version:
1380 if with_version:
1381 version_(ui)
1381 version_(ui)
1382 ui.write('\n')
1382 ui.write('\n')
1383
1383
1384 try:
1384 try:
1385 aliases, i = cmdutil.findcmd(name, table, False)
1385 aliases, i = cmdutil.findcmd(name, table, False)
1386 except error.AmbiguousCommand, inst:
1386 except error.AmbiguousCommand, inst:
1387 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1387 select = lambda c: c.lstrip('^').startswith(inst.args[0])
1388 helplist(_('list of commands:\n\n'), select)
1388 helplist(_('list of commands:\n\n'), select)
1389 return
1389 return
1390
1390
1391 # synopsis
1391 # synopsis
1392 if len(i) > 2:
1392 if len(i) > 2:
1393 if i[2].startswith('hg'):
1393 if i[2].startswith('hg'):
1394 ui.write("%s\n" % i[2])
1394 ui.write("%s\n" % i[2])
1395 else:
1395 else:
1396 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1396 ui.write('hg %s %s\n' % (aliases[0], i[2]))
1397 else:
1397 else:
1398 ui.write('hg %s\n' % aliases[0])
1398 ui.write('hg %s\n' % aliases[0])
1399
1399
1400 # aliases
1400 # aliases
1401 if not ui.quiet and len(aliases) > 1:
1401 if not ui.quiet and len(aliases) > 1:
1402 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1402 ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
1403
1403
1404 # description
1404 # description
1405 doc = gettext(i[0].__doc__)
1405 doc = gettext(i[0].__doc__)
1406 if not doc:
1406 if not doc:
1407 doc = _("(no help text available)")
1407 doc = _("(no help text available)")
1408 if ui.quiet:
1408 if ui.quiet:
1409 doc = doc.splitlines(0)[0]
1409 doc = doc.splitlines(0)[0]
1410 ui.write("\n%s\n" % doc.rstrip())
1410 ui.write("\n%s\n" % doc.rstrip())
1411
1411
1412 if not ui.quiet:
1412 if not ui.quiet:
1413 # options
1413 # options
1414 if i[1]:
1414 if i[1]:
1415 option_lists.append((_("options:\n"), i[1]))
1415 option_lists.append((_("options:\n"), i[1]))
1416
1416
1417 addglobalopts(False)
1417 addglobalopts(False)
1418
1418
1419 def helplist(header, select=None):
1419 def helplist(header, select=None):
1420 h = {}
1420 h = {}
1421 cmds = {}
1421 cmds = {}
1422 for c, e in table.iteritems():
1422 for c, e in table.iteritems():
1423 f = c.split("|", 1)[0]
1423 f = c.split("|", 1)[0]
1424 if select and not select(f):
1424 if select and not select(f):
1425 continue
1425 continue
1426 if (not select and name != 'shortlist' and
1426 if (not select and name != 'shortlist' and
1427 e[0].__module__ != __name__):
1427 e[0].__module__ != __name__):
1428 continue
1428 continue
1429 if name == "shortlist" and not f.startswith("^"):
1429 if name == "shortlist" and not f.startswith("^"):
1430 continue
1430 continue
1431 f = f.lstrip("^")
1431 f = f.lstrip("^")
1432 if not ui.debugflag and f.startswith("debug"):
1432 if not ui.debugflag and f.startswith("debug"):
1433 continue
1433 continue
1434 doc = gettext(e[0].__doc__)
1434 doc = gettext(e[0].__doc__)
1435 if not doc:
1435 if not doc:
1436 doc = _("(no help text available)")
1436 doc = _("(no help text available)")
1437 h[f] = doc.splitlines(0)[0].rstrip()
1437 h[f] = doc.splitlines(0)[0].rstrip()
1438 cmds[f] = c.lstrip("^")
1438 cmds[f] = c.lstrip("^")
1439
1439
1440 if not h:
1440 if not h:
1441 ui.status(_('no commands defined\n'))
1441 ui.status(_('no commands defined\n'))
1442 return
1442 return
1443
1443
1444 ui.status(header)
1444 ui.status(header)
1445 fns = sorted(h)
1445 fns = sorted(h)
1446 m = max(map(len, fns))
1446 m = max(map(len, fns))
1447 for f in fns:
1447 for f in fns:
1448 if ui.verbose:
1448 if ui.verbose:
1449 commands = cmds[f].replace("|",", ")
1449 commands = cmds[f].replace("|",", ")
1450 ui.write(" %s:\n %s\n"%(commands, h[f]))
1450 ui.write(" %s:\n %s\n"%(commands, h[f]))
1451 else:
1451 else:
1452 ui.write(' %-*s %s\n' % (m, f, h[f]))
1452 ui.write(' %-*s %s\n' % (m, f, h[f]))
1453
1453
1454 exts = list(extensions.extensions())
1454 exts = list(extensions.extensions())
1455 if exts and name != 'shortlist':
1455 if exts and name != 'shortlist':
1456 ui.write(_('\nenabled extensions:\n\n'))
1456 ui.write(_('\nenabled extensions:\n\n'))
1457 maxlength = 0
1457 maxlength = 0
1458 exthelps = []
1458 exthelps = []
1459 for ename, ext in exts:
1459 for ename, ext in exts:
1460 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1460 doc = (gettext(ext.__doc__) or _('(no help text available)'))
1461 ename = ename.split('.')[-1]
1461 ename = ename.split('.')[-1]
1462 maxlength = max(len(ename), maxlength)
1462 maxlength = max(len(ename), maxlength)
1463 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1463 exthelps.append((ename, doc.splitlines(0)[0].strip()))
1464 for ename, text in exthelps:
1464 for ename, text in exthelps:
1465 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1465 ui.write(_(' %s %s\n') % (ename.ljust(maxlength), text))
1466
1466
1467 if not ui.quiet:
1467 if not ui.quiet:
1468 addglobalopts(True)
1468 addglobalopts(True)
1469
1469
1470 def helptopic(name):
1470 def helptopic(name):
1471 for names, header, doc in help.helptable:
1471 for names, header, doc in help.helptable:
1472 if name in names:
1472 if name in names:
1473 break
1473 break
1474 else:
1474 else:
1475 raise error.UnknownCommand(name)
1475 raise error.UnknownCommand(name)
1476
1476
1477 # description
1477 # description
1478 if not doc:
1478 if not doc:
1479 doc = _("(no help text available)")
1479 doc = _("(no help text available)")
1480 if hasattr(doc, '__call__'):
1480 if hasattr(doc, '__call__'):
1481 doc = doc()
1481 doc = doc()
1482
1482
1483 ui.write("%s\n" % header)
1483 ui.write("%s\n" % header)
1484 ui.write("%s\n" % doc.rstrip())
1484 ui.write("%s\n" % doc.rstrip())
1485
1485
1486 def helpext(name):
1486 def helpext(name):
1487 try:
1487 try:
1488 mod = extensions.find(name)
1488 mod = extensions.find(name)
1489 except KeyError:
1489 except KeyError:
1490 raise error.UnknownCommand(name)
1490 raise error.UnknownCommand(name)
1491
1491
1492 doc = gettext(mod.__doc__) or _('no help text available')
1492 doc = gettext(mod.__doc__) or _('no help text available')
1493 doc = doc.splitlines(0)
1493 doc = doc.splitlines(0)
1494 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1494 ui.write(_('%s extension - %s\n') % (name.split('.')[-1], doc[0]))
1495 for d in doc[1:]:
1495 for d in doc[1:]:
1496 ui.write(d, '\n')
1496 ui.write(d, '\n')
1497
1497
1498 ui.status('\n')
1498 ui.status('\n')
1499
1499
1500 try:
1500 try:
1501 ct = mod.cmdtable
1501 ct = mod.cmdtable
1502 except AttributeError:
1502 except AttributeError:
1503 ct = {}
1503 ct = {}
1504
1504
1505 modcmds = set([c.split('|', 1)[0] for c in ct])
1505 modcmds = set([c.split('|', 1)[0] for c in ct])
1506 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1506 helplist(_('list of commands:\n\n'), modcmds.__contains__)
1507
1507
1508 if name and name != 'shortlist':
1508 if name and name != 'shortlist':
1509 i = None
1509 i = None
1510 for f in (helptopic, helpcmd, helpext):
1510 for f in (helptopic, helpcmd, helpext):
1511 try:
1511 try:
1512 f(name)
1512 f(name)
1513 i = None
1513 i = None
1514 break
1514 break
1515 except error.UnknownCommand, inst:
1515 except error.UnknownCommand, inst:
1516 i = inst
1516 i = inst
1517 if i:
1517 if i:
1518 raise i
1518 raise i
1519
1519
1520 else:
1520 else:
1521 # program name
1521 # program name
1522 if ui.verbose or with_version:
1522 if ui.verbose or with_version:
1523 version_(ui)
1523 version_(ui)
1524 else:
1524 else:
1525 ui.status(_("Mercurial Distributed SCM\n"))
1525 ui.status(_("Mercurial Distributed SCM\n"))
1526 ui.status('\n')
1526 ui.status('\n')
1527
1527
1528 # list of commands
1528 # list of commands
1529 if name == "shortlist":
1529 if name == "shortlist":
1530 header = _('basic commands:\n\n')
1530 header = _('basic commands:\n\n')
1531 else:
1531 else:
1532 header = _('list of commands:\n\n')
1532 header = _('list of commands:\n\n')
1533
1533
1534 helplist(header)
1534 helplist(header)
1535
1535
1536 # list all option lists
1536 # list all option lists
1537 opt_output = []
1537 opt_output = []
1538 for title, options in option_lists:
1538 for title, options in option_lists:
1539 opt_output.append(("\n%s" % title, None))
1539 opt_output.append(("\n%s" % title, None))
1540 for shortopt, longopt, default, desc in options:
1540 for shortopt, longopt, default, desc in options:
1541 if "DEPRECATED" in desc and not ui.verbose: continue
1541 if "DEPRECATED" in desc and not ui.verbose: continue
1542 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1542 opt_output.append(("%2s%s" % (shortopt and "-%s" % shortopt,
1543 longopt and " --%s" % longopt),
1543 longopt and " --%s" % longopt),
1544 "%s%s" % (desc,
1544 "%s%s" % (desc,
1545 default
1545 default
1546 and _(" (default: %s)") % default
1546 and _(" (default: %s)") % default
1547 or "")))
1547 or "")))
1548
1548
1549 if not name:
1549 if not name:
1550 ui.write(_("\nadditional help topics:\n\n"))
1550 ui.write(_("\nadditional help topics:\n\n"))
1551 topics = []
1551 topics = []
1552 for names, header, doc in help.helptable:
1552 for names, header, doc in help.helptable:
1553 names = [(-len(name), name) for name in names]
1553 names = [(-len(name), name) for name in names]
1554 names.sort()
1554 names.sort()
1555 topics.append((names[0][1], header))
1555 topics.append((names[0][1], header))
1556 topics_len = max([len(s[0]) for s in topics])
1556 topics_len = max([len(s[0]) for s in topics])
1557 for t, desc in topics:
1557 for t, desc in topics:
1558 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1558 ui.write(" %-*s %s\n" % (topics_len, t, desc))
1559
1559
1560 if opt_output:
1560 if opt_output:
1561 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1561 opts_len = max([len(line[0]) for line in opt_output if line[1]] or [0])
1562 for first, second in opt_output:
1562 for first, second in opt_output:
1563 if second:
1563 if second:
1564 # wrap descriptions at 70 characters, just like the
1564 # wrap descriptions at 70 characters, just like the
1565 # main help texts
1565 # main help texts
1566 second = textwrap.wrap(second, width=70 - opts_len - 3)
1566 second = textwrap.wrap(second, width=70 - opts_len - 3)
1567 pad = '\n' + ' ' * (opts_len + 3)
1567 pad = '\n' + ' ' * (opts_len + 3)
1568 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1568 ui.write(" %-*s %s\n" % (opts_len, first, pad.join(second)))
1569 else:
1569 else:
1570 ui.write("%s\n" % first)
1570 ui.write("%s\n" % first)
1571
1571
1572 def identify(ui, repo, source=None,
1572 def identify(ui, repo, source=None,
1573 rev=None, num=None, id=None, branch=None, tags=None):
1573 rev=None, num=None, id=None, branch=None, tags=None):
1574 """identify the working copy or specified revision
1574 """identify the working copy or specified revision
1575
1575
1576 With no revision, print a summary of the current state of the
1576 With no revision, print a summary of the current state of the
1577 repository.
1577 repository.
1578
1578
1579 With a path, do a lookup in another repository.
1579 With a path, do a lookup in another repository.
1580
1580
1581 This summary identifies the repository state using one or two
1581 This summary identifies the repository state using one or two
1582 parent hash identifiers, followed by a "+" if there are
1582 parent hash identifiers, followed by a "+" if there are
1583 uncommitted changes in the working directory, a list of tags for
1583 uncommitted changes in the working directory, a list of tags for
1584 this revision and a branch name for non-default branches.
1584 this revision and a branch name for non-default branches.
1585 """
1585 """
1586
1586
1587 if not repo and not source:
1587 if not repo and not source:
1588 raise util.Abort(_("There is no Mercurial repository here "
1588 raise util.Abort(_("There is no Mercurial repository here "
1589 "(.hg not found)"))
1589 "(.hg not found)"))
1590
1590
1591 hexfunc = ui.debugflag and hex or short
1591 hexfunc = ui.debugflag and hex or short
1592 default = not (num or id or branch or tags)
1592 default = not (num or id or branch or tags)
1593 output = []
1593 output = []
1594
1594
1595 revs = []
1595 revs = []
1596 if source:
1596 if source:
1597 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1597 source, revs, checkout = hg.parseurl(ui.expandpath(source), [])
1598 repo = hg.repository(ui, source)
1598 repo = hg.repository(ui, source)
1599
1599
1600 if not repo.local():
1600 if not repo.local():
1601 if not rev and revs:
1601 if not rev and revs:
1602 rev = revs[0]
1602 rev = revs[0]
1603 if not rev:
1603 if not rev:
1604 rev = "tip"
1604 rev = "tip"
1605 if num or branch or tags:
1605 if num or branch or tags:
1606 raise util.Abort(
1606 raise util.Abort(
1607 "can't query remote revision number, branch, or tags")
1607 "can't query remote revision number, branch, or tags")
1608 output = [hexfunc(repo.lookup(rev))]
1608 output = [hexfunc(repo.lookup(rev))]
1609 elif not rev:
1609 elif not rev:
1610 ctx = repo[None]
1610 ctx = repo[None]
1611 parents = ctx.parents()
1611 parents = ctx.parents()
1612 changed = False
1612 changed = False
1613 if default or id or num:
1613 if default or id or num:
1614 changed = ctx.files() + ctx.deleted()
1614 changed = ctx.files() + ctx.deleted()
1615 if default or id:
1615 if default or id:
1616 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1616 output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]),
1617 (changed) and "+" or "")]
1617 (changed) and "+" or "")]
1618 if num:
1618 if num:
1619 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1619 output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]),
1620 (changed) and "+" or ""))
1620 (changed) and "+" or ""))
1621 else:
1621 else:
1622 ctx = repo[rev]
1622 ctx = repo[rev]
1623 if default or id:
1623 if default or id:
1624 output = [hexfunc(ctx.node())]
1624 output = [hexfunc(ctx.node())]
1625 if num:
1625 if num:
1626 output.append(str(ctx.rev()))
1626 output.append(str(ctx.rev()))
1627
1627
1628 if repo.local() and default and not ui.quiet:
1628 if repo.local() and default and not ui.quiet:
1629 b = encoding.tolocal(ctx.branch())
1629 b = encoding.tolocal(ctx.branch())
1630 if b != 'default':
1630 if b != 'default':
1631 output.append("(%s)" % b)
1631 output.append("(%s)" % b)
1632
1632
1633 # multiple tags for a single parent separated by '/'
1633 # multiple tags for a single parent separated by '/'
1634 t = "/".join(ctx.tags())
1634 t = "/".join(ctx.tags())
1635 if t:
1635 if t:
1636 output.append(t)
1636 output.append(t)
1637
1637
1638 if branch:
1638 if branch:
1639 output.append(encoding.tolocal(ctx.branch()))
1639 output.append(encoding.tolocal(ctx.branch()))
1640
1640
1641 if tags:
1641 if tags:
1642 output.extend(ctx.tags())
1642 output.extend(ctx.tags())
1643
1643
1644 ui.write("%s\n" % ' '.join(output))
1644 ui.write("%s\n" % ' '.join(output))
1645
1645
1646 def import_(ui, repo, patch1, *patches, **opts):
1646 def import_(ui, repo, patch1, *patches, **opts):
1647 """import an ordered set of patches
1647 """import an ordered set of patches
1648
1648
1649 Import a list of patches and commit them individually.
1649 Import a list of patches and commit them individually.
1650
1650
1651 If there are outstanding changes in the working directory, import
1651 If there are outstanding changes in the working directory, import
1652 will abort unless given the -f/--force flag.
1652 will abort unless given the -f/--force flag.
1653
1653
1654 You can import a patch straight from a mail message. Even patches
1654 You can import a patch straight from a mail message. Even patches
1655 as attachments work (body part must be type text/plain or
1655 as attachments work (body part must be type text/plain or
1656 text/x-patch to be used). From and Subject headers of email
1656 text/x-patch to be used). From and Subject headers of email
1657 message are used as default committer and commit message. All
1657 message are used as default committer and commit message. All
1658 text/plain body parts before first diff are added to commit
1658 text/plain body parts before first diff are added to commit
1659 message.
1659 message.
1660
1660
1661 If the imported patch was generated by hg export, user and
1661 If the imported patch was generated by hg export, user and
1662 description from patch override values from message headers and
1662 description from patch override values from message headers and
1663 body. Values given on command line with -m/--message and -u/--user
1663 body. Values given on command line with -m/--message and -u/--user
1664 override these.
1664 override these.
1665
1665
1666 If --exact is specified, import will set the working directory to
1666 If --exact is specified, import will set the working directory to
1667 the parent of each patch before applying it, and will abort if the
1667 the parent of each patch before applying it, and will abort if the
1668 resulting changeset has a different ID than the one recorded in
1668 resulting changeset has a different ID than the one recorded in
1669 the patch. This may happen due to character set problems or other
1669 the patch. This may happen due to character set problems or other
1670 deficiencies in the text patch format.
1670 deficiencies in the text patch format.
1671
1671
1672 With -s/--similarity, hg will attempt to discover renames and
1672 With -s/--similarity, hg will attempt to discover renames and
1673 copies in the patch in the same way as 'addremove'.
1673 copies in the patch in the same way as 'addremove'.
1674
1674
1675 To read a patch from standard input, use patch name "-". See 'hg
1675 To read a patch from standard input, use patch name "-". See 'hg
1676 help dates' for a list of formats valid for -d/--date.
1676 help dates' for a list of formats valid for -d/--date.
1677 """
1677 """
1678 patches = (patch1,) + patches
1678 patches = (patch1,) + patches
1679
1679
1680 date = opts.get('date')
1680 date = opts.get('date')
1681 if date:
1681 if date:
1682 opts['date'] = util.parsedate(date)
1682 opts['date'] = util.parsedate(date)
1683
1683
1684 try:
1684 try:
1685 sim = float(opts.get('similarity') or 0)
1685 sim = float(opts.get('similarity') or 0)
1686 except ValueError:
1686 except ValueError:
1687 raise util.Abort(_('similarity must be a number'))
1687 raise util.Abort(_('similarity must be a number'))
1688 if sim < 0 or sim > 100:
1688 if sim < 0 or sim > 100:
1689 raise util.Abort(_('similarity must be between 0 and 100'))
1689 raise util.Abort(_('similarity must be between 0 and 100'))
1690
1690
1691 if opts.get('exact') or not opts.get('force'):
1691 if opts.get('exact') or not opts.get('force'):
1692 cmdutil.bail_if_changed(repo)
1692 cmdutil.bail_if_changed(repo)
1693
1693
1694 d = opts["base"]
1694 d = opts["base"]
1695 strip = opts["strip"]
1695 strip = opts["strip"]
1696 wlock = lock = None
1696 wlock = lock = None
1697 try:
1697 try:
1698 wlock = repo.wlock()
1698 wlock = repo.wlock()
1699 lock = repo.lock()
1699 lock = repo.lock()
1700 for p in patches:
1700 for p in patches:
1701 pf = os.path.join(d, p)
1701 pf = os.path.join(d, p)
1702
1702
1703 if pf == '-':
1703 if pf == '-':
1704 ui.status(_("applying patch from stdin\n"))
1704 ui.status(_("applying patch from stdin\n"))
1705 pf = sys.stdin
1705 pf = sys.stdin
1706 else:
1706 else:
1707 ui.status(_("applying %s\n") % p)
1707 ui.status(_("applying %s\n") % p)
1708 pf = url.open(ui, pf)
1708 pf = url.open(ui, pf)
1709 data = patch.extract(ui, pf)
1709 data = patch.extract(ui, pf)
1710 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1710 tmpname, message, user, date, branch, nodeid, p1, p2 = data
1711
1711
1712 if tmpname is None:
1712 if tmpname is None:
1713 raise util.Abort(_('no diffs found'))
1713 raise util.Abort(_('no diffs found'))
1714
1714
1715 try:
1715 try:
1716 cmdline_message = cmdutil.logmessage(opts)
1716 cmdline_message = cmdutil.logmessage(opts)
1717 if cmdline_message:
1717 if cmdline_message:
1718 # pickup the cmdline msg
1718 # pickup the cmdline msg
1719 message = cmdline_message
1719 message = cmdline_message
1720 elif message:
1720 elif message:
1721 # pickup the patch msg
1721 # pickup the patch msg
1722 message = message.strip()
1722 message = message.strip()
1723 else:
1723 else:
1724 # launch the editor
1724 # launch the editor
1725 message = None
1725 message = None
1726 ui.debug(_('message:\n%s\n') % message)
1726 ui.debug(_('message:\n%s\n') % message)
1727
1727
1728 wp = repo.parents()
1728 wp = repo.parents()
1729 if opts.get('exact'):
1729 if opts.get('exact'):
1730 if not nodeid or not p1:
1730 if not nodeid or not p1:
1731 raise util.Abort(_('not a Mercurial patch'))
1731 raise util.Abort(_('not a Mercurial patch'))
1732 p1 = repo.lookup(p1)
1732 p1 = repo.lookup(p1)
1733 p2 = repo.lookup(p2 or hex(nullid))
1733 p2 = repo.lookup(p2 or hex(nullid))
1734
1734
1735 if p1 != wp[0].node():
1735 if p1 != wp[0].node():
1736 hg.clean(repo, p1)
1736 hg.clean(repo, p1)
1737 repo.dirstate.setparents(p1, p2)
1737 repo.dirstate.setparents(p1, p2)
1738 elif p2:
1738 elif p2:
1739 try:
1739 try:
1740 p1 = repo.lookup(p1)
1740 p1 = repo.lookup(p1)
1741 p2 = repo.lookup(p2)
1741 p2 = repo.lookup(p2)
1742 if p1 == wp[0].node():
1742 if p1 == wp[0].node():
1743 repo.dirstate.setparents(p1, p2)
1743 repo.dirstate.setparents(p1, p2)
1744 except error.RepoError:
1744 except error.RepoError:
1745 pass
1745 pass
1746 if opts.get('exact') or opts.get('import_branch'):
1746 if opts.get('exact') or opts.get('import_branch'):
1747 repo.dirstate.setbranch(branch or 'default')
1747 repo.dirstate.setbranch(branch or 'default')
1748
1748
1749 files = {}
1749 files = {}
1750 try:
1750 try:
1751 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1751 patch.patch(tmpname, ui, strip=strip, cwd=repo.root,
1752 files=files)
1752 files=files)
1753 finally:
1753 finally:
1754 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1754 files = patch.updatedir(ui, repo, files, similarity=sim/100.)
1755 if not opts.get('no_commit'):
1755 if not opts.get('no_commit'):
1756 m = cmdutil.matchfiles(repo, files or [])
1756 m = cmdutil.matchfiles(repo, files or [])
1757 n = repo.commit(message, opts.get('user') or user,
1757 n = repo.commit(message, opts.get('user') or user,
1758 opts.get('date') or date, match=m,
1758 opts.get('date') or date, match=m,
1759 editor=cmdutil.commiteditor)
1759 editor=cmdutil.commiteditor)
1760 if opts.get('exact'):
1760 if opts.get('exact'):
1761 if hex(n) != nodeid:
1761 if hex(n) != nodeid:
1762 repo.rollback()
1762 repo.rollback()
1763 raise util.Abort(_('patch is damaged'
1763 raise util.Abort(_('patch is damaged'
1764 ' or loses information'))
1764 ' or loses information'))
1765 # Force a dirstate write so that the next transaction
1765 # Force a dirstate write so that the next transaction
1766 # backs up an up-to-date file.
1766 # backs up an up-to-date file.
1767 repo.dirstate.write()
1767 repo.dirstate.write()
1768 finally:
1768 finally:
1769 os.unlink(tmpname)
1769 os.unlink(tmpname)
1770 finally:
1770 finally:
1771 release(lock, wlock)
1771 release(lock, wlock)
1772
1772
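The --exact branch above commits the patch and then compares the resulting hash with the node ID recorded in the patch header, rolling the commit back on mismatch. A minimal standalone sketch of that verify-then-rollback pattern, using a SHA-1 digest over an in-memory history list rather than Mercurial's repo API (all names below are illustrative, not part of the source):

import hashlib

def commit_exact(history, data, expected_id):
    # Apply the change, then verify the resulting identifier matches
    # the one recorded in the patch; undo the "commit" on mismatch.
    history.append(data)
    actual_id = hashlib.sha1(data.encode('utf-8')).hexdigest()
    if actual_id != expected_id:
        history.pop()  # analogous to repo.rollback()
        raise ValueError('patch is damaged or loses information')
    return actual_id

history = []
blob = 'example change'
node = hashlib.sha1(blob.encode('utf-8')).hexdigest()
print(commit_exact(history, blob, node))  # succeeds and prints the hash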
1773 def incoming(ui, repo, source="default", **opts):
1773 def incoming(ui, repo, source="default", **opts):
1774 """show new changesets found in source
1774 """show new changesets found in source
1775
1775
1776 Show new changesets found in the specified path/URL or the default
1776 Show new changesets found in the specified path/URL or the default
1777 pull location. These are the changesets that would be pulled if a
1777 pull location. These are the changesets that would be pulled if a
1778 pull was requested.
1778 pull was requested.
1779
1779
1780 For remote repositories, using --bundle avoids downloading the
1780 For remote repositories, using --bundle avoids downloading the
1781 changesets twice if the incoming is followed by a pull.
1781 changesets twice if the incoming is followed by a pull.
1782
1782
1783 See pull for valid source format details.
1783 See pull for valid source format details.
1784 """
1784 """
1785 limit = cmdutil.loglimit(opts)
1785 limit = cmdutil.loglimit(opts)
1786 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1786 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
1787 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1787 other = hg.repository(cmdutil.remoteui(repo, opts), source)
1788 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1788 ui.status(_('comparing with %s\n') % url.hidepassword(source))
1789 if revs:
1789 if revs:
1790 revs = [other.lookup(rev) for rev in revs]
1790 revs = [other.lookup(rev) for rev in revs]
1791 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1791 common, incoming, rheads = repo.findcommonincoming(other, heads=revs,
1792 force=opts["force"])
1792 force=opts["force"])
1793 if not incoming:
1793 if not incoming:
1794 try:
1794 try:
1795 os.unlink(opts["bundle"])
1795 os.unlink(opts["bundle"])
1796 except:
1796 except:
1797 pass
1797 pass
1798 ui.status(_("no changes found\n"))
1798 ui.status(_("no changes found\n"))
1799 return 1
1799 return 1
1800
1800
1801 cleanup = None
1801 cleanup = None
1802 try:
1802 try:
1803 fname = opts["bundle"]
1803 fname = opts["bundle"]
1804 if fname or not other.local():
1804 if fname or not other.local():
1805 # create a bundle (uncompressed if other repo is not local)
1805 # create a bundle (uncompressed if other repo is not local)
1806
1806
1807 if revs is None and other.capable('changegroupsubset'):
1807 if revs is None and other.capable('changegroupsubset'):
1808 revs = rheads
1808 revs = rheads
1809
1809
1810 if revs is None:
1810 if revs is None:
1811 cg = other.changegroup(incoming, "incoming")
1811 cg = other.changegroup(incoming, "incoming")
1812 else:
1812 else:
1813 cg = other.changegroupsubset(incoming, revs, 'incoming')
1813 cg = other.changegroupsubset(incoming, revs, 'incoming')
1814 bundletype = other.local() and "HG10BZ" or "HG10UN"
1814 bundletype = other.local() and "HG10BZ" or "HG10UN"
1815 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1815 fname = cleanup = changegroup.writebundle(cg, fname, bundletype)
1816 # keep written bundle?
1816 # keep written bundle?
1817 if opts["bundle"]:
1817 if opts["bundle"]:
1818 cleanup = None
1818 cleanup = None
1819 if not other.local():
1819 if not other.local():
1820 # use the created uncompressed bundlerepo
1820 # use the created uncompressed bundlerepo
1821 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1821 other = bundlerepo.bundlerepository(ui, repo.root, fname)
1822
1822
1823 o = other.changelog.nodesbetween(incoming, revs)[0]
1823 o = other.changelog.nodesbetween(incoming, revs)[0]
1824 if opts.get('newest_first'):
1824 if opts.get('newest_first'):
1825 o.reverse()
1825 o.reverse()
1826 displayer = cmdutil.show_changeset(ui, other, opts)
1826 displayer = cmdutil.show_changeset(ui, other, opts)
1827 count = 0
1827 count = 0
1828 for n in o:
1828 for n in o:
1829 if count >= limit:
1829 if count >= limit:
1830 break
1830 break
1831 parents = [p for p in other.changelog.parents(n) if p != nullid]
1831 parents = [p for p in other.changelog.parents(n) if p != nullid]
1832 if opts.get('no_merges') and len(parents) == 2:
1832 if opts.get('no_merges') and len(parents) == 2:
1833 continue
1833 continue
1834 count += 1
1834 count += 1
1835 displayer.show(other[n])
1835 displayer.show(other[n])
1836 finally:
1836 finally:
1837 if hasattr(other, 'close'):
1837 if hasattr(other, 'close'):
1838 other.close()
1838 other.close()
1839 if cleanup:
1839 if cleanup:
1840 os.unlink(cleanup)
1840 os.unlink(cleanup)
1841
1841
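Both incoming above and outgoing further below walk their candidate nodes with the same filtering shape: stop once --limit changesets have been shown, and skip merges when --no-merges is set. A self-contained sketch of that loop over plain data (the node ids and parent map are invented for illustration):

def display(nodes, parents_of, limit, no_merges):
    # nodes: iterable of node ids; parents_of: node id -> list of parent ids
    shown = []
    for n in nodes:
        if len(shown) >= limit:
            break
        if no_merges and len(parents_of(n)) == 2:
            continue  # a merge changeset has two parents; skip it
        shown.append(n)
    return shown

parents = {1: [], 2: [1], 3: [1, 2], 4: [3]}
print(display([1, 2, 3, 4], parents.get, limit=10, no_merges=True))  # [1, 2, 4]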
1842 def init(ui, dest=".", **opts):
1842 def init(ui, dest=".", **opts):
1843 """create a new repository in the given directory
1843 """create a new repository in the given directory
1844
1844
1845 Initialize a new repository in the given directory. If the given
1845 Initialize a new repository in the given directory. If the given
1846 directory does not exist, it is created.
1846 directory does not exist, it is created.
1847
1847
1848 If no directory is given, the current directory is used.
1848 If no directory is given, the current directory is used.
1849
1849
1850 It is possible to specify an ssh:// URL as the destination.
1850 It is possible to specify an ssh:// URL as the destination.
1851 See 'hg help urls' for more information.
1851 See 'hg help urls' for more information.
1852 """
1852 """
1853 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1853 hg.repository(cmdutil.remoteui(ui, opts), dest, create=1)
1854
1854
1855 def locate(ui, repo, *pats, **opts):
1855 def locate(ui, repo, *pats, **opts):
1856 """locate files matching specific patterns
1856 """locate files matching specific patterns
1857
1857
1858 Print all files under Mercurial control whose names match the
1858 Print all files under Mercurial control whose names match the
1859 given patterns.
1859 given patterns.
1860
1860
1861 This command searches the entire repository by default. To search
1861 This command searches the entire repository by default. To search
1862 just the current directory and its subdirectories, use
1862 just the current directory and its subdirectories, use
1863 "--include .".
1863 "--include .".
1864
1864
1865 If no patterns are given to match, this command prints all file
1865 If no patterns are given to match, this command prints all file
1866 names.
1866 names.
1867
1867
1868 If you want to feed the output of this command into the "xargs"
1868 If you want to feed the output of this command into the "xargs"
1869 command, use the -0 option to both this command and "xargs". This
1869 command, use the -0 option to both this command and "xargs". This
1870 will avoid the problem of "xargs" treating single filenames that
1870 will avoid the problem of "xargs" treating single filenames that
1871 contain white space as multiple filenames.
1871 contain white space as multiple filenames.
1872 """
1872 """
1873 end = opts.get('print0') and '\0' or '\n'
1873 end = opts.get('print0') and '\0' or '\n'
1874 rev = opts.get('rev') or None
1874 rev = opts.get('rev') or None
1875
1875
1876 ret = 1
1876 ret = 1
1877 m = cmdutil.match(repo, pats, opts, default='relglob')
1877 m = cmdutil.match(repo, pats, opts, default='relglob')
1878 m.bad = lambda x,y: False
1878 m.bad = lambda x,y: False
1879 for abs in repo[rev].walk(m):
1879 for abs in repo[rev].walk(m):
1880 if not rev and abs not in repo.dirstate:
1880 if not rev and abs not in repo.dirstate:
1881 continue
1881 continue
1882 if opts.get('fullpath'):
1882 if opts.get('fullpath'):
1883 ui.write(repo.wjoin(abs), end)
1883 ui.write(repo.wjoin(abs), end)
1884 else:
1884 else:
1885 ui.write(((pats and m.rel(abs)) or abs), end)
1885 ui.write(((pats and m.rel(abs)) or abs), end)
1886 ret = 0
1886 ret = 0
1887
1887
1888 return ret
1888 return ret
1889
1889
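locate picks '\0' or '\n' as the record terminator so that its output can be piped safely to "xargs -0" even when file names contain whitespace. A small sketch of the same choice outside Mercurial (the file names are made up):

import sys

def emit(names, print0=False):
    # same idiom as `end = opts.get('print0') and '\0' or '\n'` above
    end = print0 and '\0' or '\n'
    for name in names:
        sys.stdout.write(name + end)

emit(['plain.txt', 'name with spaces.txt'], print0=True)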
1890 def log(ui, repo, *pats, **opts):
1890 def log(ui, repo, *pats, **opts):
1891 """show revision history of entire repository or files
1891 """show revision history of entire repository or files
1892
1892
1893 Print the revision history of the specified files or the entire
1893 Print the revision history of the specified files or the entire
1894 project.
1894 project.
1895
1895
1896 File history is shown without following rename or copy history of
1896 File history is shown without following rename or copy history of
1897 files. Use -f/--follow with a filename to follow history across
1897 files. Use -f/--follow with a filename to follow history across
1898 renames and copies. --follow without a filename will only show
1898 renames and copies. --follow without a filename will only show
1899 ancestors or descendants of the starting revision. --follow-first
1899 ancestors or descendants of the starting revision. --follow-first
1900 only follows the first parent of merge revisions.
1900 only follows the first parent of merge revisions.
1901
1901
1902 If no revision range is specified, the default is tip:0 unless
1902 If no revision range is specified, the default is tip:0 unless
1903 --follow is set, in which case the working directory parent is
1903 --follow is set, in which case the working directory parent is
1904 used as the starting revision.
1904 used as the starting revision.
1905
1905
1906 See 'hg help dates' for a list of formats valid for -d/--date.
1906 See 'hg help dates' for a list of formats valid for -d/--date.
1907
1907
1908 By default this command outputs: changeset id and hash, tags,
1908 By default this command outputs: changeset id and hash, tags,
1909 non-trivial parents, user, date and time, and a summary for each
1909 non-trivial parents, user, date and time, and a summary for each
1910 commit. When the -v/--verbose switch is used, the list of changed
1910 commit. When the -v/--verbose switch is used, the list of changed
1911 files and full commit message is shown.
1911 files and full commit message is shown.
1912
1912
1913 NOTE: log -p/--patch may generate unexpected diff output for merge
1913 NOTE: log -p/--patch may generate unexpected diff output for merge
1914 changesets, as it will only compare the merge changeset against
1914 changesets, as it will only compare the merge changeset against
1915 its first parent. Also, the files: list will only reflect files
1915 its first parent. Also, the files: list will only reflect files
1916 that are different from BOTH parents.
1916 that are different from BOTH parents.
1917
1917
1918 """
1918 """
1919
1919
1920 get = util.cachefunc(lambda r: repo[r].changeset())
1920 get = util.cachefunc(lambda r: repo[r].changeset())
1921 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1921 changeiter, matchfn = cmdutil.walkchangerevs(ui, repo, pats, get, opts)
1922
1922
1923 limit = cmdutil.loglimit(opts)
1923 limit = cmdutil.loglimit(opts)
1924 count = 0
1924 count = 0
1925
1925
1926 if opts.get('copies') and opts.get('rev'):
1926 if opts.get('copies') and opts.get('rev'):
1927 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1927 endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1
1928 else:
1928 else:
1929 endrev = len(repo)
1929 endrev = len(repo)
1930 rcache = {}
1930 rcache = {}
1931 ncache = {}
1931 ncache = {}
1932 def getrenamed(fn, rev):
1932 def getrenamed(fn, rev):
1933 '''looks up all renames for a file (up to endrev) the first
1933 '''looks up all renames for a file (up to endrev) the first
1934 time the file is given. It indexes on the changerev and only
1934 time the file is given. It indexes on the changerev and only
1935 parses the manifest if linkrev != changerev.
1935 parses the manifest if linkrev != changerev.
1936 Returns rename info for fn at changerev rev.'''
1936 Returns rename info for fn at changerev rev.'''
1937 if fn not in rcache:
1937 if fn not in rcache:
1938 rcache[fn] = {}
1938 rcache[fn] = {}
1939 ncache[fn] = {}
1939 ncache[fn] = {}
1940 fl = repo.file(fn)
1940 fl = repo.file(fn)
1941 for i in fl:
1941 for i in fl:
1942 node = fl.node(i)
1942 node = fl.node(i)
1943 lr = fl.linkrev(i)
1943 lr = fl.linkrev(i)
1944 renamed = fl.renamed(node)
1944 renamed = fl.renamed(node)
1945 rcache[fn][lr] = renamed
1945 rcache[fn][lr] = renamed
1946 if renamed:
1946 if renamed:
1947 ncache[fn][node] = renamed
1947 ncache[fn][node] = renamed
1948 if lr >= endrev:
1948 if lr >= endrev:
1949 break
1949 break
1950 if rev in rcache[fn]:
1950 if rev in rcache[fn]:
1951 return rcache[fn][rev]
1951 return rcache[fn][rev]
1952
1952
1953 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1953 # If linkrev != rev (i.e. rev not found in rcache) fallback to
1954 # filectx logic.
1954 # filectx logic.
1955
1955
1956 try:
1956 try:
1957 return repo[rev][fn].renamed()
1957 return repo[rev][fn].renamed()
1958 except error.LookupError:
1958 except error.LookupError:
1959 pass
1959 pass
1960 return None
1960 return None
1961
1961
1962 df = False
1962 df = False
1963 if opts["date"]:
1963 if opts["date"]:
1964 df = util.matchdate(opts["date"])
1964 df = util.matchdate(opts["date"])
1965
1965
1966 only_branches = opts.get('only_branch')
1966 only_branches = opts.get('only_branch')
1967
1967
1968 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1968 displayer = cmdutil.show_changeset(ui, repo, opts, True, matchfn)
1969 for st, rev, fns in changeiter:
1969 for st, rev, fns in changeiter:
1970 if st == 'add':
1970 if st == 'add':
1971 parents = [p for p in repo.changelog.parentrevs(rev)
1971 parents = [p for p in repo.changelog.parentrevs(rev)
1972 if p != nullrev]
1972 if p != nullrev]
1973 if opts.get('no_merges') and len(parents) == 2:
1973 if opts.get('no_merges') and len(parents) == 2:
1974 continue
1974 continue
1975 if opts.get('only_merges') and len(parents) != 2:
1975 if opts.get('only_merges') and len(parents) != 2:
1976 continue
1976 continue
1977
1977
1978 if only_branches:
1978 if only_branches:
1979 revbranch = get(rev)[5]['branch']
1979 revbranch = get(rev)[5]['branch']
1980 if revbranch not in only_branches:
1980 if revbranch not in only_branches:
1981 continue
1981 continue
1982
1982
1983 if df:
1983 if df:
1984 changes = get(rev)
1984 changes = get(rev)
1985 if not df(changes[2][0]):
1985 if not df(changes[2][0]):
1986 continue
1986 continue
1987
1987
1988 if opts.get('keyword'):
1988 if opts.get('keyword'):
1989 changes = get(rev)
1989 changes = get(rev)
1990 miss = 0
1990 miss = 0
1991 for k in [kw.lower() for kw in opts['keyword']]:
1991 for k in [kw.lower() for kw in opts['keyword']]:
1992 if not (k in changes[1].lower() or
1992 if not (k in changes[1].lower() or
1993 k in changes[4].lower() or
1993 k in changes[4].lower() or
1994 k in " ".join(changes[3]).lower()):
1994 k in " ".join(changes[3]).lower()):
1995 miss = 1
1995 miss = 1
1996 break
1996 break
1997 if miss:
1997 if miss:
1998 continue
1998 continue
1999
1999
2000 if opts['user']:
2000 if opts['user']:
2001 changes = get(rev)
2001 changes = get(rev)
2002 if not [k for k in opts['user'] if k in changes[1]]:
2002 if not [k for k in opts['user'] if k in changes[1]]:
2003 continue
2003 continue
2004
2004
2005 copies = []
2005 copies = []
2006 if opts.get('copies') and rev:
2006 if opts.get('copies') and rev:
2007 for fn in get(rev)[3]:
2007 for fn in get(rev)[3]:
2008 rename = getrenamed(fn, rev)
2008 rename = getrenamed(fn, rev)
2009 if rename:
2009 if rename:
2010 copies.append((fn, rename[0]))
2010 copies.append((fn, rename[0]))
2011 displayer.show(context.changectx(repo, rev), copies=copies)
2011 displayer.show(context.changectx(repo, rev), copies=copies)
2012 elif st == 'iter':
2012 elif st == 'iter':
2013 if count == limit: break
2013 if count == limit: break
2014 if displayer.flush(rev):
2014 if displayer.flush(rev):
2015 count += 1
2015 count += 1
2016
2016
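getrenamed above builds a per-file rename cache lazily the first time a file is asked about and answers later lookups from that cache, falling back to None when the revision is not recorded. A simplified standalone version of that memoization pattern (the scan function is a stand-in for walking the filelog):

def make_getrenamed(scan):
    # scan(fn) -> dict mapping revision -> rename info, computed once per file
    rcache = {}
    def getrenamed(fn, rev):
        if fn not in rcache:
            rcache[fn] = scan(fn)      # populate on first use only
        return rcache[fn].get(rev)     # None when no rename is recorded
    return getrenamed

fake_log = {'b.txt': {3: ('a.txt', 'deadbeef')}}
getrenamed = make_getrenamed(lambda fn: fake_log.get(fn, {}))
print(getrenamed('b.txt', 3))   # ('a.txt', 'deadbeef')
print(getrenamed('b.txt', 5))   # None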
2017 def manifest(ui, repo, node=None, rev=None):
2017 def manifest(ui, repo, node=None, rev=None):
2018 """output the current or given revision of the project manifest
2018 """output the current or given revision of the project manifest
2019
2019
2020 Print a list of version controlled files for the given revision.
2020 Print a list of version controlled files for the given revision.
2021 If no revision is given, the first parent of the working directory
2021 If no revision is given, the first parent of the working directory
2022 is used, or the null revision if none is checked out.
2022 is used, or the null revision if none is checked out.
2023
2023
2024 With the -v flag, print file permissions, symlink and executable bits.
2024 With the -v flag, print file permissions, symlink and executable bits.
2025 With the --debug flag, print file revision hashes.
2025 With the --debug flag, print file revision hashes.
2026 """
2026 """
2027
2027
2028 if rev and node:
2028 if rev and node:
2029 raise util.Abort(_("please specify just one revision"))
2029 raise util.Abort(_("please specify just one revision"))
2030
2030
2031 if not node:
2031 if not node:
2032 node = rev
2032 node = rev
2033
2033
2034 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2034 decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '}
2035 ctx = repo[node]
2035 ctx = repo[node]
2036 for f in ctx:
2036 for f in ctx:
2037 if ui.debugflag:
2037 if ui.debugflag:
2038 ui.write("%40s " % hex(ctx.manifest()[f]))
2038 ui.write("%40s " % hex(ctx.manifest()[f]))
2039 if ui.verbose:
2039 if ui.verbose:
2040 ui.write(decor[ctx.flags(f)])
2040 ui.write(decor[ctx.flags(f)])
2041 ui.write("%s\n" % f)
2041 ui.write("%s\n" % f)
2042
2042
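The decor table in manifest maps a file's flag character to the mode decoration printed in verbose mode: 'l' for symlinks, 'x' for executables, '' for regular files. A tiny sketch of that lookup with invented file data:

decor = {'l': '644 @ ', 'x': '755 * ', '': '644 '}

files = [('script.sh', 'x'), ('link', 'l'), ('README', '')]
for name, flag in files:
    # verbose output: decoration first, then the file name
    print('%s%s' % (decor[flag], name))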
2043 def merge(ui, repo, node=None, **opts):
2043 def merge(ui, repo, node=None, **opts):
2044 """merge working directory with another revision
2044 """merge working directory with another revision
2045
2045
2046 The contents of the current working directory are updated with all
2046 The contents of the current working directory are updated with all
2047 changes made in the requested revision since the last common
2047 changes made in the requested revision since the last common
2048 predecessor revision.
2048 predecessor revision.
2049
2049
2050 Files that changed between either parent are marked as changed for
2050 Files that changed between either parent are marked as changed for
2051 the next commit and a commit must be performed before any further
2051 the next commit and a commit must be performed before any further
2052 updates are allowed. The next commit has two parents.
2052 updates are allowed. The next commit has two parents.
2053
2053
2054 If no revision is specified, the working directory's parent is a
2054 If no revision is specified, the working directory's parent is a
2055 head revision, and the current branch contains exactly one other
2055 head revision, and the current branch contains exactly one other
2056 head, the other head is merged with by default. Otherwise, an
2056 head, the other head is merged with by default. Otherwise, an
2057 explicit revision to merge with must be provided.
2057 explicit revision to merge with must be provided.
2058 """
2058 """
2059
2059
2060 if opts.get('rev') and node:
2060 if opts.get('rev') and node:
2061 raise util.Abort(_("please specify just one revision"))
2061 raise util.Abort(_("please specify just one revision"))
2062 if not node:
2062 if not node:
2063 node = opts.get('rev')
2063 node = opts.get('rev')
2064
2064
2065 if not node:
2065 if not node:
2066 branch = repo.changectx(None).branch()
2066 branch = repo.changectx(None).branch()
2067 bheads = repo.branchheads(branch)
2067 bheads = repo.branchheads(branch)
2068 if len(bheads) > 2:
2068 if len(bheads) > 2:
2069 raise util.Abort(_("branch '%s' has %d heads - "
2069 raise util.Abort(_("branch '%s' has %d heads - "
2070 "please merge with an explicit rev") %
2070 "please merge with an explicit rev") %
2071 (branch, len(bheads)))
2071 (branch, len(bheads)))
2072
2072
2073 parent = repo.dirstate.parents()[0]
2073 parent = repo.dirstate.parents()[0]
2074 if len(bheads) == 1:
2074 if len(bheads) == 1:
2075 if len(repo.heads()) > 1:
2075 if len(repo.heads()) > 1:
2076 raise util.Abort(_("branch '%s' has one head - "
2076 raise util.Abort(_("branch '%s' has one head - "
2077 "please merge with an explicit rev") %
2077 "please merge with an explicit rev") %
2078 branch)
2078 branch)
2079 msg = _('there is nothing to merge')
2079 msg = _('there is nothing to merge')
2080 if parent != repo.lookup(repo[None].branch()):
2080 if parent != repo.lookup(repo[None].branch()):
2081 msg = _('%s - use "hg update" instead') % msg
2081 msg = _('%s - use "hg update" instead') % msg
2082 raise util.Abort(msg)
2082 raise util.Abort(msg)
2083
2083
2084 if parent not in bheads:
2084 if parent not in bheads:
2085 raise util.Abort(_('working dir not at a head rev - '
2085 raise util.Abort(_('working dir not at a head rev - '
2086 'use "hg update" or merge with an explicit rev'))
2086 'use "hg update" or merge with an explicit rev'))
2087 node = parent == bheads[0] and bheads[-1] or bheads[0]
2087 node = parent == bheads[0] and bheads[-1] or bheads[0]
2088
2088
2089 if opts.get('show'):
2089 if opts.get('show'):
2090 p1 = repo['.']
2090 p1 = repo['.']
2091 p2 = repo[node]
2091 p2 = repo[node]
2092 common = p1.ancestor(p2)
2092 common = p1.ancestor(p2)
2093 roots, heads = [common.node()], [p2.node()]
2093 roots, heads = [common.node()], [p2.node()]
2094 displayer = cmdutil.show_changeset(ui, repo, opts)
2094 displayer = cmdutil.show_changeset(ui, repo, opts)
2095 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2095 for node in repo.changelog.nodesbetween(roots=roots, heads=heads)[0]:
2096 displayer.show(repo[node])
2096 displayer.show(repo[node])
2097 return 0
2097 return 0
2098
2098
2099 return hg.merge(repo, node, force=opts.get('force'))
2099 return hg.merge(repo, node, force=opts.get('force'))
2100
2100
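When no revision is given, merge above selects "the other" branch head: whichever of the two heads is not the working directory parent. A standalone sketch of that selection with plain strings for node ids (the guard is a simplification of the surrounding checks):

def pick_other_head(parent, bheads):
    # Mirrors `node = parent == bheads[0] and bheads[-1] or bheads[0]`:
    # if the parent is the first head, take the last one, else the first.
    if len(bheads) != 2 or parent not in bheads:
        raise ValueError('need exactly two heads, one of them checked out')
    return bheads[-1] if parent == bheads[0] else bheads[0]

print(pick_other_head('aaa', ['aaa', 'bbb']))  # bbb
print(pick_other_head('bbb', ['aaa', 'bbb']))  # aaa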
2101 def outgoing(ui, repo, dest=None, **opts):
2101 def outgoing(ui, repo, dest=None, **opts):
2102 """show changesets not found in destination
2102 """show changesets not found in destination
2103
2103
2104 Show changesets not found in the specified destination repository
2104 Show changesets not found in the specified destination repository
2105 or the default push location. These are the changesets that would
2105 or the default push location. These are the changesets that would
2106 be pushed if a push was requested.
2106 be pushed if a push was requested.
2107
2107
2108 See pull for valid destination format details.
2108 See pull for valid destination format details.
2109 """
2109 """
2110 limit = cmdutil.loglimit(opts)
2110 limit = cmdutil.loglimit(opts)
2111 dest, revs, checkout = hg.parseurl(
2111 dest, revs, checkout = hg.parseurl(
2112 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2112 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2113 if revs:
2113 if revs:
2114 revs = [repo.lookup(rev) for rev in revs]
2114 revs = [repo.lookup(rev) for rev in revs]
2115
2115
2116 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2116 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2117 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2117 ui.status(_('comparing with %s\n') % url.hidepassword(dest))
2118 o = repo.findoutgoing(other, force=opts.get('force'))
2118 o = repo.findoutgoing(other, force=opts.get('force'))
2119 if not o:
2119 if not o:
2120 ui.status(_("no changes found\n"))
2120 ui.status(_("no changes found\n"))
2121 return 1
2121 return 1
2122 o = repo.changelog.nodesbetween(o, revs)[0]
2122 o = repo.changelog.nodesbetween(o, revs)[0]
2123 if opts.get('newest_first'):
2123 if opts.get('newest_first'):
2124 o.reverse()
2124 o.reverse()
2125 displayer = cmdutil.show_changeset(ui, repo, opts)
2125 displayer = cmdutil.show_changeset(ui, repo, opts)
2126 count = 0
2126 count = 0
2127 for n in o:
2127 for n in o:
2128 if count >= limit:
2128 if count >= limit:
2129 break
2129 break
2130 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2130 parents = [p for p in repo.changelog.parents(n) if p != nullid]
2131 if opts.get('no_merges') and len(parents) == 2:
2131 if opts.get('no_merges') and len(parents) == 2:
2132 continue
2132 continue
2133 count += 1
2133 count += 1
2134 displayer.show(repo[n])
2134 displayer.show(repo[n])
2135
2135
2136 def parents(ui, repo, file_=None, **opts):
2136 def parents(ui, repo, file_=None, **opts):
2137 """show the parents of the working directory or revision
2137 """show the parents of the working directory or revision
2138
2138
2139 Print the working directory's parent revisions. If a revision is
2139 Print the working directory's parent revisions. If a revision is
2140 given via -r/--rev, the parent of that revision will be printed.
2140 given via -r/--rev, the parent of that revision will be printed.
2141 If a file argument is given, the revision in which the file was last
2141 If a file argument is given, the revision in which the file was last
2142 changed (before the working directory revision or the argument to
2142 changed (before the working directory revision or the argument to
2143 --rev if given) is printed.
2143 --rev if given) is printed.
2144 """
2144 """
2145 rev = opts.get('rev')
2145 rev = opts.get('rev')
2146 if rev:
2146 if rev:
2147 ctx = repo[rev]
2147 ctx = repo[rev]
2148 else:
2148 else:
2149 ctx = repo[None]
2149 ctx = repo[None]
2150
2150
2151 if file_:
2151 if file_:
2152 m = cmdutil.match(repo, (file_,), opts)
2152 m = cmdutil.match(repo, (file_,), opts)
2153 if m.anypats() or len(m.files()) != 1:
2153 if m.anypats() or len(m.files()) != 1:
2154 raise util.Abort(_('can only specify an explicit filename'))
2154 raise util.Abort(_('can only specify an explicit filename'))
2155 file_ = m.files()[0]
2155 file_ = m.files()[0]
2156 filenodes = []
2156 filenodes = []
2157 for cp in ctx.parents():
2157 for cp in ctx.parents():
2158 if not cp:
2158 if not cp:
2159 continue
2159 continue
2160 try:
2160 try:
2161 filenodes.append(cp.filenode(file_))
2161 filenodes.append(cp.filenode(file_))
2162 except error.LookupError:
2162 except error.LookupError:
2163 pass
2163 pass
2164 if not filenodes:
2164 if not filenodes:
2165 raise util.Abort(_("'%s' not found in manifest!") % file_)
2165 raise util.Abort(_("'%s' not found in manifest!") % file_)
2166 fl = repo.file(file_)
2166 fl = repo.file(file_)
2167 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2167 p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes]
2168 else:
2168 else:
2169 p = [cp.node() for cp in ctx.parents()]
2169 p = [cp.node() for cp in ctx.parents()]
2170
2170
2171 displayer = cmdutil.show_changeset(ui, repo, opts)
2171 displayer = cmdutil.show_changeset(ui, repo, opts)
2172 for n in p:
2172 for n in p:
2173 if n != nullid:
2173 if n != nullid:
2174 displayer.show(repo[n])
2174 displayer.show(repo[n])
2175
2175
2176 def paths(ui, repo, search=None):
2176 def paths(ui, repo, search=None):
2177 """show aliases for remote repositories
2177 """show aliases for remote repositories
2178
2178
2179 Show definition of symbolic path name NAME. If no name is given,
2179 Show definition of symbolic path name NAME. If no name is given,
2180 show definition of available names.
2180 show definition of available names.
2181
2181
2182 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2182 Path names are defined in the [paths] section of /etc/mercurial/hgrc
2183 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2183 and $HOME/.hgrc. If run inside a repository, .hg/hgrc is used, too.
2184
2184
2185 See 'hg help urls' for more information.
2185 See 'hg help urls' for more information.
2186 """
2186 """
2187 if search:
2187 if search:
2188 for name, path in ui.configitems("paths"):
2188 for name, path in ui.configitems("paths"):
2189 if name == search:
2189 if name == search:
2190 ui.write("%s\n" % url.hidepassword(path))
2190 ui.write("%s\n" % url.hidepassword(path))
2191 return
2191 return
2192 ui.warn(_("not found!\n"))
2192 ui.warn(_("not found!\n"))
2193 return 1
2193 return 1
2194 else:
2194 else:
2195 for name, path in ui.configitems("paths"):
2195 for name, path in ui.configitems("paths"):
2196 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2196 ui.write("%s = %s\n" % (name, url.hidepassword(path)))
2197
2197
2198 def postincoming(ui, repo, modheads, optupdate, checkout):
2198 def postincoming(ui, repo, modheads, optupdate, checkout):
2199 if modheads == 0:
2199 if modheads == 0:
2200 return
2200 return
2201 if optupdate:
2201 if optupdate:
2202 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2202 if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout:
2203 return hg.update(repo, checkout)
2203 return hg.update(repo, checkout)
2204 else:
2204 else:
2205 ui.status(_("not updating, since new heads added\n"))
2205 ui.status(_("not updating, since new heads added\n"))
2206 if modheads > 1:
2206 if modheads > 1:
2207 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2207 ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
2208 else:
2208 else:
2209 ui.status(_("(run 'hg update' to get a working copy)\n"))
2209 ui.status(_("(run 'hg update' to get a working copy)\n"))
2210
2210
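postincoming updates the working copy only when the pull did not leave several new heads to choose between; otherwise it prints a hint and leaves the decision to the user. A condensed sketch of that decision, with the status messages shortened and the branch-head count passed in as a plain integer:

def after_pull(modheads, want_update, branchheads, checkout=None):
    if modheads == 0:
        return 'no changes'
    if want_update and (modheads <= 1 or branchheads == 1 or checkout):
        return 'update'                      # safe: one obvious head to move to
    if modheads > 1:
        return 'run hg heads / hg merge'     # several heads: the user must pick
    return 'run hg update'

print(after_pull(1, True, 1))    # update
print(after_pull(2, True, 2))    # run hg heads / hg merge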
2211 def pull(ui, repo, source="default", **opts):
2211 def pull(ui, repo, source="default", **opts):
2212 """pull changes from the specified source
2212 """pull changes from the specified source
2213
2213
2214 Pull changes from a remote repository to the local one.
2214 Pull changes from a remote repository to the local one.
2215
2215
2216 This finds all changes from the repository at the specified path
2216 This finds all changes from the repository at the specified path
2217 or URL and adds them to the local repository. By default, this
2217 or URL and adds them to the local repository. By default, this
2218 does not update the copy of the project in the working directory.
2218 does not update the copy of the project in the working directory.
2219
2219
2220 Use hg incoming if you want to see what will be added by the next
2220 Use hg incoming if you want to see what will be added by the next
2221 pull without actually adding the changes to the repository.
2221 pull without actually adding the changes to the repository.
2222
2222
2223 If SOURCE is omitted, the 'default' path will be used.
2223 If SOURCE is omitted, the 'default' path will be used.
2224 See 'hg help urls' for more information.
2224 See 'hg help urls' for more information.
2225 """
2225 """
2226 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2226 source, revs, checkout = hg.parseurl(ui.expandpath(source), opts.get('rev'))
2227 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2227 other = hg.repository(cmdutil.remoteui(repo, opts), source)
2228 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2228 ui.status(_('pulling from %s\n') % url.hidepassword(source))
2229 if revs:
2229 if revs:
2230 try:
2230 try:
2231 revs = [other.lookup(rev) for rev in revs]
2231 revs = [other.lookup(rev) for rev in revs]
2232 except error.CapabilityError:
2232 except error.CapabilityError:
2233 err = _("Other repository doesn't support revision lookup, "
2233 err = _("Other repository doesn't support revision lookup, "
2234 "so a rev cannot be specified.")
2234 "so a rev cannot be specified.")
2235 raise util.Abort(err)
2235 raise util.Abort(err)
2236
2236
2237 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2237 modheads = repo.pull(other, heads=revs, force=opts.get('force'))
2238 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2238 return postincoming(ui, repo, modheads, opts.get('update'), checkout)
2239
2239
2240 def push(ui, repo, dest=None, **opts):
2240 def push(ui, repo, dest=None, **opts):
2241 """push changes to the specified destination
2241 """push changes to the specified destination
2242
2242
2243 Push changes from the local repository to the given destination.
2243 Push changes from the local repository to the given destination.
2244
2244
2245 This is the symmetrical operation for pull. It moves changes from
2245 This is the symmetrical operation for pull. It moves changes from
2246 the current repository to a different one. If the destination is
2246 the current repository to a different one. If the destination is
2247 local this is identical to a pull in that directory from the
2247 local this is identical to a pull in that directory from the
2248 current one.
2248 current one.
2249
2249
2250 By default, push will refuse to run if it detects the result would
2250 By default, push will refuse to run if it detects the result would
2251 increase the number of remote heads. This generally indicates
2251 increase the number of remote heads. This generally indicates
2252 that the client has forgotten to pull and merge before pushing.
2252 that the client has forgotten to pull and merge before pushing.
2253
2253
2254 If -r/--rev is used, the named revision and all its ancestors will
2254 If -r/--rev is used, the named revision and all its ancestors will
2255 be pushed to the remote repository.
2255 be pushed to the remote repository.
2256
2256
2257 Look at the help text for URLs for important details about ssh://
2257 Look at the help text for URLs for important details about ssh://
2258 URLs. If DESTINATION is omitted, a default path will be used.
2258 URLs. If DESTINATION is omitted, a default path will be used.
2259 See 'hg help urls' for more information.
2259 See 'hg help urls' for more information.
2260 """
2260 """
2261 dest, revs, checkout = hg.parseurl(
2261 dest, revs, checkout = hg.parseurl(
2262 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2262 ui.expandpath(dest or 'default-push', dest or 'default'), opts.get('rev'))
2263 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2263 other = hg.repository(cmdutil.remoteui(repo, opts), dest)
2264 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2264 ui.status(_('pushing to %s\n') % url.hidepassword(dest))
2265 if revs:
2265 if revs:
2266 revs = [repo.lookup(rev) for rev in revs]
2266 revs = [repo.lookup(rev) for rev in revs]
2267 r = repo.push(other, opts.get('force'), revs=revs)
2267 r = repo.push(other, opts.get('force'), revs=revs)
2268 return r == 0
2268 return r == 0
2269
2269
2270 def recover(ui, repo):
2270 def recover(ui, repo):
2271 """roll back an interrupted transaction
2271 """roll back an interrupted transaction
2272
2272
2273 Recover from an interrupted commit or pull.
2273 Recover from an interrupted commit or pull.
2274
2274
2275 This command tries to fix the repository status after an
2275 This command tries to fix the repository status after an
2276 interrupted operation. It should only be necessary when Mercurial
2276 interrupted operation. It should only be necessary when Mercurial
2277 suggests it.
2277 suggests it.
2278 """
2278 """
2279 if repo.recover():
2279 if repo.recover():
2280 return hg.verify(repo)
2280 return hg.verify(repo)
2281 return 1
2281 return 1
2282
2282
2283 def remove(ui, repo, *pats, **opts):
2283 def remove(ui, repo, *pats, **opts):
2284 """remove the specified files on the next commit
2284 """remove the specified files on the next commit
2285
2285
2286 Schedule the indicated files for removal from the repository.
2286 Schedule the indicated files for removal from the repository.
2287
2287
2288 This only removes files from the current branch, not from the
2288 This only removes files from the current branch, not from the
2289 entire project history. -A/--after can be used to remove only
2289 entire project history. -A/--after can be used to remove only
2290 files that have already been deleted, -f/--force can be used to
2290 files that have already been deleted, -f/--force can be used to
2291 force deletion, and -Af can be used to remove files from the next
2291 force deletion, and -Af can be used to remove files from the next
2292 revision without deleting them.
2292 revision without deleting them.
2293
2293
2294 The following table details the behavior of remove for different
2294 The following table details the behavior of remove for different
2295 file states (columns) and option combinations (rows). The file
2295 file states (columns) and option combinations (rows). The file
2296 states are Added, Clean, Modified and Missing (as reported by hg
2296 states are Added, Clean, Modified and Missing (as reported by hg
2297 status). The actions are Warn, Remove (from branch) and Delete
2297 status). The actions are Warn, Remove (from branch) and Delete
2298 (from disk).
2298 (from disk).
2299
2299
2300 A C M !
2300 A C M !
2301 none W RD W R
2301 none W RD W R
2302 -f R RD RD R
2302 -f R RD RD R
2303 -A W W W R
2303 -A W W W R
2304 -Af R R R R
2304 -Af R R R R
2305
2305
2306 This command schedules the files to be removed at the next commit.
2306 This command schedules the files to be removed at the next commit.
2307 To undo a remove before that, see hg revert.
2307 To undo a remove before that, see hg revert.
2308 """
2308 """
2309
2309
2310 after, force = opts.get('after'), opts.get('force')
2310 after, force = opts.get('after'), opts.get('force')
2311 if not pats and not after:
2311 if not pats and not after:
2312 raise util.Abort(_('no files specified'))
2312 raise util.Abort(_('no files specified'))
2313
2313
2314 m = cmdutil.match(repo, pats, opts)
2314 m = cmdutil.match(repo, pats, opts)
2315 s = repo.status(match=m, clean=True)
2315 s = repo.status(match=m, clean=True)
2316 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2316 modified, added, deleted, clean = s[0], s[1], s[3], s[6]
2317
2317
2318 for f in m.files():
2318 for f in m.files():
2319 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2319 if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
2320 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2320 ui.warn(_('not removing %s: file is untracked\n') % m.rel(f))
2321
2321
2322 def warn(files, reason):
2322 def warn(files, reason):
2323 for f in files:
2323 for f in files:
2324 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2324 ui.warn(_('not removing %s: file %s (use -f to force removal)\n')
2325 % (m.rel(f), reason))
2325 % (m.rel(f), reason))
2326
2326
2327 if force:
2327 if force:
2328 remove, forget = modified + deleted + clean, added
2328 remove, forget = modified + deleted + clean, added
2329 elif after:
2329 elif after:
2330 remove, forget = deleted, []
2330 remove, forget = deleted, []
2331 warn(modified + added + clean, _('still exists'))
2331 warn(modified + added + clean, _('still exists'))
2332 else:
2332 else:
2333 remove, forget = deleted + clean, []
2333 remove, forget = deleted + clean, []
2334 warn(modified, _('is modified'))
2334 warn(modified, _('is modified'))
2335 warn(added, _('has been marked for add'))
2335 warn(added, _('has been marked for add'))
2336
2336
2337 for f in sorted(remove + forget):
2337 for f in sorted(remove + forget):
2338 if ui.verbose or not m.exact(f):
2338 if ui.verbose or not m.exact(f):
2339 ui.status(_('removing %s\n') % m.rel(f))
2339 ui.status(_('removing %s\n') % m.rel(f))
2340
2340
2341 repo.forget(forget)
2341 repo.forget(forget)
2342 repo.remove(remove, unlink=not after)
2342 repo.remove(remove, unlink=not after)
2343
2343
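The table in the remove docstring (file state A/C/M/! against option row) can be read mechanically. A sketch that encodes it as a dictionary and looks up the resulting action, purely for illustration:

# rows: option combination; columns: Added, Clean, Modified, Missing
BEHAVIOR = {
    'none': {'A': 'W', 'C': 'RD', 'M': 'W',  '!': 'R'},
    '-f':   {'A': 'R', 'C': 'RD', 'M': 'RD', '!': 'R'},
    '-A':   {'A': 'W', 'C': 'W',  'M': 'W',  '!': 'R'},
    '-Af':  {'A': 'R', 'C': 'R',  'M': 'R',  '!': 'R'},
}

def remove_action(opts, state):
    # W = warn, R = remove from branch, D = delete from disk
    return BEHAVIOR[opts][state]

print(remove_action('none', 'C'))  # RD: clean file is removed and deleted
print(remove_action('-Af', 'M'))   # R: drop from next revision, keep on disk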
2344 def rename(ui, repo, *pats, **opts):
2344 def rename(ui, repo, *pats, **opts):
2345 """rename files; equivalent of copy + remove
2345 """rename files; equivalent of copy + remove
2346
2346
2347 Mark dest as copies of sources; mark sources for deletion. If dest
2347 Mark dest as copies of sources; mark sources for deletion. If dest
2348 is a directory, copies are put in that directory. If dest is a
2348 is a directory, copies are put in that directory. If dest is a
2349 file, there can only be one source.
2349 file, there can only be one source.
2350
2350
2351 By default, this command copies the contents of files as they
2351 By default, this command copies the contents of files as they
2352 exist in the working directory. If invoked with -A/--after, the
2352 exist in the working directory. If invoked with -A/--after, the
2353 operation is recorded, but no copying is performed.
2353 operation is recorded, but no copying is performed.
2354
2354
2355 This command takes effect at the next commit. To undo a rename
2355 This command takes effect at the next commit. To undo a rename
2356 before that, see hg revert.
2356 before that, see hg revert.
2357 """
2357 """
2358 wlock = repo.wlock(False)
2358 wlock = repo.wlock(False)
2359 try:
2359 try:
2360 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2360 return cmdutil.copy(ui, repo, pats, opts, rename=True)
2361 finally:
2361 finally:
2362 wlock.release()
2362 wlock.release()
2363
2363
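rename wraps the copy in a working-directory lock that is released in a finally block whatever happens. The same acquire/try/finally shape can be expressed as a context manager; a generic standalone sketch (the lock here is Python's own threading.Lock, not Mercurial's wlock):

import threading
from contextlib import contextmanager

_wlock = threading.Lock()

@contextmanager
def wlocked():
    _wlock.acquire()
    try:
        yield                      # the body runs with the lock held
    finally:
        _wlock.release()           # released even if the body raises

with wlocked():
    print('working copy is locked while we rename')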
2364 def resolve(ui, repo, *pats, **opts):
2364 def resolve(ui, repo, *pats, **opts):
2365 """retry file merges from a merge or update
2365 """retry file merges from a merge or update
2366
2366
2367 This command will cleanly retry unresolved file merges using file
2367 This command will cleanly retry unresolved file merges using file
2368 revisions preserved from the last update or merge. To attempt to
2368 revisions preserved from the last update or merge. To attempt to
2369 resolve all unresolved files, use the -a/--all switch.
2369 resolve all unresolved files, use the -a/--all switch.
2370
2370
2371 If a conflict is resolved manually, please note that the changes
2371 If a conflict is resolved manually, please note that the changes
2372 will be overwritten if the merge is retried with resolve. The
2372 will be overwritten if the merge is retried with resolve. The
2373 -m/--mark switch should be used to mark the file as resolved.
2373 -m/--mark switch should be used to mark the file as resolved.
2374
2374
2375 This command will also allow listing resolved files and manually
2375 This command will also allow listing resolved files and manually
2376 marking and unmarking files as resolved. All files must be marked
2376 marking and unmarking files as resolved. All files must be marked
2377 as resolved before new commits are permitted.
2377 as resolved before new commits are permitted.
2378
2378
2379 The codes used to show the status of files are:
2379 The codes used to show the status of files are:
2380 U = unresolved
2380 U = unresolved
2381 R = resolved
2381 R = resolved
2382 """
2382 """
2383
2383
2384 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2384 all, mark, unmark, show = [opts.get(o) for o in 'all mark unmark list'.split()]
2385
2385
2386 if (show and (mark or unmark)) or (mark and unmark):
2386 if (show and (mark or unmark)) or (mark and unmark):
2387 raise util.Abort(_("too many options specified"))
2387 raise util.Abort(_("too many options specified"))
2388 if pats and all:
2388 if pats and all:
2389 raise util.Abort(_("can't specify --all and patterns"))
2389 raise util.Abort(_("can't specify --all and patterns"))
2390 if not (all or pats or show or mark or unmark):
2390 if not (all or pats or show or mark or unmark):
2391 raise util.Abort(_('no files or directories specified; '
2391 raise util.Abort(_('no files or directories specified; '
2392 'use --all to remerge all files'))
2392 'use --all to remerge all files'))
2393
2393
2394 ms = merge_.mergestate(repo)
2394 ms = merge_.mergestate(repo)
2395 m = cmdutil.match(repo, pats, opts)
2395 m = cmdutil.match(repo, pats, opts)
2396
2396
2397 for f in ms:
2397 for f in ms:
2398 if m(f):
2398 if m(f):
2399 if show:
2399 if show:
2400 ui.write("%s %s\n" % (ms[f].upper(), f))
2400 ui.write("%s %s\n" % (ms[f].upper(), f))
2401 elif mark:
2401 elif mark:
2402 ms.mark(f, "r")
2402 ms.mark(f, "r")
2403 elif unmark:
2403 elif unmark:
2404 ms.mark(f, "u")
2404 ms.mark(f, "u")
2405 else:
2405 else:
2406 wctx = repo[None]
2406 wctx = repo[None]
2407 mctx = wctx.parents()[-1]
2407 mctx = wctx.parents()[-1]
2408
2408
2409 # backup pre-resolve (merge uses .orig for its own purposes)
2409 # backup pre-resolve (merge uses .orig for its own purposes)
2410 a = repo.wjoin(f)
2410 a = repo.wjoin(f)
2411 util.copyfile(a, a + ".resolve")
2411 util.copyfile(a, a + ".resolve")
2412
2412
2413 # resolve file
2413 # resolve file
2414 ms.resolve(f, wctx, mctx)
2414 ms.resolve(f, wctx, mctx)
2415
2415
2416 # replace filemerge's .orig file with our resolve file
2416 # replace filemerge's .orig file with our resolve file
2417 util.rename(a + ".resolve", a + ".orig")
2417 util.rename(a + ".resolve", a + ".orig")
2418
2418
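resolve protects the user's manual edits by copying the file aside before re-running the merge, then renaming that copy over the .orig the merge machinery writes. A standalone sketch of that save/operate/restore dance using temporary files (the remerge callable stands in for ms.resolve):

import os, shutil, tempfile

def remerge_with_backup(path, remerge):
    backup = path + '.resolve'
    shutil.copyfile(path, backup)      # keep the user's manual resolution
    remerge(path)                      # stand-in for ms.resolve(f, wctx, mctx)
    os.rename(backup, path + '.orig')  # our backup becomes the .orig copy

def write(path, text):
    with open(path, 'w') as fp:
        fp.write(text)

workdir = tempfile.mkdtemp()
target = os.path.join(workdir, 'conflicted.txt')
write(target, 'my manual resolution\n')
remerge_with_backup(target, lambda p: write(p, 'merge tool output\n'))
with open(target + '.orig') as fp:
    print(fp.read().strip())           # my manual resolution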
2419 def revert(ui, repo, *pats, **opts):
2419 def revert(ui, repo, *pats, **opts):
2420 """restore individual files or directories to an earlier state
2420 """restore individual files or directories to an earlier state
2421
2421
2422 (Use update -r to check out earlier revisions; revert does not
2422 (Use update -r to check out earlier revisions; revert does not
2423 change the working directory parents.)
2423 change the working directory parents.)
2424
2424
2425 With no revision specified, revert the named files or directories
2425 With no revision specified, revert the named files or directories
2426 to the contents they had in the parent of the working directory.
2426 to the contents they had in the parent of the working directory.
2427 This restores the contents of the affected files to an unmodified
2427 This restores the contents of the affected files to an unmodified
2428 state and unschedules adds, removes, copies, and renames. If the
2428 state and unschedules adds, removes, copies, and renames. If the
2429 working directory has two parents, you must explicitly specify the
2429 working directory has two parents, you must explicitly specify the
2430 revision to revert to.
2430 revision to revert to.
2431
2431
2432 Using the -r/--rev option, revert the given files or directories
2432 Using the -r/--rev option, revert the given files or directories
2433 to their contents as of a specific revision. This can be helpful
2433 to their contents as of a specific revision. This can be helpful
2434 to "roll back" some or all of an earlier change. See 'hg help
2434 to "roll back" some or all of an earlier change. See 'hg help
2435 dates' for a list of formats valid for -d/--date.
2435 dates' for a list of formats valid for -d/--date.
2436
2436
2437 Revert modifies the working directory. It does not commit any
2437 Revert modifies the working directory. It does not commit any
2438 changes, or change the parent of the working directory. If you
2438 changes, or change the parent of the working directory. If you
2439 revert to a revision other than the parent of the working
2439 revert to a revision other than the parent of the working
2440 directory, the reverted files will thus appear modified
2440 directory, the reverted files will thus appear modified
2441 afterwards.
2441 afterwards.
2442
2442
2443 If a file has been deleted, it is restored. If the executable mode
2443 If a file has been deleted, it is restored. If the executable mode
2444 of a file was changed, it is reset.
2444 of a file was changed, it is reset.
2445
2445
2446 If names are given, all files matching the names are reverted.
2446 If names are given, all files matching the names are reverted.
2447 If no arguments are given, no files are reverted.
2447 If no arguments are given, no files are reverted.
2448
2448
2449 Modified files are saved with a .orig suffix before reverting.
2449 Modified files are saved with a .orig suffix before reverting.
2450 To disable these backups, use --no-backup.
2450 To disable these backups, use --no-backup.
2451 """
2451 """
2452
2452
2453 if opts["date"]:
2453 if opts["date"]:
2454 if opts["rev"]:
2454 if opts["rev"]:
2455 raise util.Abort(_("you can't specify a revision and a date"))
2455 raise util.Abort(_("you can't specify a revision and a date"))
2456 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2456 opts["rev"] = cmdutil.finddate(ui, repo, opts["date"])
2457
2457
2458 if not pats and not opts.get('all'):
2458 if not pats and not opts.get('all'):
2459 raise util.Abort(_('no files or directories specified; '
2459 raise util.Abort(_('no files or directories specified; '
2460 'use --all to revert the whole repo'))
2460 'use --all to revert the whole repo'))
2461
2461
2462 parent, p2 = repo.dirstate.parents()
2462 parent, p2 = repo.dirstate.parents()
2463 if not opts.get('rev') and p2 != nullid:
2463 if not opts.get('rev') and p2 != nullid:
2464 raise util.Abort(_('uncommitted merge - please provide a '
2464 raise util.Abort(_('uncommitted merge - please provide a '
2465 'specific revision'))
2465 'specific revision'))
2466 ctx = repo[opts.get('rev')]
2466 ctx = repo[opts.get('rev')]
2467 node = ctx.node()
2467 node = ctx.node()
2468 mf = ctx.manifest()
2468 mf = ctx.manifest()
2469 if node == parent:
2469 if node == parent:
2470 pmf = mf
2470 pmf = mf
2471 else:
2471 else:
2472 pmf = None
2472 pmf = None
2473
2473
2474 # need all matching names in dirstate and manifest of target rev,
2474 # need all matching names in dirstate and manifest of target rev,
2475 # so have to walk both. do not print errors if files exist in one
2475 # so have to walk both. do not print errors if files exist in one
2476 # but not the other.
2476 # but not the other.
2477
2477
2478 names = {}
2478 names = {}
2479
2479
2480 wlock = repo.wlock()
2480 wlock = repo.wlock()
2481 try:
2481 try:
2482 # walk dirstate.
2482 # walk dirstate.
2483
2483
2484 m = cmdutil.match(repo, pats, opts)
2484 m = cmdutil.match(repo, pats, opts)
2485 m.bad = lambda x,y: False
2485 m.bad = lambda x,y: False
2486 for abs in repo.walk(m):
2486 for abs in repo.walk(m):
2487 names[abs] = m.rel(abs), m.exact(abs)
2487 names[abs] = m.rel(abs), m.exact(abs)
2488
2488
2489 # walk target manifest.
2489 # walk target manifest.
2490
2490
2491 def badfn(path, msg):
2491 def badfn(path, msg):
2492 if path in names:
2492 if path in names:
2493 return
2493 return
2494 path_ = path + '/'
2494 path_ = path + '/'
2495 for f in names:
2495 for f in names:
2496 if f.startswith(path_):
2496 if f.startswith(path_):
2497 return
2497 return
2498 ui.warn("%s: %s\n" % (m.rel(path), msg))
2498 ui.warn("%s: %s\n" % (m.rel(path), msg))
2499
2499
2500 m = cmdutil.match(repo, pats, opts)
2500 m = cmdutil.match(repo, pats, opts)
2501 m.bad = badfn
2501 m.bad = badfn
2502 for abs in repo[node].walk(m):
2502 for abs in repo[node].walk(m):
2503 if abs not in names:
2503 if abs not in names:
2504 names[abs] = m.rel(abs), m.exact(abs)
2504 names[abs] = m.rel(abs), m.exact(abs)
2505
2505
2506 m = cmdutil.matchfiles(repo, names)
2506 m = cmdutil.matchfiles(repo, names)
2507 changes = repo.status(match=m)[:4]
2507 changes = repo.status(match=m)[:4]
2508 modified, added, removed, deleted = map(set, changes)
2508 modified, added, removed, deleted = map(set, changes)
2509
2509
2510 # if f is a rename, also revert the source
2510 # if f is a rename, also revert the source
2511 cwd = repo.getcwd()
2511 cwd = repo.getcwd()
2512 for f in added:
2512 for f in added:
2513 src = repo.dirstate.copied(f)
2513 src = repo.dirstate.copied(f)
2514 if src and src not in names and repo.dirstate[src] == 'r':
2514 if src and src not in names and repo.dirstate[src] == 'r':
2515 removed.add(src)
2515 removed.add(src)
2516 names[src] = (repo.pathto(src, cwd), True)
2516 names[src] = (repo.pathto(src, cwd), True)
2517
2517
2518 def removeforget(abs):
2518 def removeforget(abs):
2519 if repo.dirstate[abs] == 'a':
2519 if repo.dirstate[abs] == 'a':
2520 return _('forgetting %s\n')
2520 return _('forgetting %s\n')
2521 return _('removing %s\n')
2521 return _('removing %s\n')
2522
2522
2523 revert = ([], _('reverting %s\n'))
2523 revert = ([], _('reverting %s\n'))
2524 add = ([], _('adding %s\n'))
2524 add = ([], _('adding %s\n'))
2525 remove = ([], removeforget)
2525 remove = ([], removeforget)
2526 undelete = ([], _('undeleting %s\n'))
2526 undelete = ([], _('undeleting %s\n'))
2527
2527
2528 disptable = (
2528 disptable = (
2529 # dispatch table:
2529 # dispatch table:
2530 # file state
2530 # file state
2531 # action if in target manifest
2531 # action if in target manifest
2532 # action if not in target manifest
2532 # action if not in target manifest
2533 # make backup if in target manifest
2533 # make backup if in target manifest
2534 # make backup if not in target manifest
2534 # make backup if not in target manifest
2535 (modified, revert, remove, True, True),
2535 (modified, revert, remove, True, True),
2536 (added, revert, remove, True, False),
2536 (added, revert, remove, True, False),
2537 (removed, undelete, None, False, False),
2537 (removed, undelete, None, False, False),
2538 (deleted, revert, remove, False, False),
2538 (deleted, revert, remove, False, False),
2539 )
2539 )
2540
2540
2541 for abs, (rel, exact) in sorted(names.items()):
2541 for abs, (rel, exact) in sorted(names.items()):
2542 mfentry = mf.get(abs)
2542 mfentry = mf.get(abs)
2543 target = repo.wjoin(abs)
2543 target = repo.wjoin(abs)
2544 def handle(xlist, dobackup):
2544 def handle(xlist, dobackup):
2545 xlist[0].append(abs)
2545 xlist[0].append(abs)
2546 if dobackup and not opts.get('no_backup') and util.lexists(target):
2546 if dobackup and not opts.get('no_backup') and util.lexists(target):
2547 bakname = "%s.orig" % rel
2547 bakname = "%s.orig" % rel
2548 ui.note(_('saving current version of %s as %s\n') %
2548 ui.note(_('saving current version of %s as %s\n') %
2549 (rel, bakname))
2549 (rel, bakname))
2550 if not opts.get('dry_run'):
2550 if not opts.get('dry_run'):
2551 util.copyfile(target, bakname)
2551 util.copyfile(target, bakname)
2552 if ui.verbose or not exact:
2552 if ui.verbose or not exact:
2553 msg = xlist[1]
2553 msg = xlist[1]
2554 if not isinstance(msg, basestring):
2554 if not isinstance(msg, basestring):
2555 msg = msg(abs)
2555 msg = msg(abs)
2556 ui.status(msg % rel)
2556 ui.status(msg % rel)
2557 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2557 for table, hitlist, misslist, backuphit, backupmiss in disptable:
2558 if abs not in table: continue
2558 if abs not in table: continue
2559 # file has changed in dirstate
2559 # file has changed in dirstate
2560 if mfentry:
2560 if mfentry:
2561 handle(hitlist, backuphit)
2561 handle(hitlist, backuphit)
2562 elif misslist is not None:
2562 elif misslist is not None:
2563 handle(misslist, backupmiss)
2563 handle(misslist, backupmiss)
2564 break
2564 break
2565 else:
2565 else:
2566 if abs not in repo.dirstate:
2566 if abs not in repo.dirstate:
2567 if mfentry:
2567 if mfentry:
2568 handle(add, True)
2568 handle(add, True)
2569 elif exact:
2569 elif exact:
2570 ui.warn(_('file not managed: %s\n') % rel)
2570 ui.warn(_('file not managed: %s\n') % rel)
2571 continue
2571 continue
2572 # file has not changed in dirstate
2572 # file has not changed in dirstate
2573 if node == parent:
2573 if node == parent:
2574 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2574 if exact: ui.warn(_('no changes needed to %s\n') % rel)
2575 continue
2575 continue
2576 if pmf is None:
2576 if pmf is None:
2577 # only need parent manifest in this unlikely case,
2577 # only need parent manifest in this unlikely case,
2578 # so do not read by default
2578 # so do not read by default
2579 pmf = repo[parent].manifest()
2579 pmf = repo[parent].manifest()
2580 if abs in pmf:
2580 if abs in pmf:
2581 if mfentry:
2581 if mfentry:
2582 # if version of file is same in parent and target
2582 # if version of file is same in parent and target
2583 # manifests, do nothing
2583 # manifests, do nothing
2584 if (pmf[abs] != mfentry or
2584 if (pmf[abs] != mfentry or
2585 pmf.flags(abs) != mf.flags(abs)):
2585 pmf.flags(abs) != mf.flags(abs)):
2586 handle(revert, False)
2586 handle(revert, False)
2587 else:
2587 else:
2588 handle(remove, False)
2588 handle(remove, False)
2589
2589
2590 if not opts.get('dry_run'):
2590 if not opts.get('dry_run'):
2591 def checkout(f):
2591 def checkout(f):
2592 fc = ctx[f]
2592 fc = ctx[f]
2593 repo.wwrite(f, fc.data(), fc.flags())
2593 repo.wwrite(f, fc.data(), fc.flags())
2594
2594
2595 audit_path = util.path_auditor(repo.root)
2595 audit_path = util.path_auditor(repo.root)
2596 for f in remove[0]:
2596 for f in remove[0]:
2597 if repo.dirstate[f] == 'a':
2597 if repo.dirstate[f] == 'a':
2598 repo.dirstate.forget(f)
2598 repo.dirstate.forget(f)
2599 continue
2599 continue
2600 audit_path(f)
2600 audit_path(f)
2601 try:
2601 try:
2602 util.unlink(repo.wjoin(f))
2602 util.unlink(repo.wjoin(f))
2603 except OSError:
2603 except OSError:
2604 pass
2604 pass
2605 repo.dirstate.remove(f)
2605 repo.dirstate.remove(f)
2606
2606
2607 normal = None
2607 normal = None
2608 if node == parent:
2608 if node == parent:
2609 # We're reverting to our parent. If possible, we'd like status
2609 # We're reverting to our parent. If possible, we'd like status
2610 # to report the file as clean. We have to use normallookup for
2610 # to report the file as clean. We have to use normallookup for
2611 # merges to avoid losing information about merged/dirty files.
2611 # merges to avoid losing information about merged/dirty files.
2612 if p2 != nullid:
2612 if p2 != nullid:
2613 normal = repo.dirstate.normallookup
2613 normal = repo.dirstate.normallookup
2614 else:
2614 else:
2615 normal = repo.dirstate.normal
2615 normal = repo.dirstate.normal
2616 for f in revert[0]:
2616 for f in revert[0]:
2617 checkout(f)
2617 checkout(f)
2618 if normal:
2618 if normal:
2619 normal(f)
2619 normal(f)
2620
2620
2621 for f in add[0]:
2621 for f in add[0]:
2622 checkout(f)
2622 checkout(f)
2623 repo.dirstate.add(f)
2623 repo.dirstate.add(f)
2624
2624
2625 normal = repo.dirstate.normallookup
2625 normal = repo.dirstate.normallookup
2626 if node == parent and p2 == nullid:
2626 if node == parent and p2 == nullid:
2627 normal = repo.dirstate.normal
2627 normal = repo.dirstate.normal
2628 for f in undelete[0]:
2628 for f in undelete[0]:
2629 checkout(f)
2629 checkout(f)
2630 normal(f)
2630 normal(f)
2631
2631
2632 finally:
2632 finally:
2633 wlock.release()
2633 wlock.release()
2634
2634
2635 def rollback(ui, repo):
2635 def rollback(ui, repo):
2636 """roll back the last transaction
2636 """roll back the last transaction
2637
2637
2638 This command should be used with care. There is only one level of
2638 This command should be used with care. There is only one level of
2639 rollback, and there is no way to undo a rollback. It will also
2639 rollback, and there is no way to undo a rollback. It will also
2640 restore the dirstate to its state at the time of the last transaction,
2640 restore the dirstate to its state at the time of the last transaction,
2641 losing any dirstate changes made since then.
2641 losing any dirstate changes made since then.
2642
2642
2643 Transactions are used to encapsulate the effects of all commands
2643 Transactions are used to encapsulate the effects of all commands
2644 that create new changesets or propagate existing changesets into a
2644 that create new changesets or propagate existing changesets into a
2645 repository. For example, the following commands are transactional,
2645 repository. For example, the following commands are transactional,
2646 and their effects can be rolled back:
2646 and their effects can be rolled back:
2647
2647
2648 commit
2648 commit
2649 import
2649 import
2650 pull
2650 pull
2651 push (with this repository as destination)
2651 push (with this repository as destination)
2652 unbundle
2652 unbundle
2653
2653
2654 This command is not intended for use on public repositories. Once
2654 This command is not intended for use on public repositories. Once
2655 changes are visible for pull by other users, rolling a transaction
2655 changes are visible for pull by other users, rolling a transaction
2656 back locally is ineffective (someone else may already have pulled
2656 back locally is ineffective (someone else may already have pulled
2657 the changes). Furthermore, a race is possible with readers of the
2657 the changes). Furthermore, a race is possible with readers of the
2658 repository; for example an in-progress pull from the repository
2658 repository; for example an in-progress pull from the repository
2659 may fail if a rollback is performed.
2659 may fail if a rollback is performed.
2660 """
2660 """
2661 repo.rollback()
2661 repo.rollback()
2662
2662
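# Editor's sketch, not part of this changeset: a minimal direct invocation of
# the command functions above, assuming Mercurial is importable and that the
# current directory holds a repository (both are assumptions for illustration;
# the helper name is hypothetical).
def _example_rollback():
    from mercurial import ui as uimod, hg as hgmod
    exui = uimod.ui()
    exrepo = hgmod.repository(exui, '.')  # open the repository in the cwd
    rollback(exui, exrepo)                # roll back its last transaction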
2663 def root(ui, repo):
2663 def root(ui, repo):
2664 """print the root (top) of the current working directory
2664 """print the root (top) of the current working directory
2665
2665
2666 Print the root directory of the current repository.
2666 Print the root directory of the current repository.
2667 """
2667 """
2668 ui.write(repo.root + "\n")
2668 ui.write(repo.root + "\n")
2669
2669
2670 def serve(ui, repo, **opts):
2670 def serve(ui, repo, **opts):
2671 """export the repository via HTTP
2671 """export the repository via HTTP
2672
2672
2673 Start a local HTTP repository browser and pull server.
2673 Start a local HTTP repository browser and pull server.
2674
2674
2675 By default, the server logs accesses to stdout and errors to
2675 By default, the server logs accesses to stdout and errors to
2676 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2676 stderr. Use the -A/--accesslog and -E/--errorlog options to log to
2677 files.
2677 files.
2678 """
2678 """
2679
2679
2680 if opts["stdio"]:
2680 if opts["stdio"]:
2681 if repo is None:
2681 if repo is None:
2682 raise error.RepoError(_("There is no Mercurial repository here"
2682 raise error.RepoError(_("There is no Mercurial repository here"
2683 " (.hg not found)"))
2683 " (.hg not found)"))
2684 s = sshserver.sshserver(ui, repo)
2684 s = sshserver.sshserver(ui, repo)
2685 s.serve_forever()
2685 s.serve_forever()
2686
2686
2687 baseui = repo and repo.baseui or ui
2687 baseui = repo and repo.baseui or ui
2688 optlist = ("name templates style address port prefix ipv6"
2688 optlist = ("name templates style address port prefix ipv6"
2689 " accesslog errorlog webdir_conf certificate")
2689 " accesslog errorlog webdir_conf certificate")
2690 for o in optlist.split():
2690 for o in optlist.split():
2691 if opts[o]:
2691 if opts[o]:
2692 baseui.setconfig("web", o, str(opts[o]))
2692 baseui.setconfig("web", o, str(opts[o]))
2693 if (repo is not None) and (repo.ui != baseui):
2693 if (repo is not None) and (repo.ui != baseui):
2694 repo.ui.setconfig("web", o, str(opts[o]))
2694 repo.ui.setconfig("web", o, str(opts[o]))
2695
2695
2696 if repo is None and not ui.config("web", "webdir_conf"):
2696 if repo is None and not ui.config("web", "webdir_conf"):
2697 raise error.RepoError(_("There is no Mercurial repository here"
2697 raise error.RepoError(_("There is no Mercurial repository here"
2698 " (.hg not found)"))
2698 " (.hg not found)"))
2699
2699
2700 class service:
2700 class service(object):
2701 def init(self):
2701 def init(self):
2702 util.set_signal_handler()
2702 util.set_signal_handler()
2703 self.httpd = server.create_server(baseui, repo)
2703 self.httpd = server.create_server(baseui, repo)
2704
2704
2705 if not ui.verbose: return
2705 if not ui.verbose: return
2706
2706
2707 if self.httpd.prefix:
2707 if self.httpd.prefix:
2708 prefix = self.httpd.prefix.strip('/') + '/'
2708 prefix = self.httpd.prefix.strip('/') + '/'
2709 else:
2709 else:
2710 prefix = ''
2710 prefix = ''
2711
2711
2712 port = ':%d' % self.httpd.port
2712 port = ':%d' % self.httpd.port
2713 if port == ':80':
2713 if port == ':80':
2714 port = ''
2714 port = ''
2715
2715
2716 bindaddr = self.httpd.addr
2716 bindaddr = self.httpd.addr
2717 if bindaddr == '0.0.0.0':
2717 if bindaddr == '0.0.0.0':
2718 bindaddr = '*'
2718 bindaddr = '*'
2719 elif ':' in bindaddr: # IPv6
2719 elif ':' in bindaddr: # IPv6
2720 bindaddr = '[%s]' % bindaddr
2720 bindaddr = '[%s]' % bindaddr
2721
2721
2722 fqaddr = self.httpd.fqaddr
2722 fqaddr = self.httpd.fqaddr
2723 if ':' in fqaddr:
2723 if ':' in fqaddr:
2724 fqaddr = '[%s]' % fqaddr
2724 fqaddr = '[%s]' % fqaddr
2725 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2725 ui.status(_('listening at http://%s%s/%s (bound to %s:%d)\n') %
2726 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2726 (fqaddr, port, prefix, bindaddr, self.httpd.port))
2727
2727
2728 def run(self):
2728 def run(self):
2729 self.httpd.serve_forever()
2729 self.httpd.serve_forever()
2730
2730
2731 service = service()
2731 service = service()
2732
2732
2733 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2733 cmdutil.service(opts, initfn=service.init, runfn=service.run)
2734
2734
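# Editor's sketch (illustration only, not from the changeset): how the
# "listening at" banner above is assembled, using assumed example values for
# the fully qualified address, bind address, port and prefix.
def _example_serve_banner():
    fqaddr, bindaddr, portnum, prefix = 'example.com', '[::]', 8000, 'hg/'
    port = ':%d' % portnum
    if port == ':80':
        port = ''
    # -> 'listening at http://example.com:8000/hg/ (bound to [::]:8000)\n'
    return ('listening at http://%s%s/%s (bound to %s:%d)\n' %
            (fqaddr, port, prefix, bindaddr, portnum))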
2735 def status(ui, repo, *pats, **opts):
2735 def status(ui, repo, *pats, **opts):
2736 """show changed files in the working directory
2736 """show changed files in the working directory
2737
2737
2738 Show status of files in the repository. If names are given, only
2738 Show status of files in the repository. If names are given, only
2739 files that match are shown. Files that are clean or ignored or the
2739 files that match are shown. Files that are clean or ignored or the
2740 source of a copy/move operation are not listed unless -c/--clean,
2740 source of a copy/move operation are not listed unless -c/--clean,
2741 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2741 -i/--ignored, -C/--copies or -A/--all is given. Unless options
2742 described with "show only ..." are given, the options -mardu are
2742 described with "show only ..." are given, the options -mardu are
2743 used.
2743 used.
2744
2744
2745 Option -q/--quiet hides untracked (unknown and ignored) files
2745 Option -q/--quiet hides untracked (unknown and ignored) files
2746 unless explicitly requested with -u/--unknown or -i/--ignored.
2746 unless explicitly requested with -u/--unknown or -i/--ignored.
2747
2747
2748 NOTE: status may appear to disagree with diff if permissions have
2748 NOTE: status may appear to disagree with diff if permissions have
2749 changed or a merge has occurred. The standard diff format does not
2749 changed or a merge has occurred. The standard diff format does not
2750 report permission changes and diff only reports changes relative
2750 report permission changes and diff only reports changes relative
2751 to one merge parent.
2751 to one merge parent.
2752
2752
2753 If one revision is given, it is used as the base revision.
2753 If one revision is given, it is used as the base revision.
2754 If two revisions are given, the difference between them is shown.
2754 If two revisions are given, the difference between them is shown.
2755
2755
2756 The codes used to show the status of files are:
2756 The codes used to show the status of files are:
2757 M = modified
2757 M = modified
2758 A = added
2758 A = added
2759 R = removed
2759 R = removed
2760 C = clean
2760 C = clean
2761 ! = missing (deleted by non-hg command, but still tracked)
2761 ! = missing (deleted by non-hg command, but still tracked)
2762 ? = not tracked
2762 ? = not tracked
2763 I = ignored
2763 I = ignored
2764   = the previously added file was copied from here
2764   = the previously added file was copied from here
2765 """
2765 """
2766
2766
2767 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2767 node1, node2 = cmdutil.revpair(repo, opts.get('rev'))
2768 cwd = (pats and repo.getcwd()) or ''
2768 cwd = (pats and repo.getcwd()) or ''
2769 end = opts.get('print0') and '\0' or '\n'
2769 end = opts.get('print0') and '\0' or '\n'
2770 copy = {}
2770 copy = {}
2771 states = 'modified added removed deleted unknown ignored clean'.split()
2771 states = 'modified added removed deleted unknown ignored clean'.split()
2772 show = [k for k in states if opts.get(k)]
2772 show = [k for k in states if opts.get(k)]
2773 if opts.get('all'):
2773 if opts.get('all'):
2774 show += ui.quiet and (states[:4] + ['clean']) or states
2774 show += ui.quiet and (states[:4] + ['clean']) or states
2775 if not show:
2775 if not show:
2776 show = ui.quiet and states[:4] or states[:5]
2776 show = ui.quiet and states[:4] or states[:5]
2777
2777
2778 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2778 stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts),
2779 'ignored' in show, 'clean' in show, 'unknown' in show)
2779 'ignored' in show, 'clean' in show, 'unknown' in show)
2780 changestates = zip(states, 'MAR!?IC', stat)
2780 changestates = zip(states, 'MAR!?IC', stat)
2781
2781
2782 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2782 if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
2783 ctxn = repo[nullid]
2783 ctxn = repo[nullid]
2784 ctx1 = repo[node1]
2784 ctx1 = repo[node1]
2785 ctx2 = repo[node2]
2785 ctx2 = repo[node2]
2786 added = stat[1]
2786 added = stat[1]
2787 if node2 is None:
2787 if node2 is None:
2788 added = stat[0] + stat[1] # merged?
2788 added = stat[0] + stat[1] # merged?
2789
2789
2790 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2790 for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
2791 if k in added:
2791 if k in added:
2792 copy[k] = v
2792 copy[k] = v
2793 elif v in added:
2793 elif v in added:
2794 copy[v] = k
2794 copy[v] = k
2795
2795
2796 for state, char, files in changestates:
2796 for state, char, files in changestates:
2797 if state in show:
2797 if state in show:
2798 format = "%s %%s%s" % (char, end)
2798 format = "%s %%s%s" % (char, end)
2799 if opts.get('no_status'):
2799 if opts.get('no_status'):
2800 format = "%%s%s" % end
2800 format = "%%s%s" % end
2801
2801
2802 for f in files:
2802 for f in files:
2803 ui.write(format % repo.pathto(f, cwd))
2803 ui.write(format % repo.pathto(f, cwd))
2804 if f in copy:
2804 if f in copy:
2805 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2805 ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end))
2806
2806
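# Editor's sketch (assumption, for illustration): how the format string built
# in status() above renders a single entry -- here a modified file with the
# default newline terminator rather than --print0's NUL byte.
def _example_status_line():
    char, end, rel = 'M', '\n', 'foo.py'
    format = "%s %%s%s" % (char, end)  # -> 'M %s\n'
    return format % rel                # -> 'M foo.py\n'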
2807 def tag(ui, repo, name1, *names, **opts):
2807 def tag(ui, repo, name1, *names, **opts):
2808 """add one or more tags for the current or given revision
2808 """add one or more tags for the current or given revision
2809
2809
2810 Name a particular revision using <name>.
2810 Name a particular revision using <name>.
2811
2811
2812 Tags are used to name particular revisions of the repository and are
2812 Tags are used to name particular revisions of the repository and are
2813 very useful for comparing different revisions, going back to significant
2813 very useful for comparing different revisions, going back to significant
2814 earlier versions, or marking branch points as releases.
2814 earlier versions, or marking branch points as releases.
2815
2815
2816 If no revision is given, the parent of the working directory is
2816 If no revision is given, the parent of the working directory is
2817 used, or tip if no revision is checked out.
2817 used, or tip if no revision is checked out.
2818
2818
2819 To facilitate version control, distribution, and merging of tags,
2819 To facilitate version control, distribution, and merging of tags,
2820 they are stored as a file named ".hgtags" which is managed
2820 they are stored as a file named ".hgtags" which is managed
2821 similarly to other project files and can be hand-edited if
2821 similarly to other project files and can be hand-edited if
2822 necessary. The file '.hg/localtags' is used for local tags (not
2822 necessary. The file '.hg/localtags' is used for local tags (not
2823 shared among repositories).
2823 shared among repositories).
2824
2824
2825 See 'hg help dates' for a list of formats valid for -d/--date.
2825 See 'hg help dates' for a list of formats valid for -d/--date.
2826 """
2826 """
2827
2827
2828 rev_ = "."
2828 rev_ = "."
2829 names = (name1,) + names
2829 names = (name1,) + names
2830 if len(names) != len(set(names)):
2830 if len(names) != len(set(names)):
2831 raise util.Abort(_('tag names must be unique'))
2831 raise util.Abort(_('tag names must be unique'))
2832 for n in names:
2832 for n in names:
2833 if n in ['tip', '.', 'null']:
2833 if n in ['tip', '.', 'null']:
2834 raise util.Abort(_('the name \'%s\' is reserved') % n)
2834 raise util.Abort(_('the name \'%s\' is reserved') % n)
2835 if opts.get('rev') and opts.get('remove'):
2835 if opts.get('rev') and opts.get('remove'):
2836 raise util.Abort(_("--rev and --remove are incompatible"))
2836 raise util.Abort(_("--rev and --remove are incompatible"))
2837 if opts.get('rev'):
2837 if opts.get('rev'):
2838 rev_ = opts['rev']
2838 rev_ = opts['rev']
2839 message = opts.get('message')
2839 message = opts.get('message')
2840 if opts.get('remove'):
2840 if opts.get('remove'):
2841 expectedtype = opts.get('local') and 'local' or 'global'
2841 expectedtype = opts.get('local') and 'local' or 'global'
2842 for n in names:
2842 for n in names:
2843 if not repo.tagtype(n):
2843 if not repo.tagtype(n):
2844 raise util.Abort(_('tag \'%s\' does not exist') % n)
2844 raise util.Abort(_('tag \'%s\' does not exist') % n)
2845 if repo.tagtype(n) != expectedtype:
2845 if repo.tagtype(n) != expectedtype:
2846 if expectedtype == 'global':
2846 if expectedtype == 'global':
2847 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2847 raise util.Abort(_('tag \'%s\' is not a global tag') % n)
2848 else:
2848 else:
2849 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2849 raise util.Abort(_('tag \'%s\' is not a local tag') % n)
2850 rev_ = nullid
2850 rev_ = nullid
2851 if not message:
2851 if not message:
2852 message = _('Removed tag %s') % ', '.join(names)
2852 message = _('Removed tag %s') % ', '.join(names)
2853 elif not opts.get('force'):
2853 elif not opts.get('force'):
2854 for n in names:
2854 for n in names:
2855 if n in repo.tags():
2855 if n in repo.tags():
2856 raise util.Abort(_('tag \'%s\' already exists '
2856 raise util.Abort(_('tag \'%s\' already exists '
2857 '(use -f to force)') % n)
2857 '(use -f to force)') % n)
2858 if not rev_ and repo.dirstate.parents()[1] != nullid:
2858 if not rev_ and repo.dirstate.parents()[1] != nullid:
2859 raise util.Abort(_('uncommitted merge - please provide a '
2859 raise util.Abort(_('uncommitted merge - please provide a '
2860 'specific revision'))
2860 'specific revision'))
2861 r = repo[rev_].node()
2861 r = repo[rev_].node()
2862
2862
2863 if not message:
2863 if not message:
2864 message = (_('Added tag %s for changeset %s') %
2864 message = (_('Added tag %s for changeset %s') %
2865 (', '.join(names), short(r)))
2865 (', '.join(names), short(r)))
2866
2866
2867 date = opts.get('date')
2867 date = opts.get('date')
2868 if date:
2868 if date:
2869 date = util.parsedate(date)
2869 date = util.parsedate(date)
2870
2870
2871 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2871 repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
2872
2872
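# Editor's sketch (not part of the changeset): the first validation steps of
# tag() above, extracted for illustration -- duplicate names and the reserved
# names 'tip', '.' and 'null' are rejected before anything is written.
def _example_tag_validation(names):
    if len(names) != len(set(names)):
        return 'rejected: tag names must be unique'
    for n in names:
        if n in ['tip', '.', 'null']:
            return "rejected: the name '%s' is reserved" % n
    return 'accepted'
# e.g. _example_tag_validation(('v1.0', 'tip')) returns the "reserved" message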
2873 def tags(ui, repo):
2873 def tags(ui, repo):
2874 """list repository tags
2874 """list repository tags
2875
2875
2876 This lists both regular and local tags. When the -v/--verbose
2876 This lists both regular and local tags. When the -v/--verbose
2877 switch is used, a third column "local" is printed for local tags.
2877 switch is used, a third column "local" is printed for local tags.
2878 """
2878 """
2879
2879
2880 hexfunc = ui.debugflag and hex or short
2880 hexfunc = ui.debugflag and hex or short
2881 tagtype = ""
2881 tagtype = ""
2882
2882
2883 for t, n in reversed(repo.tagslist()):
2883 for t, n in reversed(repo.tagslist()):
2884 if ui.quiet:
2884 if ui.quiet:
2885 ui.write("%s\n" % t)
2885 ui.write("%s\n" % t)
2886 continue
2886 continue
2887
2887
2888 try:
2888 try:
2889 hn = hexfunc(n)
2889 hn = hexfunc(n)
2890 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2890 r = "%5d:%s" % (repo.changelog.rev(n), hn)
2891 except error.LookupError:
2891 except error.LookupError:
2892 r = " ?:%s" % hn
2892 r = " ?:%s" % hn
2893 else:
2893 else:
2894 spaces = " " * (30 - encoding.colwidth(t))
2894 spaces = " " * (30 - encoding.colwidth(t))
2895 if ui.verbose:
2895 if ui.verbose:
2896 if repo.tagtype(t) == 'local':
2896 if repo.tagtype(t) == 'local':
2897 tagtype = " local"
2897 tagtype = " local"
2898 else:
2898 else:
2899 tagtype = ""
2899 tagtype = ""
2900 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2900 ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
2901
2901
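# Editor's sketch (assumption): the alignment used by tags() above -- the tag
# name is padded to 30 display columns before the "rev:hash" field; len() here
# stands in for encoding.colwidth(), which also counts wide characters.
def _example_tag_column(t, r):
    spaces = " " * (30 - len(t))
    return "%s%s %s\n" % (t, spaces, r)
# e.g. _example_tag_column('v1.0', '42:1234567890ab') pads 'v1.0' to 30 columns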
2902 def tip(ui, repo, **opts):
2902 def tip(ui, repo, **opts):
2903 """show the tip revision
2903 """show the tip revision
2904
2904
2905 The tip revision (usually just called the tip) is the most
2905 The tip revision (usually just called the tip) is the most
2906 recently added changeset in the repository, the most recently
2906 recently added changeset in the repository, the most recently
2907 changed head.
2907 changed head.
2908
2908
2909 If you have just made a commit, that commit will be the tip. If
2909 If you have just made a commit, that commit will be the tip. If
2910 you have just pulled changes from another repository, the tip of
2910 you have just pulled changes from another repository, the tip of
2911 that repository becomes the current tip. The "tip" tag is special
2911 that repository becomes the current tip. The "tip" tag is special
2912 and cannot be renamed or assigned to a different changeset.
2912 and cannot be renamed or assigned to a different changeset.
2913 """
2913 """
2914 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2914 cmdutil.show_changeset(ui, repo, opts).show(repo[len(repo) - 1])
2915
2915
2916 def unbundle(ui, repo, fname1, *fnames, **opts):
2916 def unbundle(ui, repo, fname1, *fnames, **opts):
2917 """apply one or more changegroup files
2917 """apply one or more changegroup files
2918
2918
2919 Apply one or more compressed changegroup files generated by the
2919 Apply one or more compressed changegroup files generated by the
2920 bundle command.
2920 bundle command.
2921 """
2921 """
2922 fnames = (fname1,) + fnames
2922 fnames = (fname1,) + fnames
2923
2923
2924 lock = repo.lock()
2924 lock = repo.lock()
2925 try:
2925 try:
2926 for fname in fnames:
2926 for fname in fnames:
2927 f = url.open(ui, fname)
2927 f = url.open(ui, fname)
2928 gen = changegroup.readbundle(f, fname)
2928 gen = changegroup.readbundle(f, fname)
2929 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2929 modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
2930 finally:
2930 finally:
2931 lock.release()
2931 lock.release()
2932
2932
2933 return postincoming(ui, repo, modheads, opts.get('update'), None)
2933 return postincoming(ui, repo, modheads, opts.get('update'), None)
2934
2934
2935 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2935 def update(ui, repo, node=None, rev=None, clean=False, date=None):
2936 """update working directory
2936 """update working directory
2937
2937
2938 Update the repository's working directory to the specified
2938 Update the repository's working directory to the specified
2939 revision, or the tip of the current branch if none is specified.
2939 revision, or the tip of the current branch if none is specified.
2940 Use null as the revision to remove the working copy (like 'hg
2940 Use null as the revision to remove the working copy (like 'hg
2941 clone -U').
2941 clone -U').
2942
2942
2943 When the working directory contains no uncommitted changes, it
2943 When the working directory contains no uncommitted changes, it
2944 will be replaced by the state of the requested revision from the
2944 will be replaced by the state of the requested revision from the
2945 repository. When the requested revision is on a different branch,
2945 repository. When the requested revision is on a different branch,
2946 the working directory will additionally be switched to that
2946 the working directory will additionally be switched to that
2947 branch.
2947 branch.
2948
2948
2949 When there are uncommitted changes, use option -C/--clean to
2949 When there are uncommitted changes, use option -C/--clean to
2950 discard them, forcibly replacing the state of the working
2950 discard them, forcibly replacing the state of the working
2951 directory with the requested revision.
2951 directory with the requested revision.
2952
2952
2953 When there are uncommitted changes and option -C/--clean is not
2953 When there are uncommitted changes and option -C/--clean is not
2954 used, and the parent revision and requested revision are on the
2954 used, and the parent revision and requested revision are on the
2955 same branch, and one of them is an ancestor of the other, then the
2955 same branch, and one of them is an ancestor of the other, then the
2956 new working directory will contain the requested revision merged
2956 new working directory will contain the requested revision merged
2957 with the uncommitted changes. Otherwise, the update will fail with
2957 with the uncommitted changes. Otherwise, the update will fail with
2958 a suggestion to use 'merge' or 'update -C' instead.
2958 a suggestion to use 'merge' or 'update -C' instead.
2959
2959
2960 If you want to update just one file to an older revision, use
2960 If you want to update just one file to an older revision, use
2961 revert.
2961 revert.
2962
2962
2963 See 'hg help dates' for a list of formats valid for -d/--date.
2963 See 'hg help dates' for a list of formats valid for -d/--date.
2964 """
2964 """
2965 if rev and node:
2965 if rev and node:
2966 raise util.Abort(_("please specify just one revision"))
2966 raise util.Abort(_("please specify just one revision"))
2967
2967
2968 if not rev:
2968 if not rev:
2969 rev = node
2969 rev = node
2970
2970
2971 if date:
2971 if date:
2972 if rev:
2972 if rev:
2973 raise util.Abort(_("you can't specify a revision and a date"))
2973 raise util.Abort(_("you can't specify a revision and a date"))
2974 rev = cmdutil.finddate(ui, repo, date)
2974 rev = cmdutil.finddate(ui, repo, date)
2975
2975
2976 if clean:
2976 if clean:
2977 return hg.clean(repo, rev)
2977 return hg.clean(repo, rev)
2978 else:
2978 else:
2979 return hg.update(repo, rev)
2979 return hg.update(repo, rev)
2980
2980
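# Editor's sketch (illustration only): the main ways the update() command
# above can be driven, given a ui object and an open repository; the revision
# '1.0' and the date are assumed example values.
def _example_update_calls(exui, exrepo):
    update(exui, exrepo)                     # tip of the current branch
    update(exui, exrepo, rev='1.0')          # a specific revision or tag
    update(exui, exrepo, clean=True)         # discard uncommitted changes
    update(exui, exrepo, date='2009-05-01')  # tipmost revision on that date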
2981 def verify(ui, repo):
2981 def verify(ui, repo):
2982 """verify the integrity of the repository
2982 """verify the integrity of the repository
2983
2983
2984 Verify the integrity of the current repository.
2984 Verify the integrity of the current repository.
2985
2985
2986 This will perform an extensive check of the repository's
2986 This will perform an extensive check of the repository's
2987 integrity, validating the hashes and checksums of each entry in
2987 integrity, validating the hashes and checksums of each entry in
2988 the changelog, manifest, and tracked files, as well as the
2988 the changelog, manifest, and tracked files, as well as the
2989 integrity of their crosslinks and indices.
2989 integrity of their crosslinks and indices.
2990 """
2990 """
2991 return hg.verify(repo)
2991 return hg.verify(repo)
2992
2992
2993 def version_(ui):
2993 def version_(ui):
2994 """output version and copyright information"""
2994 """output version and copyright information"""
2995 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2995 ui.write(_("Mercurial Distributed SCM (version %s)\n")
2996 % util.version())
2996 % util.version())
2997 ui.status(_(
2997 ui.status(_(
2998 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2998 "\nCopyright (C) 2005-2009 Matt Mackall <mpm@selenic.com> and others\n"
2999 "This is free software; see the source for copying conditions. "
2999 "This is free software; see the source for copying conditions. "
3000 "There is NO\nwarranty; "
3000 "There is NO\nwarranty; "
3001 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3001 "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
3002 ))
3002 ))
3003
3003
3004 # Command options and aliases are listed here, alphabetically
3004 # Command options and aliases are listed here, alphabetically
3005
3005
3006 globalopts = [
3006 globalopts = [
3007 ('R', 'repository', '',
3007 ('R', 'repository', '',
3008 _('repository root directory or symbolic path name')),
3008 _('repository root directory or symbolic path name')),
3009 ('', 'cwd', '', _('change working directory')),
3009 ('', 'cwd', '', _('change working directory')),
3010 ('y', 'noninteractive', None,
3010 ('y', 'noninteractive', None,
3011 _('do not prompt, assume \'yes\' for any required answers')),
3011 _('do not prompt, assume \'yes\' for any required answers')),
3012 ('q', 'quiet', None, _('suppress output')),
3012 ('q', 'quiet', None, _('suppress output')),
3013 ('v', 'verbose', None, _('enable additional output')),
3013 ('v', 'verbose', None, _('enable additional output')),
3014 ('', 'config', [], _('set/override config option')),
3014 ('', 'config', [], _('set/override config option')),
3015 ('', 'debug', None, _('enable debugging output')),
3015 ('', 'debug', None, _('enable debugging output')),
3016 ('', 'debugger', None, _('start debugger')),
3016 ('', 'debugger', None, _('start debugger')),
3017 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3017 ('', 'encoding', encoding.encoding, _('set the charset encoding')),
3018 ('', 'encodingmode', encoding.encodingmode,
3018 ('', 'encodingmode', encoding.encodingmode,
3019 _('set the charset encoding mode')),
3019 _('set the charset encoding mode')),
3020 ('', 'traceback', None, _('print traceback on exception')),
3020 ('', 'traceback', None, _('print traceback on exception')),
3021 ('', 'time', None, _('time how long the command takes')),
3021 ('', 'time', None, _('time how long the command takes')),
3022 ('', 'profile', None, _('print command execution profile')),
3022 ('', 'profile', None, _('print command execution profile')),
3023 ('', 'version', None, _('output version information and exit')),
3023 ('', 'version', None, _('output version information and exit')),
3024 ('h', 'help', None, _('display help and exit')),
3024 ('h', 'help', None, _('display help and exit')),
3025 ]
3025 ]
3026
3026
3027 dryrunopts = [('n', 'dry-run', None,
3027 dryrunopts = [('n', 'dry-run', None,
3028 _('do not perform actions, just print output'))]
3028 _('do not perform actions, just print output'))]
3029
3029
3030 remoteopts = [
3030 remoteopts = [
3031 ('e', 'ssh', '', _('specify ssh command to use')),
3031 ('e', 'ssh', '', _('specify ssh command to use')),
3032 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3032 ('', 'remotecmd', '', _('specify hg command to run on the remote side')),
3033 ]
3033 ]
3034
3034
3035 walkopts = [
3035 walkopts = [
3036 ('I', 'include', [], _('include names matching the given patterns')),
3036 ('I', 'include', [], _('include names matching the given patterns')),
3037 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3037 ('X', 'exclude', [], _('exclude names matching the given patterns')),
3038 ]
3038 ]
3039
3039
3040 commitopts = [
3040 commitopts = [
3041 ('m', 'message', '', _('use <text> as commit message')),
3041 ('m', 'message', '', _('use <text> as commit message')),
3042 ('l', 'logfile', '', _('read commit message from <file>')),
3042 ('l', 'logfile', '', _('read commit message from <file>')),
3043 ]
3043 ]
3044
3044
3045 commitopts2 = [
3045 commitopts2 = [
3046 ('d', 'date', '', _('record datecode as commit date')),
3046 ('d', 'date', '', _('record datecode as commit date')),
3047 ('u', 'user', '', _('record the specified user as committer')),
3047 ('u', 'user', '', _('record the specified user as committer')),
3048 ]
3048 ]
3049
3049
3050 templateopts = [
3050 templateopts = [
3051 ('', 'style', '', _('display using template map file')),
3051 ('', 'style', '', _('display using template map file')),
3052 ('', 'template', '', _('display with template')),
3052 ('', 'template', '', _('display with template')),
3053 ]
3053 ]
3054
3054
3055 logopts = [
3055 logopts = [
3056 ('p', 'patch', None, _('show patch')),
3056 ('p', 'patch', None, _('show patch')),
3057 ('g', 'git', None, _('use git extended diff format')),
3057 ('g', 'git', None, _('use git extended diff format')),
3058 ('l', 'limit', '', _('limit number of changes displayed')),
3058 ('l', 'limit', '', _('limit number of changes displayed')),
3059 ('M', 'no-merges', None, _('do not show merges')),
3059 ('M', 'no-merges', None, _('do not show merges')),
3060 ] + templateopts
3060 ] + templateopts
3061
3061
3062 diffopts = [
3062 diffopts = [
3063 ('a', 'text', None, _('treat all files as text')),
3063 ('a', 'text', None, _('treat all files as text')),
3064 ('g', 'git', None, _('use git extended diff format')),
3064 ('g', 'git', None, _('use git extended diff format')),
3065 ('', 'nodates', None, _("don't include dates in diff headers"))
3065 ('', 'nodates', None, _("don't include dates in diff headers"))
3066 ]
3066 ]
3067
3067
3068 diffopts2 = [
3068 diffopts2 = [
3069 ('p', 'show-function', None, _('show which function each change is in')),
3069 ('p', 'show-function', None, _('show which function each change is in')),
3070 ('w', 'ignore-all-space', None,
3070 ('w', 'ignore-all-space', None,
3071 _('ignore white space when comparing lines')),
3071 _('ignore white space when comparing lines')),
3072 ('b', 'ignore-space-change', None,
3072 ('b', 'ignore-space-change', None,
3073 _('ignore changes in the amount of white space')),
3073 _('ignore changes in the amount of white space')),
3074 ('B', 'ignore-blank-lines', None,
3074 ('B', 'ignore-blank-lines', None,
3075 _('ignore changes whose lines are all blank')),
3075 _('ignore changes whose lines are all blank')),
3076 ('U', 'unified', '', _('number of lines of context to show'))
3076 ('U', 'unified', '', _('number of lines of context to show'))
3077 ]
3077 ]
3078
3078
3079 similarityopts = [
3079 similarityopts = [
3080 ('s', 'similarity', '',
3080 ('s', 'similarity', '',
3081 _('guess renamed files by similarity (0<=s<=100)'))
3081 _('guess renamed files by similarity (0<=s<=100)'))
3082 ]
3082 ]
3083
3083
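# Editor's note (illustration, not part of the changeset): every entry in the
# option lists above is a (shortname, longname, default, helptext) tuple, and
# each key in the command table below maps to (function, options) plus an
# optional usage synopsis. A hypothetical extra option list would look like:
exampleopts = [
    ('x', 'example', None, _('an illustrative flag; not a real option')),
]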
3084 table = {
3084 table = {
3085 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3085 "^add": (add, walkopts + dryrunopts, _('[OPTION]... [FILE]...')),
3086 "addremove":
3086 "addremove":
3087 (addremove, similarityopts + walkopts + dryrunopts,
3087 (addremove, similarityopts + walkopts + dryrunopts,
3088 _('[OPTION]... [FILE]...')),
3088 _('[OPTION]... [FILE]...')),
3089 "^annotate|blame":
3089 "^annotate|blame":
3090 (annotate,
3090 (annotate,
3091 [('r', 'rev', '', _('annotate the specified revision')),
3091 [('r', 'rev', '', _('annotate the specified revision')),
3092 ('f', 'follow', None, _('follow file copies and renames')),
3092 ('f', 'follow', None, _('follow file copies and renames')),
3093 ('a', 'text', None, _('treat all files as text')),
3093 ('a', 'text', None, _('treat all files as text')),
3094 ('u', 'user', None, _('list the author (long with -v)')),
3094 ('u', 'user', None, _('list the author (long with -v)')),
3095 ('d', 'date', None, _('list the date (short with -q)')),
3095 ('d', 'date', None, _('list the date (short with -q)')),
3096 ('n', 'number', None, _('list the revision number (default)')),
3096 ('n', 'number', None, _('list the revision number (default)')),
3097 ('c', 'changeset', None, _('list the changeset')),
3097 ('c', 'changeset', None, _('list the changeset')),
3098 ('l', 'line-number', None,
3098 ('l', 'line-number', None,
3099 _('show line number at the first appearance'))
3099 _('show line number at the first appearance'))
3100 ] + walkopts,
3100 ] + walkopts,
3101 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3101 _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')),
3102 "archive":
3102 "archive":
3103 (archive,
3103 (archive,
3104 [('', 'no-decode', None, _('do not pass files through decoders')),
3104 [('', 'no-decode', None, _('do not pass files through decoders')),
3105 ('p', 'prefix', '', _('directory prefix for files in archive')),
3105 ('p', 'prefix', '', _('directory prefix for files in archive')),
3106 ('r', 'rev', '', _('revision to distribute')),
3106 ('r', 'rev', '', _('revision to distribute')),
3107 ('t', 'type', '', _('type of distribution to create')),
3107 ('t', 'type', '', _('type of distribution to create')),
3108 ] + walkopts,
3108 ] + walkopts,
3109 _('[OPTION]... DEST')),
3109 _('[OPTION]... DEST')),
3110 "backout":
3110 "backout":
3111 (backout,
3111 (backout,
3112 [('', 'merge', None,
3112 [('', 'merge', None,
3113 _('merge with old dirstate parent after backout')),
3113 _('merge with old dirstate parent after backout')),
3114 ('', 'parent', '', _('parent to choose when backing out merge')),
3114 ('', 'parent', '', _('parent to choose when backing out merge')),
3115 ('r', 'rev', '', _('revision to backout')),
3115 ('r', 'rev', '', _('revision to backout')),
3116 ] + walkopts + commitopts + commitopts2,
3116 ] + walkopts + commitopts + commitopts2,
3117 _('[OPTION]... [-r] REV')),
3117 _('[OPTION]... [-r] REV')),
3118 "bisect":
3118 "bisect":
3119 (bisect,
3119 (bisect,
3120 [('r', 'reset', False, _('reset bisect state')),
3120 [('r', 'reset', False, _('reset bisect state')),
3121 ('g', 'good', False, _('mark changeset good')),
3121 ('g', 'good', False, _('mark changeset good')),
3122 ('b', 'bad', False, _('mark changeset bad')),
3122 ('b', 'bad', False, _('mark changeset bad')),
3123 ('s', 'skip', False, _('skip testing changeset')),
3123 ('s', 'skip', False, _('skip testing changeset')),
3124 ('c', 'command', '', _('use command to check changeset state')),
3124 ('c', 'command', '', _('use command to check changeset state')),
3125 ('U', 'noupdate', False, _('do not update to target'))],
3125 ('U', 'noupdate', False, _('do not update to target'))],
3126 _("[-gbsr] [-c CMD] [REV]")),
3126 _("[-gbsr] [-c CMD] [REV]")),
3127 "branch":
3127 "branch":
3128 (branch,
3128 (branch,
3129 [('f', 'force', None,
3129 [('f', 'force', None,
3130 _('set branch name even if it shadows an existing branch')),
3130 _('set branch name even if it shadows an existing branch')),
3131 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3131 ('C', 'clean', None, _('reset branch name to parent branch name'))],
3132 _('[-fC] [NAME]')),
3132 _('[-fC] [NAME]')),
3133 "branches":
3133 "branches":
3134 (branches,
3134 (branches,
3135 [('a', 'active', False,
3135 [('a', 'active', False,
3136 _('show only branches that have unmerged heads'))],
3136 _('show only branches that have unmerged heads'))],
3137 _('[-a]')),
3137 _('[-a]')),
3138 "bundle":
3138 "bundle":
3139 (bundle,
3139 (bundle,
3140 [('f', 'force', None,
3140 [('f', 'force', None,
3141 _('run even when remote repository is unrelated')),
3141 _('run even when remote repository is unrelated')),
3142 ('r', 'rev', [],
3142 ('r', 'rev', [],
3143 _('a changeset up to which you would like to bundle')),
3143 _('a changeset up to which you would like to bundle')),
3144 ('', 'base', [],
3144 ('', 'base', [],
3145 _('a base changeset to specify instead of a destination')),
3145 _('a base changeset to specify instead of a destination')),
3146 ('a', 'all', None, _('bundle all changesets in the repository')),
3146 ('a', 'all', None, _('bundle all changesets in the repository')),
3147 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3147 ('t', 'type', 'bzip2', _('bundle compression type to use')),
3148 ] + remoteopts,
3148 ] + remoteopts,
3149 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3149 _('[-f] [-a] [-r REV]... [--base REV]... FILE [DEST]')),
3150 "cat":
3150 "cat":
3151 (cat,
3151 (cat,
3152 [('o', 'output', '', _('print output to file with formatted name')),
3152 [('o', 'output', '', _('print output to file with formatted name')),
3153 ('r', 'rev', '', _('print the given revision')),
3153 ('r', 'rev', '', _('print the given revision')),
3154 ('', 'decode', None, _('apply any matching decode filter')),
3154 ('', 'decode', None, _('apply any matching decode filter')),
3155 ] + walkopts,
3155 ] + walkopts,
3156 _('[OPTION]... FILE...')),
3156 _('[OPTION]... FILE...')),
3157 "^clone":
3157 "^clone":
3158 (clone,
3158 (clone,
3159 [('U', 'noupdate', None,
3159 [('U', 'noupdate', None,
3160 _('the clone will only contain a repository (no working copy)')),
3160 _('the clone will only contain a repository (no working copy)')),
3161 ('r', 'rev', [],
3161 ('r', 'rev', [],
3162 _('a changeset you would like to have after cloning')),
3162 _('a changeset you would like to have after cloning')),
3163 ('', 'pull', None, _('use pull protocol to copy metadata')),
3163 ('', 'pull', None, _('use pull protocol to copy metadata')),
3164 ('', 'uncompressed', None,
3164 ('', 'uncompressed', None,
3165 _('use uncompressed transfer (fast over LAN)')),
3165 _('use uncompressed transfer (fast over LAN)')),
3166 ] + remoteopts,
3166 ] + remoteopts,
3167 _('[OPTION]... SOURCE [DEST]')),
3167 _('[OPTION]... SOURCE [DEST]')),
3168 "^commit|ci":
3168 "^commit|ci":
3169 (commit,
3169 (commit,
3170 [('A', 'addremove', None,
3170 [('A', 'addremove', None,
3171 _('mark new/missing files as added/removed before committing')),
3171 _('mark new/missing files as added/removed before committing')),
3172 ('', 'close-branch', None,
3172 ('', 'close-branch', None,
3173 _('mark a branch as closed, hiding it from the branch list')),
3173 _('mark a branch as closed, hiding it from the branch list')),
3174 ] + walkopts + commitopts + commitopts2,
3174 ] + walkopts + commitopts + commitopts2,
3175 _('[OPTION]... [FILE]...')),
3175 _('[OPTION]... [FILE]...')),
3176 "copy|cp":
3176 "copy|cp":
3177 (copy,
3177 (copy,
3178 [('A', 'after', None, _('record a copy that has already occurred')),
3178 [('A', 'after', None, _('record a copy that has already occurred')),
3179 ('f', 'force', None,
3179 ('f', 'force', None,
3180 _('forcibly copy over an existing managed file')),
3180 _('forcibly copy over an existing managed file')),
3181 ] + walkopts + dryrunopts,
3181 ] + walkopts + dryrunopts,
3182 _('[OPTION]... [SOURCE]... DEST')),
3182 _('[OPTION]... [SOURCE]... DEST')),
3183 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3183 "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')),
3184 "debugcheckstate": (debugcheckstate, []),
3184 "debugcheckstate": (debugcheckstate, []),
3185 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3185 "debugcommands": (debugcommands, [], _('[COMMAND]')),
3186 "debugcomplete":
3186 "debugcomplete":
3187 (debugcomplete,
3187 (debugcomplete,
3188 [('o', 'options', None, _('show the command options'))],
3188 [('o', 'options', None, _('show the command options'))],
3189 _('[-o] CMD')),
3189 _('[-o] CMD')),
3190 "debugdate":
3190 "debugdate":
3191 (debugdate,
3191 (debugdate,
3192 [('e', 'extended', None, _('try extended date formats'))],
3192 [('e', 'extended', None, _('try extended date formats'))],
3193 _('[-e] DATE [RANGE]')),
3193 _('[-e] DATE [RANGE]')),
3194 "debugdata": (debugdata, [], _('FILE REV')),
3194 "debugdata": (debugdata, [], _('FILE REV')),
3195 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3195 "debugfsinfo": (debugfsinfo, [], _('[PATH]')),
3196 "debugindex": (debugindex, [], _('FILE')),
3196 "debugindex": (debugindex, [], _('FILE')),
3197 "debugindexdot": (debugindexdot, [], _('FILE')),
3197 "debugindexdot": (debugindexdot, [], _('FILE')),
3198 "debuginstall": (debuginstall, []),
3198 "debuginstall": (debuginstall, []),
3199 "debugrebuildstate":
3199 "debugrebuildstate":
3200 (debugrebuildstate,
3200 (debugrebuildstate,
3201 [('r', 'rev', '', _('revision to rebuild to'))],
3201 [('r', 'rev', '', _('revision to rebuild to'))],
3202 _('[-r REV] [REV]')),
3202 _('[-r REV] [REV]')),
3203 "debugrename":
3203 "debugrename":
3204 (debugrename,
3204 (debugrename,
3205 [('r', 'rev', '', _('revision to debug'))],
3205 [('r', 'rev', '', _('revision to debug'))],
3206 _('[-r REV] FILE')),
3206 _('[-r REV] FILE')),
3207 "debugsetparents":
3207 "debugsetparents":
3208 (debugsetparents, [], _('REV1 [REV2]')),
3208 (debugsetparents, [], _('REV1 [REV2]')),
3209 "debugstate":
3209 "debugstate":
3210 (debugstate,
3210 (debugstate,
3211 [('', 'nodates', None, _('do not display the saved mtime'))],
3211 [('', 'nodates', None, _('do not display the saved mtime'))],
3212 _('[OPTION]...')),
3212 _('[OPTION]...')),
3213 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3213 "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')),
3214 "^diff":
3214 "^diff":
3215 (diff,
3215 (diff,
3216 [('r', 'rev', [], _('revision')),
3216 [('r', 'rev', [], _('revision')),
3217 ('c', 'change', '', _('change made by revision'))
3217 ('c', 'change', '', _('change made by revision'))
3218 ] + diffopts + diffopts2 + walkopts,
3218 ] + diffopts + diffopts2 + walkopts,
3219 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3219 _('[OPTION]... [-r REV1 [-r REV2]] [FILE]...')),
3220 "^export":
3220 "^export":
3221 (export,
3221 (export,
3222 [('o', 'output', '', _('print output to file with formatted name')),
3222 [('o', 'output', '', _('print output to file with formatted name')),
3223 ('', 'switch-parent', None, _('diff against the second parent'))
3223 ('', 'switch-parent', None, _('diff against the second parent'))
3224 ] + diffopts,
3224 ] + diffopts,
3225 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3225 _('[OPTION]... [-o OUTFILESPEC] REV...')),
3226 "grep":
3226 "grep":
3227 (grep,
3227 (grep,
3228 [('0', 'print0', None, _('end fields with NUL')),
3228 [('0', 'print0', None, _('end fields with NUL')),
3229 ('', 'all', None, _('print all revisions that match')),
3229 ('', 'all', None, _('print all revisions that match')),
3230 ('f', 'follow', None,
3230 ('f', 'follow', None,
3231 _('follow changeset history, or file history across copies and renames')),
3231 _('follow changeset history, or file history across copies and renames')),
3232 ('i', 'ignore-case', None, _('ignore case when matching')),
3232 ('i', 'ignore-case', None, _('ignore case when matching')),
3233 ('l', 'files-with-matches', None,
3233 ('l', 'files-with-matches', None,
3234 _('print only filenames and revisions that match')),
3234 _('print only filenames and revisions that match')),
3235 ('n', 'line-number', None, _('print matching line numbers')),
3235 ('n', 'line-number', None, _('print matching line numbers')),
3236 ('r', 'rev', [], _('search in given revision range')),
3236 ('r', 'rev', [], _('search in given revision range')),
3237 ('u', 'user', None, _('list the author (long with -v)')),
3237 ('u', 'user', None, _('list the author (long with -v)')),
3238 ('d', 'date', None, _('list the date (short with -q)')),
3238 ('d', 'date', None, _('list the date (short with -q)')),
3239 ] + walkopts,
3239 ] + walkopts,
3240 _('[OPTION]... PATTERN [FILE]...')),
3240 _('[OPTION]... PATTERN [FILE]...')),
3241 "heads":
3241 "heads":
3242 (heads,
3242 (heads,
3243 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3243 [('r', 'rev', '', _('show only heads which are descendants of REV')),
3244 ('a', 'active', False,
3244 ('a', 'active', False,
3245 _('show only the active heads from open branches')),
3245 _('show only the active heads from open branches')),
3246 ('c', 'closed', False,
3246 ('c', 'closed', False,
3247 _('show normal and closed heads')),
3247 _('show normal and closed heads')),
3248 ] + templateopts,
3248 ] + templateopts,
3249 _('[-r REV] [REV]...')),
3249 _('[-r REV] [REV]...')),
3250 "help": (help_, [], _('[TOPIC]')),
3250 "help": (help_, [], _('[TOPIC]')),
3251 "identify|id":
3251 "identify|id":
3252 (identify,
3252 (identify,
3253 [('r', 'rev', '', _('identify the specified revision')),
3253 [('r', 'rev', '', _('identify the specified revision')),
3254 ('n', 'num', None, _('show local revision number')),
3254 ('n', 'num', None, _('show local revision number')),
3255 ('i', 'id', None, _('show global revision id')),
3255 ('i', 'id', None, _('show global revision id')),
3256 ('b', 'branch', None, _('show branch')),
3256 ('b', 'branch', None, _('show branch')),
3257 ('t', 'tags', None, _('show tags'))],
3257 ('t', 'tags', None, _('show tags'))],
3258 _('[-nibt] [-r REV] [SOURCE]')),
3258 _('[-nibt] [-r REV] [SOURCE]')),
3259 "import|patch":
3259 "import|patch":
3260 (import_,
3260 (import_,
3261 [('p', 'strip', 1,
3261 [('p', 'strip', 1,
3262 _('directory strip option for patch. This has the same '
3262 _('directory strip option for patch. This has the same '
3263 'meaning as the corresponding patch option')),
3263 'meaning as the corresponding patch option')),
3264 ('b', 'base', '', _('base path')),
3264 ('b', 'base', '', _('base path')),
3265 ('f', 'force', None,
3265 ('f', 'force', None,
3266 _('skip check for outstanding uncommitted changes')),
3266 _('skip check for outstanding uncommitted changes')),
3267 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3267 ('', 'no-commit', None, _("don't commit, just update the working directory")),
3268 ('', 'exact', None,
3268 ('', 'exact', None,
3269 _('apply patch to the nodes from which it was generated')),
3269 _('apply patch to the nodes from which it was generated')),
3270 ('', 'import-branch', None,
3270 ('', 'import-branch', None,
3271 _('use any branch information in patch (implied by --exact)'))] +
3271 _('use any branch information in patch (implied by --exact)'))] +
3272 commitopts + commitopts2 + similarityopts,
3272 commitopts + commitopts2 + similarityopts,
3273 _('[OPTION]... PATCH...')),
3273 _('[OPTION]... PATCH...')),
3274 "incoming|in":
3274 "incoming|in":
3275 (incoming,
3275 (incoming,
3276 [('f', 'force', None,
3276 [('f', 'force', None,
3277 _('run even when remote repository is unrelated')),
3277 _('run even when remote repository is unrelated')),
3278 ('n', 'newest-first', None, _('show newest record first')),
3278 ('n', 'newest-first', None, _('show newest record first')),
3279 ('', 'bundle', '', _('file to store the bundles into')),
3279 ('', 'bundle', '', _('file to store the bundles into')),
3280 ('r', 'rev', [],
3280 ('r', 'rev', [],
3281 _('a specific revision up to which you would like to pull')),
3281 _('a specific revision up to which you would like to pull')),
3282 ] + logopts + remoteopts,
3282 ] + logopts + remoteopts,
3283 _('[-p] [-n] [-M] [-f] [-r REV]...'
3283 _('[-p] [-n] [-M] [-f] [-r REV]...'
3284 ' [--bundle FILENAME] [SOURCE]')),
3284 ' [--bundle FILENAME] [SOURCE]')),
3285 "^init":
3285 "^init":
3286 (init,
3286 (init,
3287 remoteopts,
3287 remoteopts,
3288 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3288 _('[-e CMD] [--remotecmd CMD] [DEST]')),
3289 "locate":
3289 "locate":
3290 (locate,
3290 (locate,
3291 [('r', 'rev', '', _('search the repository as it stood at REV')),
3291 [('r', 'rev', '', _('search the repository as it stood at REV')),
3292 ('0', 'print0', None,
3292 ('0', 'print0', None,
3293 _('end filenames with NUL, for use with xargs')),
3293 _('end filenames with NUL, for use with xargs')),
3294 ('f', 'fullpath', None,
3294 ('f', 'fullpath', None,
3295 _('print complete paths from the filesystem root')),
3295 _('print complete paths from the filesystem root')),
3296 ] + walkopts,
3296 ] + walkopts,
3297 _('[OPTION]... [PATTERN]...')),
3297 _('[OPTION]... [PATTERN]...')),
3298 "^log|history":
3298 "^log|history":
3299 (log,
3299 (log,
3300 [('f', 'follow', None,
3300 [('f', 'follow', None,
3301 _('follow changeset history, or file history across copies and renames')),
3301 _('follow changeset history, or file history across copies and renames')),
3302 ('', 'follow-first', None,
3302 ('', 'follow-first', None,
3303 _('only follow the first parent of merge changesets')),
3303 _('only follow the first parent of merge changesets')),
3304 ('d', 'date', '', _('show revisions matching date spec')),
3304 ('d', 'date', '', _('show revisions matching date spec')),
3305 ('C', 'copies', None, _('show copied files')),
3305 ('C', 'copies', None, _('show copied files')),
3306 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3306 ('k', 'keyword', [], _('do case-insensitive search for a keyword')),
3307 ('r', 'rev', [], _('show the specified revision or range')),
3307 ('r', 'rev', [], _('show the specified revision or range')),
3308 ('', 'removed', None, _('include revisions where files were removed')),
3308 ('', 'removed', None, _('include revisions where files were removed')),
3309 ('m', 'only-merges', None, _('show only merges')),
3309 ('m', 'only-merges', None, _('show only merges')),
3310 ('u', 'user', [], _('revisions committed by user')),
3310 ('u', 'user', [], _('revisions committed by user')),
3311 ('b', 'only-branch', [],
3311 ('b', 'only-branch', [],
3312 _('show only changesets within the given named branch')),
3312 _('show only changesets within the given named branch')),
3313 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3313 ('P', 'prune', [], _('do not display revision or any of its ancestors')),
3314 ] + logopts + walkopts,
3314 ] + logopts + walkopts,
3315 _('[OPTION]... [FILE]')),
3315 _('[OPTION]... [FILE]')),
3316 "manifest":
3316 "manifest":
3317 (manifest,
3317 (manifest,
3318 [('r', 'rev', '', _('revision to display'))],
3318 [('r', 'rev', '', _('revision to display'))],
3319 _('[-r REV]')),
3319 _('[-r REV]')),
3320 "^merge":
3320 "^merge":
3321 (merge,
3321 (merge,
3322 [('f', 'force', None, _('force a merge with outstanding changes')),
3322 [('f', 'force', None, _('force a merge with outstanding changes')),
3323 ('r', 'rev', '', _('revision to merge')),
3323 ('r', 'rev', '', _('revision to merge')),
3324 ('S', 'show', None,
3324 ('S', 'show', None,
3325 _('review revisions to merge (no merge is performed)'))],
3325 _('review revisions to merge (no merge is performed)'))],
3326 _('[-f] [[-r] REV]')),
3326 _('[-f] [[-r] REV]')),
3327 "outgoing|out":
3327 "outgoing|out":
3328 (outgoing,
3328 (outgoing,
3329 [('f', 'force', None,
3329 [('f', 'force', None,
3330 _('run even when remote repository is unrelated')),
3330 _('run even when remote repository is unrelated')),
3331 ('r', 'rev', [],
3331 ('r', 'rev', [],
3332 _('a specific revision up to which you would like to push')),
3332 _('a specific revision up to which you would like to push')),
3333 ('n', 'newest-first', None, _('show newest record first')),
3333 ('n', 'newest-first', None, _('show newest record first')),
3334 ] + logopts + remoteopts,
3334 ] + logopts + remoteopts,
3335 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3335 _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')),
3336 "^parents":
3336 "^parents":
3337 (parents,
3337 (parents,
3338 [('r', 'rev', '', _('show parents from the specified revision')),
3338 [('r', 'rev', '', _('show parents from the specified revision')),
3339 ] + templateopts,
3339 ] + templateopts,
3340 _('[-r REV] [FILE]')),
3340 _('[-r REV] [FILE]')),
3341 "paths": (paths, [], _('[NAME]')),
3341 "paths": (paths, [], _('[NAME]')),
3342 "^pull":
3342 "^pull":
3343 (pull,
3343 (pull,
3344 [('u', 'update', None,
3344 [('u', 'update', None,
3345 _('update to new tip if changesets were pulled')),
3345 _('update to new tip if changesets were pulled')),
3346 ('f', 'force', None,
3346 ('f', 'force', None,
3347 _('run even when remote repository is unrelated')),
3347 _('run even when remote repository is unrelated')),
3348 ('r', 'rev', [],
3348 ('r', 'rev', [],
3349 _('a specific revision up to which you would like to pull')),
3349 _('a specific revision up to which you would like to pull')),
3350 ] + remoteopts,
3350 ] + remoteopts,
3351 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3351 _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')),
3352 "^push":
3352 "^push":
3353 (push,
3353 (push,
3354 [('f', 'force', None, _('force push')),
3354 [('f', 'force', None, _('force push')),
3355 ('r', 'rev', [],
3355 ('r', 'rev', [],
3356 _('a specific revision up to which you would like to push')),
3356 _('a specific revision up to which you would like to push')),
3357 ] + remoteopts,
3357 ] + remoteopts,
3358 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3358 _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')),
3359 "recover": (recover, []),
3359 "recover": (recover, []),
3360 "^remove|rm":
3360 "^remove|rm":
3361 (remove,
3361 (remove,
3362 [('A', 'after', None, _('record delete for missing files')),
3362 [('A', 'after', None, _('record delete for missing files')),
3363 ('f', 'force', None,
3363 ('f', 'force', None,
3364 _('remove (and delete) file even if added or modified')),
3364 _('remove (and delete) file even if added or modified')),
3365 ] + walkopts,
3365 ] + walkopts,
3366 _('[OPTION]... FILE...')),
3366 _('[OPTION]... FILE...')),
3367 "rename|mv":
3367 "rename|mv":
3368 (rename,
3368 (rename,
3369 [('A', 'after', None, _('record a rename that has already occurred')),
3369 [('A', 'after', None, _('record a rename that has already occurred')),
3370 ('f', 'force', None,
3370 ('f', 'force', None,
3371 _('forcibly copy over an existing managed file')),
3371 _('forcibly copy over an existing managed file')),
3372 ] + walkopts + dryrunopts,
3372 ] + walkopts + dryrunopts,
3373 _('[OPTION]... SOURCE... DEST')),
3373 _('[OPTION]... SOURCE... DEST')),
3374 "resolve":
3374 "resolve":
3375 (resolve,
3375 (resolve,
3376 [('a', 'all', None, _('remerge all unresolved files')),
3376 [('a', 'all', None, _('remerge all unresolved files')),
3377 ('l', 'list', None, _('list state of files needing merge')),
3377 ('l', 'list', None, _('list state of files needing merge')),
3378 ('m', 'mark', None, _('mark files as resolved')),
3378 ('m', 'mark', None, _('mark files as resolved')),
3379 ('u', 'unmark', None, _('unmark files as resolved'))]
3379 ('u', 'unmark', None, _('unmark files as resolved'))]
3380 + walkopts,
3380 + walkopts,
3381 _('[OPTION]... [FILE]...')),
3381 _('[OPTION]... [FILE]...')),
3382 "revert":
3382 "revert":
3383 (revert,
3383 (revert,
3384 [('a', 'all', None, _('revert all changes when no arguments given')),
3384 [('a', 'all', None, _('revert all changes when no arguments given')),
3385 ('d', 'date', '', _('tipmost revision matching date')),
3385 ('d', 'date', '', _('tipmost revision matching date')),
3386 ('r', 'rev', '', _('revision to revert to')),
3386 ('r', 'rev', '', _('revision to revert to')),
3387 ('', 'no-backup', None, _('do not save backup copies of files')),
3387 ('', 'no-backup', None, _('do not save backup copies of files')),
3388 ] + walkopts + dryrunopts,
3388 ] + walkopts + dryrunopts,
3389 _('[OPTION]... [-r REV] [NAME]...')),
3389 _('[OPTION]... [-r REV] [NAME]...')),
3390 "rollback": (rollback, []),
3390 "rollback": (rollback, []),
3391 "root": (root, []),
3391 "root": (root, []),
3392 "^serve":
3392 "^serve":
3393 (serve,
3393 (serve,
3394 [('A', 'accesslog', '', _('name of access log file to write to')),
3394 [('A', 'accesslog', '', _('name of access log file to write to')),
3395 ('d', 'daemon', None, _('run server in background')),
3395 ('d', 'daemon', None, _('run server in background')),
3396 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3396 ('', 'daemon-pipefds', '', _('used internally by daemon mode')),
3397 ('E', 'errorlog', '', _('name of error log file to write to')),
3397 ('E', 'errorlog', '', _('name of error log file to write to')),
3398 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3398 ('p', 'port', 0, _('port to listen on (default: 8000)')),
3399 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3399 ('a', 'address', '', _('address to listen on (default: all interfaces)')),
3400 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3400 ('', 'prefix', '', _('prefix path to serve from (default: server root)')),
3401 ('n', 'name', '',
3401 ('n', 'name', '',
3402 _('name to show in web pages (default: working directory)')),
3402 _('name to show in web pages (default: working directory)')),
3403 ('', 'webdir-conf', '', _('name of the webdir config file'
3403 ('', 'webdir-conf', '', _('name of the webdir config file'
3404 ' (serve more than one repository)')),
3404 ' (serve more than one repository)')),
3405 ('', 'pid-file', '', _('name of file to write process ID to')),
3405 ('', 'pid-file', '', _('name of file to write process ID to')),
3406 ('', 'stdio', None, _('for remote clients')),
3406 ('', 'stdio', None, _('for remote clients')),
3407 ('t', 'templates', '', _('web templates to use')),
3407 ('t', 'templates', '', _('web templates to use')),
3408 ('', 'style', '', _('template style to use')),
3408 ('', 'style', '', _('template style to use')),
3409 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3409 ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')),
3410 ('', 'certificate', '', _('SSL certificate file'))],
3410 ('', 'certificate', '', _('SSL certificate file'))],
3411 _('[OPTION]...')),
3411 _('[OPTION]...')),
3412 "showconfig|debugconfig":
3412 "showconfig|debugconfig":
3413 (showconfig,
3413 (showconfig,
3414 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3414 [('u', 'untrusted', None, _('show untrusted configuration options'))],
3415 _('[-u] [NAME]...')),
3415 _('[-u] [NAME]...')),
3416 "^status|st":
3416 "^status|st":
3417 (status,
3417 (status,
3418 [('A', 'all', None, _('show status of all files')),
3418 [('A', 'all', None, _('show status of all files')),
3419 ('m', 'modified', None, _('show only modified files')),
3419 ('m', 'modified', None, _('show only modified files')),
3420 ('a', 'added', None, _('show only added files')),
3420 ('a', 'added', None, _('show only added files')),
3421 ('r', 'removed', None, _('show only removed files')),
3421 ('r', 'removed', None, _('show only removed files')),
3422 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3422 ('d', 'deleted', None, _('show only deleted (but tracked) files')),
3423 ('c', 'clean', None, _('show only files without changes')),
3423 ('c', 'clean', None, _('show only files without changes')),
3424 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3424 ('u', 'unknown', None, _('show only unknown (not tracked) files')),
3425 ('i', 'ignored', None, _('show only ignored files')),
3425 ('i', 'ignored', None, _('show only ignored files')),
3426 ('n', 'no-status', None, _('hide status prefix')),
3426 ('n', 'no-status', None, _('hide status prefix')),
3427 ('C', 'copies', None, _('show source of copied files')),
3427 ('C', 'copies', None, _('show source of copied files')),
3428 ('0', 'print0', None,
3428 ('0', 'print0', None,
3429 _('end filenames with NUL, for use with xargs')),
3429 _('end filenames with NUL, for use with xargs')),
3430 ('', 'rev', [], _('show difference from revision')),
3430 ('', 'rev', [], _('show difference from revision')),
3431 ] + walkopts,
3431 ] + walkopts,
3432 _('[OPTION]... [FILE]...')),
3432 _('[OPTION]... [FILE]...')),
3433 "tag":
3433 "tag":
3434 (tag,
3434 (tag,
3435 [('f', 'force', None, _('replace existing tag')),
3435 [('f', 'force', None, _('replace existing tag')),
3436 ('l', 'local', None, _('make the tag local')),
3436 ('l', 'local', None, _('make the tag local')),
3437 ('r', 'rev', '', _('revision to tag')),
3437 ('r', 'rev', '', _('revision to tag')),
3438 ('', 'remove', None, _('remove a tag')),
3438 ('', 'remove', None, _('remove a tag')),
3439 # -l/--local is already there, commitopts cannot be used
3439 # -l/--local is already there, commitopts cannot be used
3440 ('m', 'message', '', _('use <text> as commit message')),
3440 ('m', 'message', '', _('use <text> as commit message')),
3441 ] + commitopts2,
3441 ] + commitopts2,
3442 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3442 _('[-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')),
3443 "tags": (tags, []),
3443 "tags": (tags, []),
3444 "tip":
3444 "tip":
3445 (tip,
3445 (tip,
3446 [('p', 'patch', None, _('show patch')),
3446 [('p', 'patch', None, _('show patch')),
3447 ('g', 'git', None, _('use git extended diff format')),
3447 ('g', 'git', None, _('use git extended diff format')),
3448 ] + templateopts,
3448 ] + templateopts,
3449 _('[-p]')),
3449 _('[-p]')),
3450 "unbundle":
3450 "unbundle":
3451 (unbundle,
3451 (unbundle,
3452 [('u', 'update', None,
3452 [('u', 'update', None,
3453 _('update to new tip if changesets were unbundled'))],
3453 _('update to new tip if changesets were unbundled'))],
3454 _('[-u] FILE...')),
3454 _('[-u] FILE...')),
3455 "^update|up|checkout|co":
3455 "^update|up|checkout|co":
3456 (update,
3456 (update,
3457 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3457 [('C', 'clean', None, _('overwrite locally modified files (no backup)')),
3458 ('d', 'date', '', _('tipmost revision matching date')),
3458 ('d', 'date', '', _('tipmost revision matching date')),
3459 ('r', 'rev', '', _('revision'))],
3459 ('r', 'rev', '', _('revision'))],
3460 _('[-C] [-d DATE] [[-r] REV]')),
3460 _('[-C] [-d DATE] [[-r] REV]')),
3461 "verify": (verify, []),
3461 "verify": (verify, []),
3462 "version": (version_, []),
3462 "version": (version_, []),
3463 }
3463 }
3464
3464
3465 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3465 norepo = ("clone init version help debugcommands debugcomplete debugdata"
3466 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3466 " debugindex debugindexdot debugdate debuginstall debugfsinfo")
3467 optionalrepo = ("identify paths serve showconfig debugancestor")
3467 optionalrepo = ("identify paths serve showconfig debugancestor")
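# [editor's note] Hedged illustration only, not part of this changeset: how an
# entry in the command table above is read, taking "^update|up|checkout|co"
# as the example:
#   - the leading '^' marks the command for the short help listing;
#   - 'update', 'up', 'checkout' and 'co' are interchangeable on the command
#     line, so "hg up -C -r 1.0" behaves like "hg update -C -r 1.0";
#   - the tuple pairs the callback with its option list and usage synopsis,
#     here '[-C] [-d DATE] [[-r] REV]'.
# Commands named in 'norepo' run without a repository; those in
# 'optionalrepo' use one only if it is present.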
@@ -1,1394 +1,1394
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2, incorporated herein by reference.
7 # GNU General Public License version 2, incorporated herein by reference.
8
8
9 from i18n import _
9 from i18n import _
10 from node import hex, nullid, short
10 from node import hex, nullid, short
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
11 import base85, cmdutil, mdiff, util, diffhelpers, copies
12 import cStringIO, email.Parser, os, re, math
12 import cStringIO, email.Parser, os, re, math
13 import sys, tempfile, zlib
13 import sys, tempfile, zlib
14
14
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
15 gitre = re.compile('diff --git a/(.*) b/(.*)')
16
16
17 class PatchError(Exception):
17 class PatchError(Exception):
18 pass
18 pass
19
19
20 class NoHunks(PatchError):
20 class NoHunks(PatchError):
21 pass
21 pass
22
22
23 # helper functions
23 # helper functions
24
24
25 def copyfile(src, dst, basedir):
25 def copyfile(src, dst, basedir):
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
26 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 if os.path.exists(absdst):
27 if os.path.exists(absdst):
28 raise util.Abort(_("cannot create %s: destination already exists") %
28 raise util.Abort(_("cannot create %s: destination already exists") %
29 dst)
29 dst)
30
30
31 dstdir = os.path.dirname(absdst)
31 dstdir = os.path.dirname(absdst)
32 if dstdir and not os.path.isdir(dstdir):
32 if dstdir and not os.path.isdir(dstdir):
33 try:
33 try:
34 os.makedirs(dstdir)
34 os.makedirs(dstdir)
35 except IOError:
35 except IOError:
36 raise util.Abort(
36 raise util.Abort(
37 _("cannot create %s: unable to create destination directory")
37 _("cannot create %s: unable to create destination directory")
38 % dst)
38 % dst)
39
39
40 util.copyfile(abssrc, absdst)
40 util.copyfile(abssrc, absdst)
41
41
42 # public functions
42 # public functions
43
43
44 def extract(ui, fileobj):
44 def extract(ui, fileobj):
45 '''extract patch from data read from fileobj.
45 '''extract patch from data read from fileobj.
46
46
47 patch can be a normal patch or contained in an email message.
47 patch can be a normal patch or contained in an email message.
48
48
49 return tuple (filename, message, user, date, branch, node, p1, p2).
49 return tuple (filename, message, user, date, branch, node, p1, p2).
50 Any item in the returned tuple can be None. If filename is None,
50 Any item in the returned tuple can be None. If filename is None,
51 fileobj did not contain a patch. Caller must unlink filename when done.'''
51 fileobj did not contain a patch. Caller must unlink filename when done.'''
52
52
53 # attempt to detect the start of a patch
53 # attempt to detect the start of a patch
54 # (this heuristic is borrowed from quilt)
54 # (this heuristic is borrowed from quilt)
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
55 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
56 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
56 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
57 r'(---|\*\*\*)[ \t])', re.MULTILINE)
57 r'(---|\*\*\*)[ \t])', re.MULTILINE)
58
58
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
59 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
60 tmpfp = os.fdopen(fd, 'w')
60 tmpfp = os.fdopen(fd, 'w')
61 try:
61 try:
62 msg = email.Parser.Parser().parse(fileobj)
62 msg = email.Parser.Parser().parse(fileobj)
63
63
64 subject = msg['Subject']
64 subject = msg['Subject']
65 user = msg['From']
65 user = msg['From']
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
66 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
67 # should try to parse msg['Date']
67 # should try to parse msg['Date']
68 date = None
68 date = None
69 nodeid = None
69 nodeid = None
70 branch = None
70 branch = None
71 parents = []
71 parents = []
72
72
73 if subject:
73 if subject:
74 if subject.startswith('[PATCH'):
74 if subject.startswith('[PATCH'):
75 pend = subject.find(']')
75 pend = subject.find(']')
76 if pend >= 0:
76 if pend >= 0:
77 subject = subject[pend+1:].lstrip()
77 subject = subject[pend+1:].lstrip()
78 subject = subject.replace('\n\t', ' ')
78 subject = subject.replace('\n\t', ' ')
79 ui.debug('Subject: %s\n' % subject)
79 ui.debug('Subject: %s\n' % subject)
80 if user:
80 if user:
81 ui.debug('From: %s\n' % user)
81 ui.debug('From: %s\n' % user)
82 diffs_seen = 0
82 diffs_seen = 0
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
83 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
84 message = ''
84 message = ''
85 for part in msg.walk():
85 for part in msg.walk():
86 content_type = part.get_content_type()
86 content_type = part.get_content_type()
87 ui.debug('Content-Type: %s\n' % content_type)
87 ui.debug('Content-Type: %s\n' % content_type)
88 if content_type not in ok_types:
88 if content_type not in ok_types:
89 continue
89 continue
90 payload = part.get_payload(decode=True)
90 payload = part.get_payload(decode=True)
91 m = diffre.search(payload)
91 m = diffre.search(payload)
92 if m:
92 if m:
93 hgpatch = False
93 hgpatch = False
94 ignoretext = False
94 ignoretext = False
95
95
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
96 ui.debug(_('found patch at byte %d\n') % m.start(0))
97 diffs_seen += 1
97 diffs_seen += 1
98 cfp = cStringIO.StringIO()
98 cfp = cStringIO.StringIO()
99 for line in payload[:m.start(0)].splitlines():
99 for line in payload[:m.start(0)].splitlines():
100 if line.startswith('# HG changeset patch'):
100 if line.startswith('# HG changeset patch'):
101 ui.debug(_('patch generated by hg export\n'))
101 ui.debug(_('patch generated by hg export\n'))
102 hgpatch = True
102 hgpatch = True
103 # drop earlier commit message content
103 # drop earlier commit message content
104 cfp.seek(0)
104 cfp.seek(0)
105 cfp.truncate()
105 cfp.truncate()
106 subject = None
106 subject = None
107 elif hgpatch:
107 elif hgpatch:
108 if line.startswith('# User '):
108 if line.startswith('# User '):
109 user = line[7:]
109 user = line[7:]
110 ui.debug('From: %s\n' % user)
110 ui.debug('From: %s\n' % user)
111 elif line.startswith("# Date "):
111 elif line.startswith("# Date "):
112 date = line[7:]
112 date = line[7:]
113 elif line.startswith("# Branch "):
113 elif line.startswith("# Branch "):
114 branch = line[9:]
114 branch = line[9:]
115 elif line.startswith("# Node ID "):
115 elif line.startswith("# Node ID "):
116 nodeid = line[10:]
116 nodeid = line[10:]
117 elif line.startswith("# Parent "):
117 elif line.startswith("# Parent "):
118 parents.append(line[10:])
118 parents.append(line[10:])
119 elif line == '---' and gitsendmail:
119 elif line == '---' and gitsendmail:
120 ignoretext = True
120 ignoretext = True
121 if not line.startswith('# ') and not ignoretext:
121 if not line.startswith('# ') and not ignoretext:
122 cfp.write(line)
122 cfp.write(line)
123 cfp.write('\n')
123 cfp.write('\n')
124 message = cfp.getvalue()
124 message = cfp.getvalue()
125 if tmpfp:
125 if tmpfp:
126 tmpfp.write(payload)
126 tmpfp.write(payload)
127 if not payload.endswith('\n'):
127 if not payload.endswith('\n'):
128 tmpfp.write('\n')
128 tmpfp.write('\n')
129 elif not diffs_seen and message and content_type == 'text/plain':
129 elif not diffs_seen and message and content_type == 'text/plain':
130 message += '\n' + payload
130 message += '\n' + payload
131 except:
131 except:
132 tmpfp.close()
132 tmpfp.close()
133 os.unlink(tmpname)
133 os.unlink(tmpname)
134 raise
134 raise
135
135
136 if subject and not message.startswith(subject):
136 if subject and not message.startswith(subject):
137 message = '%s\n%s' % (subject, message)
137 message = '%s\n%s' % (subject, message)
138 tmpfp.close()
138 tmpfp.close()
139 if not diffs_seen:
139 if not diffs_seen:
140 os.unlink(tmpname)
140 os.unlink(tmpname)
141 return None, message, user, date, branch, None, None, None
141 return None, message, user, date, branch, None, None, None
142 p1 = parents and parents.pop(0) or None
142 p1 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
143 p2 = parents and parents.pop(0) or None
144 return tmpname, message, user, date, branch, nodeid, p1, p2
144 return tmpname, message, user, date, branch, nodeid, p1, p2
145
145
146 GP_PATCH = 1 << 0 # we have to run patch
146 GP_PATCH = 1 << 0 # we have to run patch
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
147 GP_FILTER = 1 << 1 # there's some copy/rename operation
148 GP_BINARY = 1 << 2 # there's a binary patch
148 GP_BINARY = 1 << 2 # there's a binary patch
149
149
150 class patchmeta:
150 class patchmeta(object):
151 """Patched file metadata
151 """Patched file metadata
152
152
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
153 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
154 or COPY. 'path' is patched file path. 'oldpath' is set to the
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
155 origin file when 'op' is either COPY or RENAME, None otherwise. If
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
156 file mode is changed, 'mode' is a tuple (islink, isexec) where
157 'islink' is True if the file is a symlink and 'isexec' is True if
157 'islink' is True if the file is a symlink and 'isexec' is True if
158 the file is executable. Otherwise, 'mode' is None.
158 the file is executable. Otherwise, 'mode' is None.
159 """
159 """
160 def __init__(self, path):
160 def __init__(self, path):
161 self.path = path
161 self.path = path
162 self.oldpath = None
162 self.oldpath = None
163 self.mode = None
163 self.mode = None
164 self.op = 'MODIFY'
164 self.op = 'MODIFY'
165 self.lineno = 0
165 self.lineno = 0
166 self.binary = False
166 self.binary = False
167
167
168 def setmode(self, mode):
168 def setmode(self, mode):
169 islink = mode & 020000
169 islink = mode & 020000
170 isexec = mode & 0100
170 isexec = mode & 0100
171 self.mode = (islink, isexec)
171 self.mode = (islink, isexec)
172
172
173 def readgitpatch(lr):
173 def readgitpatch(lr):
174 """extract git-style metadata about patches from <patchname>"""
174 """extract git-style metadata about patches from <patchname>"""
175
175
176 # Filter patch for git information
176 # Filter patch for git information
177 gp = None
177 gp = None
178 gitpatches = []
178 gitpatches = []
179 # Can have a git patch with only metadata, causing patch to complain
179 # Can have a git patch with only metadata, causing patch to complain
180 dopatch = 0
180 dopatch = 0
181
181
182 lineno = 0
182 lineno = 0
183 for line in lr:
183 for line in lr:
184 lineno += 1
184 lineno += 1
185 if line.startswith('diff --git'):
185 if line.startswith('diff --git'):
186 m = gitre.match(line)
186 m = gitre.match(line)
187 if m:
187 if m:
188 if gp:
188 if gp:
189 gitpatches.append(gp)
189 gitpatches.append(gp)
190 src, dst = m.group(1, 2)
190 src, dst = m.group(1, 2)
191 gp = patchmeta(dst)
191 gp = patchmeta(dst)
192 gp.lineno = lineno
192 gp.lineno = lineno
193 elif gp:
193 elif gp:
194 if line.startswith('--- '):
194 if line.startswith('--- '):
195 if gp.op in ('COPY', 'RENAME'):
195 if gp.op in ('COPY', 'RENAME'):
196 dopatch |= GP_FILTER
196 dopatch |= GP_FILTER
197 gitpatches.append(gp)
197 gitpatches.append(gp)
198 gp = None
198 gp = None
199 dopatch |= GP_PATCH
199 dopatch |= GP_PATCH
200 continue
200 continue
201 if line.startswith('rename from '):
201 if line.startswith('rename from '):
202 gp.op = 'RENAME'
202 gp.op = 'RENAME'
203 gp.oldpath = line[12:].rstrip()
203 gp.oldpath = line[12:].rstrip()
204 elif line.startswith('rename to '):
204 elif line.startswith('rename to '):
205 gp.path = line[10:].rstrip()
205 gp.path = line[10:].rstrip()
206 elif line.startswith('copy from '):
206 elif line.startswith('copy from '):
207 gp.op = 'COPY'
207 gp.op = 'COPY'
208 gp.oldpath = line[10:].rstrip()
208 gp.oldpath = line[10:].rstrip()
209 elif line.startswith('copy to '):
209 elif line.startswith('copy to '):
210 gp.path = line[8:].rstrip()
210 gp.path = line[8:].rstrip()
211 elif line.startswith('deleted file'):
211 elif line.startswith('deleted file'):
212 gp.op = 'DELETE'
212 gp.op = 'DELETE'
213 # is the deleted file a symlink?
213 # is the deleted file a symlink?
214 gp.setmode(int(line.rstrip()[-6:], 8))
214 gp.setmode(int(line.rstrip()[-6:], 8))
215 elif line.startswith('new file mode '):
215 elif line.startswith('new file mode '):
216 gp.op = 'ADD'
216 gp.op = 'ADD'
217 gp.setmode(int(line.rstrip()[-6:], 8))
217 gp.setmode(int(line.rstrip()[-6:], 8))
218 elif line.startswith('new mode '):
218 elif line.startswith('new mode '):
219 gp.setmode(int(line.rstrip()[-6:], 8))
219 gp.setmode(int(line.rstrip()[-6:], 8))
220 elif line.startswith('GIT binary patch'):
220 elif line.startswith('GIT binary patch'):
221 dopatch |= GP_BINARY
221 dopatch |= GP_BINARY
222 gp.binary = True
222 gp.binary = True
223 if gp:
223 if gp:
224 gitpatches.append(gp)
224 gitpatches.append(gp)
225
225
226 if not gitpatches:
226 if not gitpatches:
227 dopatch = GP_PATCH
227 dopatch = GP_PATCH
228
228
229 return (dopatch, gitpatches)
229 return (dopatch, gitpatches)
230
230
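# [editor's note] Hedged sketch, not part of the original file, using the
# hypothetical files old.c/new.c.  For a git patch that renames a file and
# then modifies it,
#
#   diff --git a/old.c b/new.c
#   rename from old.c
#   rename to new.c
#   --- a/old.c
#   +++ b/new.c
#   @@ ... @@
#
# readgitpatch() returns dopatch with GP_PATCH and GP_FILTER set and a single
# patchmeta whose op is 'RENAME', path is 'new.c' and oldpath is 'old.c'.  A
# 'GIT binary patch' section would additionally set GP_BINARY.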
231 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
231 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
232 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
232 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
233 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
233 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
234
234
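# [editor's note] Hedged examples, not part of the original file, of what the
# two descriptors above capture:
#
#   >>> unidesc.match('@@ -1,5 +1,6 @@').groups()
#   ('1', ',5', '5', '1', ',6', '6')
#   >>> contextdesc.match('*** 1,7 ****').groups()
#   ('***', '1', ',7', '7', '***')
#
# read_unified_hunk() below unpacks the first form into starta/lena and
# startb/lenb; the optional ',len' groups default to 1 when absent, which is
# why '@@ -3 +3 @@' is also accepted.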
235 class patchfile:
235 class patchfile(object):
236 def __init__(self, ui, fname, opener, missing=False):
236 def __init__(self, ui, fname, opener, missing=False):
237 self.fname = fname
237 self.fname = fname
238 self.opener = opener
238 self.opener = opener
239 self.ui = ui
239 self.ui = ui
240 self.lines = []
240 self.lines = []
241 self.exists = False
241 self.exists = False
242 self.missing = missing
242 self.missing = missing
243 if not missing:
243 if not missing:
244 try:
244 try:
245 self.lines = self.readlines(fname)
245 self.lines = self.readlines(fname)
246 self.exists = True
246 self.exists = True
247 except IOError:
247 except IOError:
248 pass
248 pass
249 else:
249 else:
250 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
250 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
251
251
252 self.hash = {}
252 self.hash = {}
253 self.dirty = 0
253 self.dirty = 0
254 self.offset = 0
254 self.offset = 0
255 self.rej = []
255 self.rej = []
256 self.fileprinted = False
256 self.fileprinted = False
257 self.printfile(False)
257 self.printfile(False)
258 self.hunks = 0
258 self.hunks = 0
259
259
260 def readlines(self, fname):
260 def readlines(self, fname):
261 fp = self.opener(fname, 'r')
261 fp = self.opener(fname, 'r')
262 try:
262 try:
263 return fp.readlines()
263 return fp.readlines()
264 finally:
264 finally:
265 fp.close()
265 fp.close()
266
266
267 def writelines(self, fname, lines):
267 def writelines(self, fname, lines):
268 fp = self.opener(fname, 'w')
268 fp = self.opener(fname, 'w')
269 try:
269 try:
270 fp.writelines(lines)
270 fp.writelines(lines)
271 finally:
271 finally:
272 fp.close()
272 fp.close()
273
273
274 def unlink(self, fname):
274 def unlink(self, fname):
275 os.unlink(fname)
275 os.unlink(fname)
276
276
277 def printfile(self, warn):
277 def printfile(self, warn):
278 if self.fileprinted:
278 if self.fileprinted:
279 return
279 return
280 if warn or self.ui.verbose:
280 if warn or self.ui.verbose:
281 self.fileprinted = True
281 self.fileprinted = True
282 s = _("patching file %s\n") % self.fname
282 s = _("patching file %s\n") % self.fname
283 if warn:
283 if warn:
284 self.ui.warn(s)
284 self.ui.warn(s)
285 else:
285 else:
286 self.ui.note(s)
286 self.ui.note(s)
287
287
288
288
289 def findlines(self, l, linenum):
289 def findlines(self, l, linenum):
290 # looks through the hash and finds candidate lines. The
290 # looks through the hash and finds candidate lines. The
291 # result is a list of line numbers sorted based on distance
291 # result is a list of line numbers sorted based on distance
292 # from linenum
292 # from linenum
293 def sorter(a, b):
293 def sorter(a, b):
294 vala = abs(a - linenum)
294 vala = abs(a - linenum)
295 valb = abs(b - linenum)
295 valb = abs(b - linenum)
296 return cmp(vala, valb)
296 return cmp(vala, valb)
297
297
298 try:
298 try:
299 cand = self.hash[l]
299 cand = self.hash[l]
300 except:
300 except:
301 return []
301 return []
302
302
303 if len(cand) > 1:
303 if len(cand) > 1:
304 # resort our list of potentials forward then back.
304 # resort our list of potentials forward then back.
305 cand.sort(sorter)
305 cand.sort(sorter)
306 return cand
306 return cand
307
307
308 def hashlines(self):
308 def hashlines(self):
309 self.hash = {}
309 self.hash = {}
310 for x, s in enumerate(self.lines):
310 for x, s in enumerate(self.lines):
311 self.hash.setdefault(s, []).append(x)
311 self.hash.setdefault(s, []).append(x)
312
312
313 def write_rej(self):
313 def write_rej(self):
314 # our rejects are a little different from patch(1). This always
314 # our rejects are a little different from patch(1). This always
315 # creates rejects in the same form as the original patch. A file
315 # creates rejects in the same form as the original patch. A file
316 # header is inserted so that you can run the reject through patch again
316 # header is inserted so that you can run the reject through patch again
317 # without having to type the filename.
317 # without having to type the filename.
318
318
319 if not self.rej:
319 if not self.rej:
320 return
320 return
321
321
322 fname = self.fname + ".rej"
322 fname = self.fname + ".rej"
323 self.ui.warn(
323 self.ui.warn(
324 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
324 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
325 (len(self.rej), self.hunks, fname))
325 (len(self.rej), self.hunks, fname))
326
326
327 def rejlines():
327 def rejlines():
328 base = os.path.basename(self.fname)
328 base = os.path.basename(self.fname)
329 yield "--- %s\n+++ %s\n" % (base, base)
329 yield "--- %s\n+++ %s\n" % (base, base)
330 for x in self.rej:
330 for x in self.rej:
331 for l in x.hunk:
331 for l in x.hunk:
332 yield l
332 yield l
333 if l[-1] != '\n':
333 if l[-1] != '\n':
334 yield "\n\ No newline at end of file\n"
334 yield "\n\ No newline at end of file\n"
335
335
336 self.writelines(fname, rejlines())
336 self.writelines(fname, rejlines())
337
337
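    # [editor's note] Hedged illustration, not part of the original file: for
    # a hypothetical target 'foo.c' with one failed hunk, write_rej() above
    # produces 'foo.c.rej' shaped roughly like
    #
    #   --- foo.c
    #   +++ foo.c
    #   @@ -10,3 +10,4 @@
    #    context
    #   +added line
    #    context
    #
    # i.e. the failed hunks verbatim, prefixed with a minimal file header so
    # the reject can later be fed back to a patch tool without retyping the
    # filename.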
338 def write(self, dest=None):
338 def write(self, dest=None):
339 if not self.dirty:
339 if not self.dirty:
340 return
340 return
341 if not dest:
341 if not dest:
342 dest = self.fname
342 dest = self.fname
343 self.writelines(dest, self.lines)
343 self.writelines(dest, self.lines)
344
344
345 def close(self):
345 def close(self):
346 self.write()
346 self.write()
347 self.write_rej()
347 self.write_rej()
348
348
349 def apply(self, h, reverse):
349 def apply(self, h, reverse):
350 if not h.complete():
350 if not h.complete():
351 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
351 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
352 (h.number, h.desc, len(h.a), h.lena, len(h.b),
352 (h.number, h.desc, len(h.a), h.lena, len(h.b),
353 h.lenb))
353 h.lenb))
354
354
355 self.hunks += 1
355 self.hunks += 1
356 if reverse:
356 if reverse:
357 h.reverse()
357 h.reverse()
358
358
359 if self.missing:
359 if self.missing:
360 self.rej.append(h)
360 self.rej.append(h)
361 return -1
361 return -1
362
362
363 if self.exists and h.createfile():
363 if self.exists and h.createfile():
364 self.ui.warn(_("file %s already exists\n") % self.fname)
364 self.ui.warn(_("file %s already exists\n") % self.fname)
365 self.rej.append(h)
365 self.rej.append(h)
366 return -1
366 return -1
367
367
368 if isinstance(h, githunk):
368 if isinstance(h, githunk):
369 if h.rmfile():
369 if h.rmfile():
370 self.unlink(self.fname)
370 self.unlink(self.fname)
371 else:
371 else:
372 self.lines[:] = h.new()
372 self.lines[:] = h.new()
373 self.offset += len(h.new())
373 self.offset += len(h.new())
374 self.dirty = 1
374 self.dirty = 1
375 return 0
375 return 0
376
376
377 # fast case first, no offsets, no fuzz
377 # fast case first, no offsets, no fuzz
378 old = h.old()
378 old = h.old()
379 # patch starts counting at 1 unless we are adding the file
379 # patch starts counting at 1 unless we are adding the file
380 if h.starta == 0:
380 if h.starta == 0:
381 start = 0
381 start = 0
382 else:
382 else:
383 start = h.starta + self.offset - 1
383 start = h.starta + self.offset - 1
384 orig_start = start
384 orig_start = start
385 if diffhelpers.testhunk(old, self.lines, start) == 0:
385 if diffhelpers.testhunk(old, self.lines, start) == 0:
386 if h.rmfile():
386 if h.rmfile():
387 self.unlink(self.fname)
387 self.unlink(self.fname)
388 else:
388 else:
389 self.lines[start : start + h.lena] = h.new()
389 self.lines[start : start + h.lena] = h.new()
390 self.offset += h.lenb - h.lena
390 self.offset += h.lenb - h.lena
391 self.dirty = 1
391 self.dirty = 1
392 return 0
392 return 0
393
393
394 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
394 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
395 self.hashlines()
395 self.hashlines()
396 if h.hunk[-1][0] != ' ':
396 if h.hunk[-1][0] != ' ':
397 # if the hunk tried to put something at the bottom of the file
397 # if the hunk tried to put something at the bottom of the file
398 # override the start line and use eof here
398 # override the start line and use eof here
399 search_start = len(self.lines)
399 search_start = len(self.lines)
400 else:
400 else:
401 search_start = orig_start
401 search_start = orig_start
402
402
403 for fuzzlen in xrange(3):
403 for fuzzlen in xrange(3):
404 for toponly in [ True, False ]:
404 for toponly in [ True, False ]:
405 old = h.old(fuzzlen, toponly)
405 old = h.old(fuzzlen, toponly)
406
406
407 cand = self.findlines(old[0][1:], search_start)
407 cand = self.findlines(old[0][1:], search_start)
408 for l in cand:
408 for l in cand:
409 if diffhelpers.testhunk(old, self.lines, l) == 0:
409 if diffhelpers.testhunk(old, self.lines, l) == 0:
410 newlines = h.new(fuzzlen, toponly)
410 newlines = h.new(fuzzlen, toponly)
411 self.lines[l : l + len(old)] = newlines
411 self.lines[l : l + len(old)] = newlines
412 self.offset += len(newlines) - len(old)
412 self.offset += len(newlines) - len(old)
413 self.dirty = 1
413 self.dirty = 1
414 if fuzzlen:
414 if fuzzlen:
415 fuzzstr = "with fuzz %d " % fuzzlen
415 fuzzstr = "with fuzz %d " % fuzzlen
416 f = self.ui.warn
416 f = self.ui.warn
417 self.printfile(True)
417 self.printfile(True)
418 else:
418 else:
419 fuzzstr = ""
419 fuzzstr = ""
420 f = self.ui.note
420 f = self.ui.note
421 offset = l - orig_start - fuzzlen
421 offset = l - orig_start - fuzzlen
422 if offset == 1:
422 if offset == 1:
423 msg = _("Hunk #%d succeeded at %d %s"
423 msg = _("Hunk #%d succeeded at %d %s"
424 "(offset %d line).\n")
424 "(offset %d line).\n")
425 else:
425 else:
426 msg = _("Hunk #%d succeeded at %d %s"
426 msg = _("Hunk #%d succeeded at %d %s"
427 "(offset %d lines).\n")
427 "(offset %d lines).\n")
428 f(msg % (h.number, l+1, fuzzstr, offset))
428 f(msg % (h.number, l+1, fuzzstr, offset))
429 return fuzzlen
429 return fuzzlen
430 self.printfile(True)
430 self.printfile(True)
431 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
431 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
432 self.rej.append(h)
432 self.rej.append(h)
433 return -1
433 return -1
434
434
435 class hunk:
435 class hunk(object):
436 def __init__(self, desc, num, lr, context, create=False, remove=False):
436 def __init__(self, desc, num, lr, context, create=False, remove=False):
437 self.number = num
437 self.number = num
438 self.desc = desc
438 self.desc = desc
439 self.hunk = [ desc ]
439 self.hunk = [ desc ]
440 self.a = []
440 self.a = []
441 self.b = []
441 self.b = []
442 if context:
442 if context:
443 self.read_context_hunk(lr)
443 self.read_context_hunk(lr)
444 else:
444 else:
445 self.read_unified_hunk(lr)
445 self.read_unified_hunk(lr)
446 self.create = create
446 self.create = create
447 self.remove = remove and not create
447 self.remove = remove and not create
448
448
449 def read_unified_hunk(self, lr):
449 def read_unified_hunk(self, lr):
450 m = unidesc.match(self.desc)
450 m = unidesc.match(self.desc)
451 if not m:
451 if not m:
452 raise PatchError(_("bad hunk #%d") % self.number)
452 raise PatchError(_("bad hunk #%d") % self.number)
453 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
453 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
454 if self.lena is None:
454 if self.lena is None:
455 self.lena = 1
455 self.lena = 1
456 else:
456 else:
457 self.lena = int(self.lena)
457 self.lena = int(self.lena)
458 if self.lenb is None:
458 if self.lenb is None:
459 self.lenb = 1
459 self.lenb = 1
460 else:
460 else:
461 self.lenb = int(self.lenb)
461 self.lenb = int(self.lenb)
462 self.starta = int(self.starta)
462 self.starta = int(self.starta)
463 self.startb = int(self.startb)
463 self.startb = int(self.startb)
464 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
464 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
465 # if we hit eof before finishing out the hunk, the last line will
465 # if we hit eof before finishing out the hunk, the last line will
466 # be zero length. Let's try to fix it up.
466 # be zero length. Let's try to fix it up.
467 while len(self.hunk[-1]) == 0:
467 while len(self.hunk[-1]) == 0:
468 del self.hunk[-1]
468 del self.hunk[-1]
469 del self.a[-1]
469 del self.a[-1]
470 del self.b[-1]
470 del self.b[-1]
471 self.lena -= 1
471 self.lena -= 1
472 self.lenb -= 1
472 self.lenb -= 1
473
473
474 def read_context_hunk(self, lr):
474 def read_context_hunk(self, lr):
475 self.desc = lr.readline()
475 self.desc = lr.readline()
476 m = contextdesc.match(self.desc)
476 m = contextdesc.match(self.desc)
477 if not m:
477 if not m:
478 raise PatchError(_("bad hunk #%d") % self.number)
478 raise PatchError(_("bad hunk #%d") % self.number)
479 foo, self.starta, foo2, aend, foo3 = m.groups()
479 foo, self.starta, foo2, aend, foo3 = m.groups()
480 self.starta = int(self.starta)
480 self.starta = int(self.starta)
481 if aend is None:
481 if aend is None:
482 aend = self.starta
482 aend = self.starta
483 self.lena = int(aend) - self.starta
483 self.lena = int(aend) - self.starta
484 if self.starta:
484 if self.starta:
485 self.lena += 1
485 self.lena += 1
486 for x in xrange(self.lena):
486 for x in xrange(self.lena):
487 l = lr.readline()
487 l = lr.readline()
488 if l.startswith('---'):
488 if l.startswith('---'):
489 lr.push(l)
489 lr.push(l)
490 break
490 break
491 s = l[2:]
491 s = l[2:]
492 if l.startswith('- ') or l.startswith('! '):
492 if l.startswith('- ') or l.startswith('! '):
493 u = '-' + s
493 u = '-' + s
494 elif l.startswith(' '):
494 elif l.startswith(' '):
495 u = ' ' + s
495 u = ' ' + s
496 else:
496 else:
497 raise PatchError(_("bad hunk #%d old text line %d") %
497 raise PatchError(_("bad hunk #%d old text line %d") %
498 (self.number, x))
498 (self.number, x))
499 self.a.append(u)
499 self.a.append(u)
500 self.hunk.append(u)
500 self.hunk.append(u)
501
501
502 l = lr.readline()
502 l = lr.readline()
503 if l.startswith('\ '):
503 if l.startswith('\ '):
504 s = self.a[-1][:-1]
504 s = self.a[-1][:-1]
505 self.a[-1] = s
505 self.a[-1] = s
506 self.hunk[-1] = s
506 self.hunk[-1] = s
507 l = lr.readline()
507 l = lr.readline()
508 m = contextdesc.match(l)
508 m = contextdesc.match(l)
509 if not m:
509 if not m:
510 raise PatchError(_("bad hunk #%d") % self.number)
510 raise PatchError(_("bad hunk #%d") % self.number)
511 foo, self.startb, foo2, bend, foo3 = m.groups()
511 foo, self.startb, foo2, bend, foo3 = m.groups()
512 self.startb = int(self.startb)
512 self.startb = int(self.startb)
513 if bend is None:
513 if bend is None:
514 bend = self.startb
514 bend = self.startb
515 self.lenb = int(bend) - self.startb
515 self.lenb = int(bend) - self.startb
516 if self.startb:
516 if self.startb:
517 self.lenb += 1
517 self.lenb += 1
518 hunki = 1
518 hunki = 1
519 for x in xrange(self.lenb):
519 for x in xrange(self.lenb):
520 l = lr.readline()
520 l = lr.readline()
521 if l.startswith('\ '):
521 if l.startswith('\ '):
522 s = self.b[-1][:-1]
522 s = self.b[-1][:-1]
523 self.b[-1] = s
523 self.b[-1] = s
524 self.hunk[hunki-1] = s
524 self.hunk[hunki-1] = s
525 continue
525 continue
526 if not l:
526 if not l:
527 lr.push(l)
527 lr.push(l)
528 break
528 break
529 s = l[2:]
529 s = l[2:]
530 if l.startswith('+ ') or l.startswith('! '):
530 if l.startswith('+ ') or l.startswith('! '):
531 u = '+' + s
531 u = '+' + s
532 elif l.startswith(' '):
532 elif l.startswith(' '):
533 u = ' ' + s
533 u = ' ' + s
534 elif len(self.b) == 0:
534 elif len(self.b) == 0:
535 # this can happen when the hunk does not add any lines
535 # this can happen when the hunk does not add any lines
536 lr.push(l)
536 lr.push(l)
537 break
537 break
538 else:
538 else:
539 raise PatchError(_("bad hunk #%d old text line %d") %
539 raise PatchError(_("bad hunk #%d old text line %d") %
540 (self.number, x))
540 (self.number, x))
541 self.b.append(s)
541 self.b.append(s)
542 while True:
542 while True:
543 if hunki >= len(self.hunk):
543 if hunki >= len(self.hunk):
544 h = ""
544 h = ""
545 else:
545 else:
546 h = self.hunk[hunki]
546 h = self.hunk[hunki]
547 hunki += 1
547 hunki += 1
548 if h == u:
548 if h == u:
549 break
549 break
550 elif h.startswith('-'):
550 elif h.startswith('-'):
551 continue
551 continue
552 else:
552 else:
553 self.hunk.insert(hunki-1, u)
553 self.hunk.insert(hunki-1, u)
554 break
554 break
555
555
556 if not self.a:
556 if not self.a:
557 # this happens when lines were only added to the hunk
557 # this happens when lines were only added to the hunk
558 for x in self.hunk:
558 for x in self.hunk:
559 if x.startswith('-') or x.startswith(' '):
559 if x.startswith('-') or x.startswith(' '):
560 self.a.append(x)
560 self.a.append(x)
561 if not self.b:
561 if not self.b:
562 # this happens when lines were only deleted from the hunk
562 # this happens when lines were only deleted from the hunk
563 for x in self.hunk:
563 for x in self.hunk:
564 if x.startswith('+') or x.startswith(' '):
564 if x.startswith('+') or x.startswith(' '):
565 self.b.append(x[1:])
565 self.b.append(x[1:])
566 # @@ -start,len +start,len @@
566 # @@ -start,len +start,len @@
567 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
567 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
568 self.startb, self.lenb)
568 self.startb, self.lenb)
569 self.hunk[0] = self.desc
569 self.hunk[0] = self.desc
570
570
571 def reverse(self):
571 def reverse(self):
572 self.create, self.remove = self.remove, self.create
572 self.create, self.remove = self.remove, self.create
573 origlena = self.lena
573 origlena = self.lena
574 origstarta = self.starta
574 origstarta = self.starta
575 self.lena = self.lenb
575 self.lena = self.lenb
576 self.starta = self.startb
576 self.starta = self.startb
577 self.lenb = origlena
577 self.lenb = origlena
578 self.startb = origstarta
578 self.startb = origstarta
579 self.a = []
579 self.a = []
580 self.b = []
580 self.b = []
581 # self.hunk[0] is the @@ description
581 # self.hunk[0] is the @@ description
582 for x in xrange(1, len(self.hunk)):
582 for x in xrange(1, len(self.hunk)):
583 o = self.hunk[x]
583 o = self.hunk[x]
584 if o.startswith('-'):
584 if o.startswith('-'):
585 n = '+' + o[1:]
585 n = '+' + o[1:]
586 self.b.append(o[1:])
586 self.b.append(o[1:])
587 elif o.startswith('+'):
587 elif o.startswith('+'):
588 n = '-' + o[1:]
588 n = '-' + o[1:]
589 self.a.append(n)
589 self.a.append(n)
590 else:
590 else:
591 n = o
591 n = o
592 self.b.append(o[1:])
592 self.b.append(o[1:])
593 self.a.append(o)
593 self.a.append(o)
594 self.hunk[x] = o
594 self.hunk[x] = o
595
595
596 def fix_newline(self):
596 def fix_newline(self):
597 diffhelpers.fix_newline(self.hunk, self.a, self.b)
597 diffhelpers.fix_newline(self.hunk, self.a, self.b)
598
598
599 def complete(self):
599 def complete(self):
600 return len(self.a) == self.lena and len(self.b) == self.lenb
600 return len(self.a) == self.lena and len(self.b) == self.lenb
601
601
602 def createfile(self):
602 def createfile(self):
603 return self.starta == 0 and self.lena == 0 and self.create
603 return self.starta == 0 and self.lena == 0 and self.create
604
604
605 def rmfile(self):
605 def rmfile(self):
606 return self.startb == 0 and self.lenb == 0 and self.remove
606 return self.startb == 0 and self.lenb == 0 and self.remove
607
607
608 def fuzzit(self, l, fuzz, toponly):
608 def fuzzit(self, l, fuzz, toponly):
609 # this removes context lines from the top and bottom of list 'l'. It
609 # this removes context lines from the top and bottom of list 'l'. It
610 # checks the hunk to make sure only context lines are removed, and then
610 # checks the hunk to make sure only context lines are removed, and then
611 # returns a new shortened list of lines.
611 # returns a new shortened list of lines.
612 fuzz = min(fuzz, len(l)-1)
612 fuzz = min(fuzz, len(l)-1)
613 if fuzz:
613 if fuzz:
614 top = 0
614 top = 0
615 bot = 0
615 bot = 0
616 hlen = len(self.hunk)
616 hlen = len(self.hunk)
617 for x in xrange(hlen-1):
617 for x in xrange(hlen-1):
618 # the hunk starts with the @@ line, so use x+1
618 # the hunk starts with the @@ line, so use x+1
619 if self.hunk[x+1][0] == ' ':
619 if self.hunk[x+1][0] == ' ':
620 top += 1
620 top += 1
621 else:
621 else:
622 break
622 break
623 if not toponly:
623 if not toponly:
624 for x in xrange(hlen-1):
624 for x in xrange(hlen-1):
625 if self.hunk[hlen-bot-1][0] == ' ':
625 if self.hunk[hlen-bot-1][0] == ' ':
626 bot += 1
626 bot += 1
627 else:
627 else:
628 break
628 break
629
629
630 # top and bot now count context in the hunk
630 # top and bot now count context in the hunk
631 # adjust them if either one is short
631 # adjust them if either one is short
632 context = max(top, bot, 3)
632 context = max(top, bot, 3)
633 if bot < context:
633 if bot < context:
634 bot = max(0, fuzz - (context - bot))
634 bot = max(0, fuzz - (context - bot))
635 else:
635 else:
636 bot = min(fuzz, bot)
636 bot = min(fuzz, bot)
637 if top < context:
637 if top < context:
638 top = max(0, fuzz - (context - top))
638 top = max(0, fuzz - (context - top))
639 else:
639 else:
640 top = min(fuzz, top)
640 top = min(fuzz, top)
641
641
642 return l[top:len(l)-bot]
642 return l[top:len(l)-bot]
643 return l
643 return l
644
644
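    # [editor's note] Hedged note, not part of the original file: for a hunk
    # carrying the usual three context lines at each end, old(fuzz=1) returns
    # self.a with one context line trimmed from the top and one from the
    # bottom, while old(fuzz=1, toponly=True) trims only the top.  Hunks with
    # shorter context are trimmed less aggressively because of the
    # max(top, bot, 3) adjustment above, so only context lines are ever
    # dropped, as the comment in fuzzit() promises.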
645 def old(self, fuzz=0, toponly=False):
645 def old(self, fuzz=0, toponly=False):
646 return self.fuzzit(self.a, fuzz, toponly)
646 return self.fuzzit(self.a, fuzz, toponly)
647
647
648 def newctrl(self):
648 def newctrl(self):
649 res = []
649 res = []
650 for x in self.hunk:
650 for x in self.hunk:
651 c = x[0]
651 c = x[0]
652 if c == ' ' or c == '+':
652 if c == ' ' or c == '+':
653 res.append(x)
653 res.append(x)
654 return res
654 return res
655
655
656 def new(self, fuzz=0, toponly=False):
656 def new(self, fuzz=0, toponly=False):
657 return self.fuzzit(self.b, fuzz, toponly)
657 return self.fuzzit(self.b, fuzz, toponly)
658
658
659 class githunk(object):
659 class githunk(object):
660 """A git hunk"""
660 """A git hunk"""
661 def __init__(self, gitpatch):
661 def __init__(self, gitpatch):
662 self.gitpatch = gitpatch
662 self.gitpatch = gitpatch
663 self.text = None
663 self.text = None
664 self.hunk = []
664 self.hunk = []
665
665
666 def createfile(self):
666 def createfile(self):
667 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
667 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
668
668
669 def rmfile(self):
669 def rmfile(self):
670 return self.gitpatch.op == 'DELETE'
670 return self.gitpatch.op == 'DELETE'
671
671
672 def complete(self):
672 def complete(self):
673 return self.text is not None
673 return self.text is not None
674
674
675 def new(self):
675 def new(self):
676 return [self.text]
676 return [self.text]
677
677
678 class binhunk(githunk):
678 class binhunk(githunk):
679 'A binary patch file. Only understands literals so far.'
679 'A binary patch file. Only understands literals so far.'
680 def __init__(self, gitpatch):
680 def __init__(self, gitpatch):
681 super(binhunk, self).__init__(gitpatch)
681 super(binhunk, self).__init__(gitpatch)
682 self.hunk = ['GIT binary patch\n']
682 self.hunk = ['GIT binary patch\n']
683
683
684 def extract(self, lr):
684 def extract(self, lr):
685 line = lr.readline()
685 line = lr.readline()
686 self.hunk.append(line)
686 self.hunk.append(line)
687 while line and not line.startswith('literal '):
687 while line and not line.startswith('literal '):
688 line = lr.readline()
688 line = lr.readline()
689 self.hunk.append(line)
689 self.hunk.append(line)
690 if not line:
690 if not line:
691 raise PatchError(_('could not extract binary patch'))
691 raise PatchError(_('could not extract binary patch'))
692 size = int(line[8:].rstrip())
692 size = int(line[8:].rstrip())
693 dec = []
693 dec = []
694 line = lr.readline()
694 line = lr.readline()
695 self.hunk.append(line)
695 self.hunk.append(line)
696 while len(line) > 1:
696 while len(line) > 1:
697 l = line[0]
697 l = line[0]
698 if l <= 'Z' and l >= 'A':
698 if l <= 'Z' and l >= 'A':
699 l = ord(l) - ord('A') + 1
699 l = ord(l) - ord('A') + 1
700 else:
700 else:
701 l = ord(l) - ord('a') + 27
701 l = ord(l) - ord('a') + 27
702 dec.append(base85.b85decode(line[1:-1])[:l])
702 dec.append(base85.b85decode(line[1:-1])[:l])
703 line = lr.readline()
703 line = lr.readline()
704 self.hunk.append(line)
704 self.hunk.append(line)
705 text = zlib.decompress(''.join(dec))
705 text = zlib.decompress(''.join(dec))
706 if len(text) != size:
706 if len(text) != size:
707 raise PatchError(_('binary patch is %d bytes, not %d') %
707 raise PatchError(_('binary patch is %d bytes, not %d') %
708 (len(text), size))
708 (len(text), size))
709 self.text = text
709 self.text = text
710
710
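# [editor's note] Hedged note, not part of the original file, on the length
# byte decoded above: in git's base85 binary format each data line starts with
# one character giving the decoded byte count for that line, 'A'..'Z' meaning
# 1..26 and 'a'..'z' meaning 27..52.  A line beginning with 'M' therefore
# carries 13 bytes once base85-decoded; the concatenated result is
# zlib-decompressed and checked against the 'literal <size>' header.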
711 class symlinkhunk(githunk):
711 class symlinkhunk(githunk):
712 """A git symlink hunk"""
712 """A git symlink hunk"""
713 def __init__(self, gitpatch, hunk):
713 def __init__(self, gitpatch, hunk):
714 super(symlinkhunk, self).__init__(gitpatch)
714 super(symlinkhunk, self).__init__(gitpatch)
715 self.hunk = hunk
715 self.hunk = hunk
716
716
717 def complete(self):
717 def complete(self):
718 return True
718 return True
719
719
720 def fix_newline(self):
720 def fix_newline(self):
721 return
721 return
722
722
723 def parsefilename(str):
723 def parsefilename(str):
724 # --- filename \t|space stuff
724 # --- filename \t|space stuff
725 s = str[4:].rstrip('\r\n')
725 s = str[4:].rstrip('\r\n')
726 i = s.find('\t')
726 i = s.find('\t')
727 if i < 0:
727 if i < 0:
728 i = s.find(' ')
728 i = s.find(' ')
729 if i < 0:
729 if i < 0:
730 return s
730 return s
731 return s[:i]
731 return s[:i]
732
732
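# [editor's note] Hedged examples, not part of the original file and using
# hypothetical filenames, of the header parsing above:
#
#   >>> parsefilename('--- a/foo.c\t2009-04-26 18:00:00')
#   'a/foo.c'
#   >>> parsefilename('+++ b/foo.c')
#   'b/foo.c'
#
# The first four characters ('--- ' or '+++ ') are skipped and everything
# after the first tab or space is treated as timestamp noise.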
733 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
733 def selectfile(afile_orig, bfile_orig, hunk, strip, reverse):
734 def pathstrip(path, count=1):
734 def pathstrip(path, count=1):
735 pathlen = len(path)
735 pathlen = len(path)
736 i = 0
736 i = 0
737 if count == 0:
737 if count == 0:
738 return '', path.rstrip()
738 return '', path.rstrip()
739 while count > 0:
739 while count > 0:
740 i = path.find('/', i)
740 i = path.find('/', i)
741 if i == -1:
741 if i == -1:
742 raise PatchError(_("unable to strip away %d dirs from %s") %
742 raise PatchError(_("unable to strip away %d dirs from %s") %
743 (count, path))
743 (count, path))
744 i += 1
744 i += 1
745 # consume '//' in the path
745 # consume '//' in the path
746 while i < pathlen - 1 and path[i] == '/':
746 while i < pathlen - 1 and path[i] == '/':
747 i += 1
747 i += 1
748 count -= 1
748 count -= 1
749 return path[:i].lstrip(), path[i:].rstrip()
749 return path[:i].lstrip(), path[i:].rstrip()
750
750
751 nulla = afile_orig == "/dev/null"
751 nulla = afile_orig == "/dev/null"
752 nullb = bfile_orig == "/dev/null"
752 nullb = bfile_orig == "/dev/null"
753 abase, afile = pathstrip(afile_orig, strip)
753 abase, afile = pathstrip(afile_orig, strip)
754 gooda = not nulla and util.lexists(afile)
754 gooda = not nulla and util.lexists(afile)
755 bbase, bfile = pathstrip(bfile_orig, strip)
755 bbase, bfile = pathstrip(bfile_orig, strip)
756 if afile == bfile:
756 if afile == bfile:
757 goodb = gooda
757 goodb = gooda
758 else:
758 else:
759 goodb = not nullb and os.path.exists(bfile)
759 goodb = not nullb and os.path.exists(bfile)
760 createfunc = hunk.createfile
760 createfunc = hunk.createfile
761 if reverse:
761 if reverse:
762 createfunc = hunk.rmfile
762 createfunc = hunk.rmfile
763 missing = not goodb and not gooda and not createfunc()
763 missing = not goodb and not gooda and not createfunc()
764 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
764 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
765 # diff is between a file and its backup. In this case, the original
765 # diff is between a file and its backup. In this case, the original
766 # file should be patched (see original mpatch code).
766 # file should be patched (see original mpatch code).
767 isbackup = (abase == bbase and bfile.startswith(afile))
767 isbackup = (abase == bbase and bfile.startswith(afile))
768 fname = None
768 fname = None
769 if not missing:
769 if not missing:
770 if gooda and goodb:
770 if gooda and goodb:
771 fname = isbackup and afile or bfile
771 fname = isbackup and afile or bfile
772 elif gooda:
772 elif gooda:
773 fname = afile
773 fname = afile
774
774
775 if not fname:
775 if not fname:
776 if not nullb:
776 if not nullb:
777 fname = isbackup and afile or bfile
777 fname = isbackup and afile or bfile
778 elif not nulla:
778 elif not nulla:
779 fname = afile
779 fname = afile
780 else:
780 else:
781 raise PatchError(_("undefined source and destination files"))
781 raise PatchError(_("undefined source and destination files"))
782
782
783 return fname, missing
783 return fname, missing
784
784
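# [editor's note] Hedged illustration, not part of the original file, of the
# nested pathstrip() helper above (it is local to selectfile, so this is a
# behavioural sketch with hypothetical paths rather than a public API):
#
#   pathstrip('a/b/foo.c', 1)  ->  ('a/', 'b/foo.c')
#   pathstrip('a/b/foo.c', 2)  ->  ('a/b/', 'foo.c')
#   pathstrip('a/b/foo.c', 0)  ->  ('', 'a/b/foo.c')
#
# selectfile() then prefers an existing stripped path, falling back to the
# backup heuristic described in the comment above when bfile looks like afile
# plus a suffix such as '.orig'.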
785 class linereader:
785 class linereader(object):
786 # simple class to allow pushing lines back into the input stream
786 # simple class to allow pushing lines back into the input stream
787 def __init__(self, fp):
787 def __init__(self, fp):
788 self.fp = fp
788 self.fp = fp
789 self.buf = []
789 self.buf = []
790
790
791 def push(self, line):
791 def push(self, line):
792 if line is not None:
792 if line is not None:
793 self.buf.append(line)
793 self.buf.append(line)
794
794
795 def readline(self):
795 def readline(self):
796 if self.buf:
796 if self.buf:
797 return self.buf.pop(0)
797 return self.buf.pop(0)
798 return self.fp.readline()
798 return self.fp.readline()
799
799
800 def __iter__(self):
800 def __iter__(self):
801 while 1:
801 while 1:
802 l = self.readline()
802 l = self.readline()
803 if not l:
803 if not l:
804 break
804 break
805 yield l
805 yield l
806
806
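# [editor's note] Hedged usage sketch, not part of the original file:
#
#   lr = linereader(cStringIO.StringIO('one\ntwo\n'))
#   first = lr.readline()   # 'one\n'
#   lr.push(first)          # put it back
#   list(lr)                # ['one\n', 'two\n'] -- pushed lines come out first
#
# This push-back is what lets scangitpatch() below hand the already-consumed
# 'diff --git' line back to readgitpatch().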
807 def scangitpatch(lr, firstline):
807 def scangitpatch(lr, firstline):
808 """
808 """
809 Git patches can emit:
809 Git patches can emit:
810 - rename a to b
810 - rename a to b
811 - change b
811 - change b
812 - copy a to c
812 - copy a to c
813 - change c
813 - change c
814
814
815 We cannot apply this sequence as-is: the renamed 'a' could not be
815 We cannot apply this sequence as-is: the renamed 'a' could not be
816 found because it would have been renamed already. And we cannot copy
816 found because it would have been renamed already. And we cannot copy
817 from 'b' instead because 'b' would have been changed already. So
817 from 'b' instead because 'b' would have been changed already. So
818 we scan the git patch for copy and rename commands so we can
818 we scan the git patch for copy and rename commands so we can
819 perform the copies ahead of time.
819 perform the copies ahead of time.
820 """
820 """
821 pos = 0
821 pos = 0
822 try:
822 try:
823 pos = lr.fp.tell()
823 pos = lr.fp.tell()
824 fp = lr.fp
824 fp = lr.fp
825 except IOError:
825 except IOError:
826 fp = cStringIO.StringIO(lr.fp.read())
826 fp = cStringIO.StringIO(lr.fp.read())
827 gitlr = linereader(fp)
827 gitlr = linereader(fp)
828 gitlr.push(firstline)
828 gitlr.push(firstline)
829 (dopatch, gitpatches) = readgitpatch(gitlr)
829 (dopatch, gitpatches) = readgitpatch(gitlr)
830 fp.seek(pos)
830 fp.seek(pos)
831 return dopatch, gitpatches
831 return dopatch, gitpatches
832
832
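# [editor's note] Hedged illustration, not part of the original file, of the
# ordering problem described in the docstring above, using hypothetical files
# a, b and c.  A git patch such as
#
#   diff --git a/a b/b      (rename a -> b, then modify b)
#   diff --git a/a b/c      (copy a -> c, then modify c)
#
# cannot be applied strictly top to bottom: by the time the copy is reached,
# 'a' has already become 'b'.  scangitpatch() therefore reads the whole patch
# once (through a cStringIO buffer when the input is not seekable), collects
# the copy/rename patchmeta records up front, and rewinds so iterhunks() can
# replay the hunks normally.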
833 def iterhunks(ui, fp, sourcefile=None):
833 def iterhunks(ui, fp, sourcefile=None):
834 """Read a patch and yield the following events:
834 """Read a patch and yield the following events:
835 - ("file", afile, bfile, firsthunk): select a new target file.
835 - ("file", afile, bfile, firsthunk): select a new target file.
836 - ("hunk", hunk): a new hunk is ready to be applied, follows a
836 - ("hunk", hunk): a new hunk is ready to be applied, follows a
837 "file" event.
837 "file" event.
838 - ("git", gitchanges): current diff is in git format, gitchanges
838 - ("git", gitchanges): current diff is in git format, gitchanges
839 maps filenames to gitpatch records. Unique event.
839 maps filenames to gitpatch records. Unique event.
840 """
840 """
841 changed = {}
841 changed = {}
842 current_hunk = None
842 current_hunk = None
843 afile = ""
843 afile = ""
844 bfile = ""
844 bfile = ""
845 state = None
845 state = None
846 hunknum = 0
846 hunknum = 0
847 emitfile = False
847 emitfile = False
848 git = False
848 git = False
849
849
850 # our states
850 # our states
851 BFILE = 1
851 BFILE = 1
852 context = None
852 context = None
853 lr = linereader(fp)
853 lr = linereader(fp)
854 dopatch = True
854 dopatch = True
855 # gitworkdone is True if a git operation (copy, rename, ...) was
855 # gitworkdone is True if a git operation (copy, rename, ...) was
856 # performed already for the current file. Useful when the file
856 # performed already for the current file. Useful when the file
857 # section may have no hunk.
857 # section may have no hunk.
858 gitworkdone = False
858 gitworkdone = False
859
859
860 while True:
860 while True:
861 newfile = False
861 newfile = False
862 x = lr.readline()
862 x = lr.readline()
863 if not x:
863 if not x:
864 break
864 break
865 if current_hunk:
865 if current_hunk:
866 if x.startswith('\ '):
866 if x.startswith('\ '):
867 current_hunk.fix_newline()
867 current_hunk.fix_newline()
868 yield 'hunk', current_hunk
868 yield 'hunk', current_hunk
869 current_hunk = None
869 current_hunk = None
870 gitworkdone = False
870 gitworkdone = False
871 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
871 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
872 ((context is not False) and x.startswith('***************')))):
872 ((context is not False) and x.startswith('***************')))):
873 try:
873 try:
874 if context is None and x.startswith('***************'):
874 if context is None and x.startswith('***************'):
875 context = True
875 context = True
876 gpatch = changed.get(bfile)
876 gpatch = changed.get(bfile)
877 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
877 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
878 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
878 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
879 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
879 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
880 if remove:
880 if remove:
881 gpatch = changed.get(afile[2:])
881 gpatch = changed.get(afile[2:])
882 if gpatch and gpatch.mode[0]:
882 if gpatch and gpatch.mode[0]:
883 current_hunk = symlinkhunk(gpatch, current_hunk)
883 current_hunk = symlinkhunk(gpatch, current_hunk)
884 except PatchError, err:
884 except PatchError, err:
885 ui.debug(err)
885 ui.debug(err)
886 current_hunk = None
886 current_hunk = None
887 continue
887 continue
888 hunknum += 1
888 hunknum += 1
889 if emitfile:
889 if emitfile:
890 emitfile = False
890 emitfile = False
891 yield 'file', (afile, bfile, current_hunk)
891 yield 'file', (afile, bfile, current_hunk)
892 elif state == BFILE and x.startswith('GIT binary patch'):
892 elif state == BFILE and x.startswith('GIT binary patch'):
893 current_hunk = binhunk(changed[bfile])
893 current_hunk = binhunk(changed[bfile])
894 hunknum += 1
894 hunknum += 1
895 if emitfile:
895 if emitfile:
896 emitfile = False
896 emitfile = False
897 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
897 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
898 current_hunk.extract(lr)
898 current_hunk.extract(lr)
899 elif x.startswith('diff --git'):
899 elif x.startswith('diff --git'):
900 # check for git diff, scanning the whole patch file if needed
900 # check for git diff, scanning the whole patch file if needed
901 m = gitre.match(x)
901 m = gitre.match(x)
902 if m:
902 if m:
903 afile, bfile = m.group(1, 2)
903 afile, bfile = m.group(1, 2)
904 if not git:
904 if not git:
905 git = True
905 git = True
906 dopatch, gitpatches = scangitpatch(lr, x)
906 dopatch, gitpatches = scangitpatch(lr, x)
907 yield 'git', gitpatches
907 yield 'git', gitpatches
908 for gp in gitpatches:
908 for gp in gitpatches:
909 changed[gp.path] = gp
909 changed[gp.path] = gp
910 # else error?
910 # else error?
911 # copy/rename + modify should modify target, not source
911 # copy/rename + modify should modify target, not source
912 gp = changed.get(bfile)
912 gp = changed.get(bfile)
913 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
913 if gp and gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD'):
914 afile = bfile
914 afile = bfile
915 gitworkdone = True
915 gitworkdone = True
916 newfile = True
916 newfile = True
917 elif x.startswith('---'):
917 elif x.startswith('---'):
918 # check for a unified diff
918 # check for a unified diff
919 l2 = lr.readline()
919 l2 = lr.readline()
920 if not l2.startswith('+++'):
920 if not l2.startswith('+++'):
921 lr.push(l2)
921 lr.push(l2)
922 continue
922 continue
923 newfile = True
923 newfile = True
924 context = False
924 context = False
925 afile = parsefilename(x)
925 afile = parsefilename(x)
926 bfile = parsefilename(l2)
926 bfile = parsefilename(l2)
927 elif x.startswith('***'):
927 elif x.startswith('***'):
928 # check for a context diff
928 # check for a context diff
929 l2 = lr.readline()
929 l2 = lr.readline()
930 if not l2.startswith('---'):
930 if not l2.startswith('---'):
931 lr.push(l2)
931 lr.push(l2)
932 continue
932 continue
933 l3 = lr.readline()
933 l3 = lr.readline()
934 lr.push(l3)
934 lr.push(l3)
935 if not l3.startswith("***************"):
935 if not l3.startswith("***************"):
936 lr.push(l2)
936 lr.push(l2)
937 continue
937 continue
938 newfile = True
938 newfile = True
939 context = True
939 context = True
940 afile = parsefilename(x)
940 afile = parsefilename(x)
941 bfile = parsefilename(l2)
941 bfile = parsefilename(l2)
942
942
943 if newfile:
943 if newfile:
944 emitfile = True
944 emitfile = True
945 state = BFILE
945 state = BFILE
946 hunknum = 0
946 hunknum = 0
947 if current_hunk:
947 if current_hunk:
948 if current_hunk.complete():
948 if current_hunk.complete():
949 yield 'hunk', current_hunk
949 yield 'hunk', current_hunk
950 else:
950 else:
951 raise PatchError(_("malformed patch %s %s") % (afile,
951 raise PatchError(_("malformed patch %s %s") % (afile,
952 current_hunk.desc))
952 current_hunk.desc))
953
953
954 if hunknum == 0 and dopatch and not gitworkdone:
954 if hunknum == 0 and dopatch and not gitworkdone:
955 raise NoHunks
955 raise NoHunks
956
956
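For reference, the event protocol documented in the iterhunks() docstring above is easiest to follow from the consumer side. The sketch below is illustrative only and not part of this changeset; it assumes a ui object and an open patch file fp, and merely tallies hunks per target file (applydiff() below is the real consumer). Note that iterhunks() raises NoHunks if the patch turns out to contain no applicable hunks.

    def counthunks(ui, fp):
        # hypothetical helper: count hunks per target file
        counts = {}
        current = None
        for event, data in iterhunks(ui, fp):
            if event == 'file':
                afile, bfile, firsthunk = data
                current = bfile
                counts.setdefault(current, 0)
            elif event == 'hunk':
                if current is None:
                    continue              # hunk without a file; applydiff skips these too
                counts[current] += 1
            elif event == 'git':
                pass                      # data maps filenames to gitpatch records
        return counts
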
957 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
957 def applydiff(ui, fp, changed, strip=1, sourcefile=None, reverse=False):
958 """reads a patch from fp and tries to apply it. The dict 'changed' is
958 """reads a patch from fp and tries to apply it. The dict 'changed' is
959 filled in with all of the filenames changed by the patch. Returns 0
959 filled in with all of the filenames changed by the patch. Returns 0
960 for a clean patch, -1 if any rejects were found and 1 if there was
960 for a clean patch, -1 if any rejects were found and 1 if there was
961 any fuzz."""
961 any fuzz."""
962
962
963 rejects = 0
963 rejects = 0
964 err = 0
964 err = 0
965 current_file = None
965 current_file = None
966 gitpatches = None
966 gitpatches = None
967 opener = util.opener(os.getcwd())
967 opener = util.opener(os.getcwd())
968
968
969 def closefile():
969 def closefile():
970 if not current_file:
970 if not current_file:
971 return 0
971 return 0
972 current_file.close()
972 current_file.close()
973 return len(current_file.rej)
973 return len(current_file.rej)
974
974
975 for state, values in iterhunks(ui, fp, sourcefile):
975 for state, values in iterhunks(ui, fp, sourcefile):
976 if state == 'hunk':
976 if state == 'hunk':
977 if not current_file:
977 if not current_file:
978 continue
978 continue
979 current_hunk = values
979 current_hunk = values
980 ret = current_file.apply(current_hunk, reverse)
980 ret = current_file.apply(current_hunk, reverse)
981 if ret >= 0:
981 if ret >= 0:
982 changed.setdefault(current_file.fname, None)
982 changed.setdefault(current_file.fname, None)
983 if ret > 0:
983 if ret > 0:
984 err = 1
984 err = 1
985 elif state == 'file':
985 elif state == 'file':
986 rejects += closefile()
986 rejects += closefile()
987 afile, bfile, first_hunk = values
987 afile, bfile, first_hunk = values
988 try:
988 try:
989 if sourcefile:
989 if sourcefile:
990 current_file = patchfile(ui, sourcefile, opener)
990 current_file = patchfile(ui, sourcefile, opener)
991 else:
991 else:
992 current_file, missing = selectfile(afile, bfile, first_hunk,
992 current_file, missing = selectfile(afile, bfile, first_hunk,
993 strip, reverse)
993 strip, reverse)
994 current_file = patchfile(ui, current_file, opener, missing)
994 current_file = patchfile(ui, current_file, opener, missing)
995 except PatchError, err:
995 except PatchError, err:
996 ui.warn(str(err) + '\n')
996 ui.warn(str(err) + '\n')
997 current_file, current_hunk = None, None
997 current_file, current_hunk = None, None
998 rejects += 1
998 rejects += 1
999 continue
999 continue
1000 elif state == 'git':
1000 elif state == 'git':
1001 gitpatches = values
1001 gitpatches = values
1002 cwd = os.getcwd()
1002 cwd = os.getcwd()
1003 for gp in gitpatches:
1003 for gp in gitpatches:
1004 if gp.op in ('COPY', 'RENAME'):
1004 if gp.op in ('COPY', 'RENAME'):
1005 copyfile(gp.oldpath, gp.path, cwd)
1005 copyfile(gp.oldpath, gp.path, cwd)
1006 changed[gp.path] = gp
1006 changed[gp.path] = gp
1007 else:
1007 else:
1008 raise util.Abort(_('unsupported parser state: %s') % state)
1008 raise util.Abort(_('unsupported parser state: %s') % state)
1009
1009
1010 rejects += closefile()
1010 rejects += closefile()
1011
1011
1012 if rejects:
1012 if rejects:
1013 return -1
1013 return -1
1014 return err
1014 return err
1015
1015
1016 def diffopts(ui, opts={}, untrusted=False):
1016 def diffopts(ui, opts={}, untrusted=False):
1017 def get(key, name=None, getter=ui.configbool):
1017 def get(key, name=None, getter=ui.configbool):
1018 return (opts.get(key) or
1018 return (opts.get(key) or
1019 getter('diff', name or key, None, untrusted=untrusted))
1019 getter('diff', name or key, None, untrusted=untrusted))
1020 return mdiff.diffopts(
1020 return mdiff.diffopts(
1021 text=opts.get('text'),
1021 text=opts.get('text'),
1022 git=get('git'),
1022 git=get('git'),
1023 nodates=get('nodates'),
1023 nodates=get('nodates'),
1024 showfunc=get('show_function', 'showfunc'),
1024 showfunc=get('show_function', 'showfunc'),
1025 ignorews=get('ignore_all_space', 'ignorews'),
1025 ignorews=get('ignore_all_space', 'ignorews'),
1026 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1026 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1027 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1027 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1028 context=get('unified', getter=ui.config))
1028 context=get('unified', getter=ui.config))
1029
1029
1030 def updatedir(ui, repo, patches, similarity=0):
1030 def updatedir(ui, repo, patches, similarity=0):
1031 '''Update dirstate after patch application according to metadata'''
1031 '''Update dirstate after patch application according to metadata'''
1032 if not patches:
1032 if not patches:
1033 return
1033 return
1034 copies = []
1034 copies = []
1035 removes = set()
1035 removes = set()
1036 cfiles = patches.keys()
1036 cfiles = patches.keys()
1037 cwd = repo.getcwd()
1037 cwd = repo.getcwd()
1038 if cwd:
1038 if cwd:
1039 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1039 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1040 for f in patches:
1040 for f in patches:
1041 gp = patches[f]
1041 gp = patches[f]
1042 if not gp:
1042 if not gp:
1043 continue
1043 continue
1044 if gp.op == 'RENAME':
1044 if gp.op == 'RENAME':
1045 copies.append((gp.oldpath, gp.path))
1045 copies.append((gp.oldpath, gp.path))
1046 removes.add(gp.oldpath)
1046 removes.add(gp.oldpath)
1047 elif gp.op == 'COPY':
1047 elif gp.op == 'COPY':
1048 copies.append((gp.oldpath, gp.path))
1048 copies.append((gp.oldpath, gp.path))
1049 elif gp.op == 'DELETE':
1049 elif gp.op == 'DELETE':
1050 removes.add(gp.path)
1050 removes.add(gp.path)
1051 for src, dst in copies:
1051 for src, dst in copies:
1052 repo.copy(src, dst)
1052 repo.copy(src, dst)
1053 if (not similarity) and removes:
1053 if (not similarity) and removes:
1054 repo.remove(sorted(removes), True)
1054 repo.remove(sorted(removes), True)
1055 for f in patches:
1055 for f in patches:
1056 gp = patches[f]
1056 gp = patches[f]
1057 if gp and gp.mode:
1057 if gp and gp.mode:
1058 islink, isexec = gp.mode
1058 islink, isexec = gp.mode
1059 dst = repo.wjoin(gp.path)
1059 dst = repo.wjoin(gp.path)
1060 # patch won't create empty files
1060 # patch won't create empty files
1061 if gp.op == 'ADD' and not os.path.exists(dst):
1061 if gp.op == 'ADD' and not os.path.exists(dst):
1062 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1062 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1063 repo.wwrite(gp.path, '', flags)
1063 repo.wwrite(gp.path, '', flags)
1064 elif gp.op != 'DELETE':
1064 elif gp.op != 'DELETE':
1065 util.set_flags(dst, islink, isexec)
1065 util.set_flags(dst, islink, isexec)
1066 cmdutil.addremove(repo, cfiles, similarity=similarity)
1066 cmdutil.addremove(repo, cfiles, similarity=similarity)
1067 files = patches.keys()
1067 files = patches.keys()
1068 files.extend([r for r in removes if r not in files])
1068 files.extend([r for r in removes if r not in files])
1069 return sorted(files)
1069 return sorted(files)
1070
1070
1071 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1071 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1072 """use <patcher> to apply <patchname> to the working directory.
1072 """use <patcher> to apply <patchname> to the working directory.
1073 returns whether patch was applied with fuzz factor."""
1073 returns whether patch was applied with fuzz factor."""
1074
1074
1075 fuzz = False
1075 fuzz = False
1076 if cwd:
1076 if cwd:
1077 args.append('-d %s' % util.shellquote(cwd))
1077 args.append('-d %s' % util.shellquote(cwd))
1078 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1078 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1079 util.shellquote(patchname)))
1079 util.shellquote(patchname)))
1080
1080
1081 for line in fp:
1081 for line in fp:
1082 line = line.rstrip()
1082 line = line.rstrip()
1083 ui.note(line + '\n')
1083 ui.note(line + '\n')
1084 if line.startswith('patching file '):
1084 if line.startswith('patching file '):
1085 pf = util.parse_patch_output(line)
1085 pf = util.parse_patch_output(line)
1086 printed_file = False
1086 printed_file = False
1087 files.setdefault(pf, None)
1087 files.setdefault(pf, None)
1088 elif line.find('with fuzz') >= 0:
1088 elif line.find('with fuzz') >= 0:
1089 fuzz = True
1089 fuzz = True
1090 if not printed_file:
1090 if not printed_file:
1091 ui.warn(pf + '\n')
1091 ui.warn(pf + '\n')
1092 printed_file = True
1092 printed_file = True
1093 ui.warn(line + '\n')
1093 ui.warn(line + '\n')
1094 elif line.find('saving rejects to file') >= 0:
1094 elif line.find('saving rejects to file') >= 0:
1095 ui.warn(line + '\n')
1095 ui.warn(line + '\n')
1096 elif line.find('FAILED') >= 0:
1096 elif line.find('FAILED') >= 0:
1097 if not printed_file:
1097 if not printed_file:
1098 ui.warn(pf + '\n')
1098 ui.warn(pf + '\n')
1099 printed_file = True
1099 printed_file = True
1100 ui.warn(line + '\n')
1100 ui.warn(line + '\n')
1101 code = fp.close()
1101 code = fp.close()
1102 if code:
1102 if code:
1103 raise PatchError(_("patch command failed: %s") %
1103 raise PatchError(_("patch command failed: %s") %
1104 util.explain_exit(code)[0])
1104 util.explain_exit(code)[0])
1105 return fuzz
1105 return fuzz
1106
1106
1107 def internalpatch(patchobj, ui, strip, cwd, files={}):
1107 def internalpatch(patchobj, ui, strip, cwd, files={}):
1108 """use builtin patch to apply <patchobj> to the working directory.
1108 """use builtin patch to apply <patchobj> to the working directory.
1109 returns whether patch was applied with fuzz factor."""
1109 returns whether patch was applied with fuzz factor."""
1110 try:
1110 try:
1111 fp = file(patchobj, 'rb')
1111 fp = file(patchobj, 'rb')
1112 except TypeError:
1112 except TypeError:
1113 fp = patchobj
1113 fp = patchobj
1114 if cwd:
1114 if cwd:
1115 curdir = os.getcwd()
1115 curdir = os.getcwd()
1116 os.chdir(cwd)
1116 os.chdir(cwd)
1117 try:
1117 try:
1118 ret = applydiff(ui, fp, files, strip=strip)
1118 ret = applydiff(ui, fp, files, strip=strip)
1119 finally:
1119 finally:
1120 if cwd:
1120 if cwd:
1121 os.chdir(curdir)
1121 os.chdir(curdir)
1122 if ret < 0:
1122 if ret < 0:
1123 raise PatchError
1123 raise PatchError
1124 return ret > 0
1124 return ret > 0
1125
1125
1126 def patch(patchname, ui, strip=1, cwd=None, files={}):
1126 def patch(patchname, ui, strip=1, cwd=None, files={}):
1127 """apply <patchname> to the working directory.
1127 """apply <patchname> to the working directory.
1128 returns whether patch was applied with fuzz factor."""
1128 returns whether patch was applied with fuzz factor."""
1129 patcher = ui.config('ui', 'patch')
1129 patcher = ui.config('ui', 'patch')
1130 args = []
1130 args = []
1131 try:
1131 try:
1132 if patcher:
1132 if patcher:
1133 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1133 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1134 files)
1134 files)
1135 else:
1135 else:
1136 try:
1136 try:
1137 return internalpatch(patchname, ui, strip, cwd, files)
1137 return internalpatch(patchname, ui, strip, cwd, files)
1138 except NoHunks:
1138 except NoHunks:
1139 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1139 patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
1140 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1140 ui.debug(_('no valid hunks found; trying with %r instead\n') %
1141 patcher)
1141 patcher)
1142 if util.needbinarypatch():
1142 if util.needbinarypatch():
1143 args.append('--binary')
1143 args.append('--binary')
1144 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1144 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1145 files)
1145 files)
1146 except PatchError, err:
1146 except PatchError, err:
1147 s = str(err)
1147 s = str(err)
1148 if s:
1148 if s:
1149 raise util.Abort(s)
1149 raise util.Abort(s)
1150 else:
1150 else:
1151 raise util.Abort(_('patch failed to apply'))
1151 raise util.Abort(_('patch failed to apply'))
1152
1152
1153 def b85diff(to, tn):
1153 def b85diff(to, tn):
1154 '''print base85-encoded binary diff'''
1154 '''print base85-encoded binary diff'''
1155 def gitindex(text):
1155 def gitindex(text):
1156 if not text:
1156 if not text:
1157 return '0' * 40
1157 return '0' * 40
1158 l = len(text)
1158 l = len(text)
1159 s = util.sha1('blob %d\0' % l)
1159 s = util.sha1('blob %d\0' % l)
1160 s.update(text)
1160 s.update(text)
1161 return s.hexdigest()
1161 return s.hexdigest()
1162
1162
1163 def fmtline(line):
1163 def fmtline(line):
1164 l = len(line)
1164 l = len(line)
1165 if l <= 26:
1165 if l <= 26:
1166 l = chr(ord('A') + l - 1)
1166 l = chr(ord('A') + l - 1)
1167 else:
1167 else:
1168 l = chr(l - 26 + ord('a') - 1)
1168 l = chr(l - 26 + ord('a') - 1)
1169 return '%c%s\n' % (l, base85.b85encode(line, True))
1169 return '%c%s\n' % (l, base85.b85encode(line, True))
1170
1170
1171 def chunk(text, csize=52):
1171 def chunk(text, csize=52):
1172 l = len(text)
1172 l = len(text)
1173 i = 0
1173 i = 0
1174 while i < l:
1174 while i < l:
1175 yield text[i:i+csize]
1175 yield text[i:i+csize]
1176 i += csize
1176 i += csize
1177
1177
1178 tohash = gitindex(to)
1178 tohash = gitindex(to)
1179 tnhash = gitindex(tn)
1179 tnhash = gitindex(tn)
1180 if tohash == tnhash:
1180 if tohash == tnhash:
1181 return ""
1181 return ""
1182
1182
1183 # TODO: deltas
1183 # TODO: deltas
1184 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1184 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1185 (tohash, tnhash, len(tn))]
1185 (tohash, tnhash, len(tn))]
1186 for l in chunk(zlib.compress(tn)):
1186 for l in chunk(zlib.compress(tn)):
1187 ret.append(fmtline(l))
1187 ret.append(fmtline(l))
1188 ret.append('\n')
1188 ret.append('\n')
1189 return ''.join(ret)
1189 return ''.join(ret)
1190
1190
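The one-letter prefix written by fmtline() above encodes the chunk length using git's binary-patch convention: lengths 1-26 map to 'A'-'Z' and 27-52 to 'a'-'z', while gitindex() reproduces the SHA-1 that git assigns to a blob by hashing 'blob <len>\0' plus the data. A small self-contained check of the prefix arithmetic, for illustration only:

    def _lenprefix(l):
        # mirrors the arithmetic in fmtline() above
        if l <= 26:
            return chr(ord('A') + l - 1)
        return chr(l - 26 + ord('a') - 1)

    assert _lenprefix(1) == 'A' and _lenprefix(26) == 'Z'
    assert _lenprefix(27) == 'a' and _lenprefix(52) == 'z'
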
1191 def _addmodehdr(header, omode, nmode):
1191 def _addmodehdr(header, omode, nmode):
1192 if omode != nmode:
1192 if omode != nmode:
1193 header.append('old mode %s\n' % omode)
1193 header.append('old mode %s\n' % omode)
1194 header.append('new mode %s\n' % nmode)
1194 header.append('new mode %s\n' % nmode)
1195
1195
1196 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1196 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None):
1197 '''yields diff of changes to files between two nodes, or node and
1197 '''yields diff of changes to files between two nodes, or node and
1198 working directory.
1198 working directory.
1199
1199
1200 if node1 is None, use first dirstate parent instead.
1200 if node1 is None, use first dirstate parent instead.
1201 if node2 is None, compare node1 with working directory.'''
1201 if node2 is None, compare node1 with working directory.'''
1202
1202
1203 if opts is None:
1203 if opts is None:
1204 opts = mdiff.defaultopts
1204 opts = mdiff.defaultopts
1205
1205
1206 if not node1:
1206 if not node1:
1207 node1 = repo.dirstate.parents()[0]
1207 node1 = repo.dirstate.parents()[0]
1208
1208
1209 flcache = {}
1209 flcache = {}
1210 def getfilectx(f, ctx):
1210 def getfilectx(f, ctx):
1211 flctx = ctx.filectx(f, filelog=flcache.get(f))
1211 flctx = ctx.filectx(f, filelog=flcache.get(f))
1212 if f not in flcache:
1212 if f not in flcache:
1213 flcache[f] = flctx._filelog
1213 flcache[f] = flctx._filelog
1214 return flctx
1214 return flctx
1215
1215
1216 ctx1 = repo[node1]
1216 ctx1 = repo[node1]
1217 ctx2 = repo[node2]
1217 ctx2 = repo[node2]
1218
1218
1219 if not changes:
1219 if not changes:
1220 changes = repo.status(ctx1, ctx2, match=match)
1220 changes = repo.status(ctx1, ctx2, match=match)
1221 modified, added, removed = changes[:3]
1221 modified, added, removed = changes[:3]
1222
1222
1223 if not modified and not added and not removed:
1223 if not modified and not added and not removed:
1224 return
1224 return
1225
1225
1226 date1 = util.datestr(ctx1.date())
1226 date1 = util.datestr(ctx1.date())
1227 man1 = ctx1.manifest()
1227 man1 = ctx1.manifest()
1228
1228
1229 if repo.ui.quiet:
1229 if repo.ui.quiet:
1230 r = None
1230 r = None
1231 else:
1231 else:
1232 hexfunc = repo.ui.debugflag and hex or short
1232 hexfunc = repo.ui.debugflag and hex or short
1233 r = [hexfunc(node) for node in [node1, node2] if node]
1233 r = [hexfunc(node) for node in [node1, node2] if node]
1234
1234
1235 if opts.git:
1235 if opts.git:
1236 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1236 copy, diverge = copies.copies(repo, ctx1, ctx2, repo[nullid])
1237 copy = copy.copy()
1237 copy = copy.copy()
1238 for k, v in copy.items():
1238 for k, v in copy.items():
1239 copy[v] = k
1239 copy[v] = k
1240
1240
1241 gone = set()
1241 gone = set()
1242 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1242 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1243
1243
1244 for f in sorted(modified + added + removed):
1244 for f in sorted(modified + added + removed):
1245 to = None
1245 to = None
1246 tn = None
1246 tn = None
1247 dodiff = True
1247 dodiff = True
1248 header = []
1248 header = []
1249 if f in man1:
1249 if f in man1:
1250 to = getfilectx(f, ctx1).data()
1250 to = getfilectx(f, ctx1).data()
1251 if f not in removed:
1251 if f not in removed:
1252 tn = getfilectx(f, ctx2).data()
1252 tn = getfilectx(f, ctx2).data()
1253 a, b = f, f
1253 a, b = f, f
1254 if opts.git:
1254 if opts.git:
1255 if f in added:
1255 if f in added:
1256 mode = gitmode[ctx2.flags(f)]
1256 mode = gitmode[ctx2.flags(f)]
1257 if f in copy:
1257 if f in copy:
1258 a = copy[f]
1258 a = copy[f]
1259 omode = gitmode[man1.flags(a)]
1259 omode = gitmode[man1.flags(a)]
1260 _addmodehdr(header, omode, mode)
1260 _addmodehdr(header, omode, mode)
1261 if a in removed and a not in gone:
1261 if a in removed and a not in gone:
1262 op = 'rename'
1262 op = 'rename'
1263 gone.add(a)
1263 gone.add(a)
1264 else:
1264 else:
1265 op = 'copy'
1265 op = 'copy'
1266 header.append('%s from %s\n' % (op, a))
1266 header.append('%s from %s\n' % (op, a))
1267 header.append('%s to %s\n' % (op, f))
1267 header.append('%s to %s\n' % (op, f))
1268 to = getfilectx(a, ctx1).data()
1268 to = getfilectx(a, ctx1).data()
1269 else:
1269 else:
1270 header.append('new file mode %s\n' % mode)
1270 header.append('new file mode %s\n' % mode)
1271 if util.binary(tn):
1271 if util.binary(tn):
1272 dodiff = 'binary'
1272 dodiff = 'binary'
1273 elif f in removed:
1273 elif f in removed:
1274 # have we already reported a copy above?
1274 # have we already reported a copy above?
1275 if f in copy and copy[f] in added and copy[copy[f]] == f:
1275 if f in copy and copy[f] in added and copy[copy[f]] == f:
1276 dodiff = False
1276 dodiff = False
1277 else:
1277 else:
1278 header.append('deleted file mode %s\n' %
1278 header.append('deleted file mode %s\n' %
1279 gitmode[man1.flags(f)])
1279 gitmode[man1.flags(f)])
1280 else:
1280 else:
1281 omode = gitmode[man1.flags(f)]
1281 omode = gitmode[man1.flags(f)]
1282 nmode = gitmode[ctx2.flags(f)]
1282 nmode = gitmode[ctx2.flags(f)]
1283 _addmodehdr(header, omode, nmode)
1283 _addmodehdr(header, omode, nmode)
1284 if util.binary(to) or util.binary(tn):
1284 if util.binary(to) or util.binary(tn):
1285 dodiff = 'binary'
1285 dodiff = 'binary'
1286 r = None
1286 r = None
1287 header.insert(0, mdiff.diffline(r, a, b, opts))
1287 header.insert(0, mdiff.diffline(r, a, b, opts))
1288 if dodiff:
1288 if dodiff:
1289 if dodiff == 'binary':
1289 if dodiff == 'binary':
1290 text = b85diff(to, tn)
1290 text = b85diff(to, tn)
1291 else:
1291 else:
1292 text = mdiff.unidiff(to, date1,
1292 text = mdiff.unidiff(to, date1,
1293 # ctx2 date may be dynamic
1293 # ctx2 date may be dynamic
1294 tn, util.datestr(ctx2.date()),
1294 tn, util.datestr(ctx2.date()),
1295 a, b, r, opts=opts)
1295 a, b, r, opts=opts)
1296 if header and (text or len(header) > 1):
1296 if header and (text or len(header) > 1):
1297 yield ''.join(header)
1297 yield ''.join(header)
1298 if text:
1298 if text:
1299 yield text
1299 yield text
1300
1300
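As a usage sketch only (not part of this changeset), the generator above combines with diffopts() as follows; repo is assumed to be an open localrepository object:

    import sys

    opts = diffopts(repo.ui, {'git': True})   # force git-style headers
    for chunk in diff(repo, opts=opts):       # working directory vs. first parent
        sys.stdout.write(chunk)
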
1301 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1301 def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
1302 opts=None):
1302 opts=None):
1303 '''export changesets as hg patches.'''
1303 '''export changesets as hg patches.'''
1304
1304
1305 total = len(revs)
1305 total = len(revs)
1306 revwidth = max([len(str(rev)) for rev in revs])
1306 revwidth = max([len(str(rev)) for rev in revs])
1307
1307
1308 def single(rev, seqno, fp):
1308 def single(rev, seqno, fp):
1309 ctx = repo[rev]
1309 ctx = repo[rev]
1310 node = ctx.node()
1310 node = ctx.node()
1311 parents = [p.node() for p in ctx.parents() if p]
1311 parents = [p.node() for p in ctx.parents() if p]
1312 branch = ctx.branch()
1312 branch = ctx.branch()
1313 if switch_parent:
1313 if switch_parent:
1314 parents.reverse()
1314 parents.reverse()
1315 prev = (parents and parents[0]) or nullid
1315 prev = (parents and parents[0]) or nullid
1316
1316
1317 if not fp:
1317 if not fp:
1318 fp = cmdutil.make_file(repo, template, node, total=total,
1318 fp = cmdutil.make_file(repo, template, node, total=total,
1319 seqno=seqno, revwidth=revwidth,
1319 seqno=seqno, revwidth=revwidth,
1320 mode='ab')
1320 mode='ab')
1321 if fp != sys.stdout and hasattr(fp, 'name'):
1321 if fp != sys.stdout and hasattr(fp, 'name'):
1322 repo.ui.note("%s\n" % fp.name)
1322 repo.ui.note("%s\n" % fp.name)
1323
1323
1324 fp.write("# HG changeset patch\n")
1324 fp.write("# HG changeset patch\n")
1325 fp.write("# User %s\n" % ctx.user())
1325 fp.write("# User %s\n" % ctx.user())
1326 fp.write("# Date %d %d\n" % ctx.date())
1326 fp.write("# Date %d %d\n" % ctx.date())
1327 if branch and (branch != 'default'):
1327 if branch and (branch != 'default'):
1328 fp.write("# Branch %s\n" % branch)
1328 fp.write("# Branch %s\n" % branch)
1329 fp.write("# Node ID %s\n" % hex(node))
1329 fp.write("# Node ID %s\n" % hex(node))
1330 fp.write("# Parent %s\n" % hex(prev))
1330 fp.write("# Parent %s\n" % hex(prev))
1331 if len(parents) > 1:
1331 if len(parents) > 1:
1332 fp.write("# Parent %s\n" % hex(parents[1]))
1332 fp.write("# Parent %s\n" % hex(parents[1]))
1333 fp.write(ctx.description().rstrip())
1333 fp.write(ctx.description().rstrip())
1334 fp.write("\n\n")
1334 fp.write("\n\n")
1335
1335
1336 for chunk in diff(repo, prev, node, opts=opts):
1336 for chunk in diff(repo, prev, node, opts=opts):
1337 fp.write(chunk)
1337 fp.write(chunk)
1338
1338
1339 for seqno, rev in enumerate(revs):
1339 for seqno, rev in enumerate(revs):
1340 single(rev, seqno+1, fp)
1340 single(rev, seqno+1, fp)
1341
1341
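A hedged usage example for export() (not part of the changeset): writing the tip changeset to stdout. When fp is omitted, one file per revision is created from the template instead, '%h' expanding to the short changeset hash.

    import sys

    export(repo, ['tip'], fp=sys.stdout)
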
1342 def diffstatdata(lines):
1342 def diffstatdata(lines):
1343 filename, adds, removes = None, 0, 0
1343 filename, adds, removes = None, 0, 0
1344 for line in lines:
1344 for line in lines:
1345 if line.startswith('diff'):
1345 if line.startswith('diff'):
1346 if filename:
1346 if filename:
1347 yield (filename, adds, removes)
1347 yield (filename, adds, removes)
1348 # set numbers to 0 anyway when starting a new file
1348 # set numbers to 0 anyway when starting a new file
1349 adds, removes = 0, 0
1349 adds, removes = 0, 0
1350 if line.startswith('diff --git'):
1350 if line.startswith('diff --git'):
1351 filename = gitre.search(line).group(1)
1351 filename = gitre.search(line).group(1)
1352 else:
1352 else:
1353 # format: "diff -r ... -r ... filename"
1353 # format: "diff -r ... -r ... filename"
1354 filename = line.split(None, 5)[-1]
1354 filename = line.split(None, 5)[-1]
1355 elif line.startswith('+') and not line.startswith('+++'):
1355 elif line.startswith('+') and not line.startswith('+++'):
1356 adds += 1
1356 adds += 1
1357 elif line.startswith('-') and not line.startswith('---'):
1357 elif line.startswith('-') and not line.startswith('---'):
1358 removes += 1
1358 removes += 1
1359 if filename:
1359 if filename:
1360 yield (filename, adds, removes)
1360 yield (filename, adds, removes)
1361
1361
1362 def diffstat(lines, width=80):
1362 def diffstat(lines, width=80):
1363 output = []
1363 output = []
1364 stats = list(diffstatdata(lines))
1364 stats = list(diffstatdata(lines))
1365
1365
1366 maxtotal, maxname = 0, 0
1366 maxtotal, maxname = 0, 0
1367 totaladds, totalremoves = 0, 0
1367 totaladds, totalremoves = 0, 0
1368 for filename, adds, removes in stats:
1368 for filename, adds, removes in stats:
1369 totaladds += adds
1369 totaladds += adds
1370 totalremoves += removes
1370 totalremoves += removes
1371 maxname = max(maxname, len(filename))
1371 maxname = max(maxname, len(filename))
1372 maxtotal = max(maxtotal, adds+removes)
1372 maxtotal = max(maxtotal, adds+removes)
1373
1373
1374 countwidth = len(str(maxtotal))
1374 countwidth = len(str(maxtotal))
1375 graphwidth = width - countwidth - maxname
1375 graphwidth = width - countwidth - maxname
1376 if graphwidth < 10:
1376 if graphwidth < 10:
1377 graphwidth = 10
1377 graphwidth = 10
1378
1378
1379 factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
1379 factor = max(int(math.ceil(float(maxtotal) / graphwidth)), 1)
1380
1380
1381 for filename, adds, removes in stats:
1381 for filename, adds, removes in stats:
1382 # If diffstat runs out of room it doesn't print anything, which
1382 # If diffstat runs out of room it doesn't print anything, which
1383 # isn't very useful, so always print at least one + or - if there
1383 # isn't very useful, so always print at least one + or - if there
1384 # were at least some changes
1384 # were at least some changes
1385 pluses = '+' * max(adds/factor, int(bool(adds)))
1385 pluses = '+' * max(adds/factor, int(bool(adds)))
1386 minuses = '-' * max(removes/factor, int(bool(removes)))
1386 minuses = '-' * max(removes/factor, int(bool(removes)))
1387 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1387 output.append(' %-*s | %*.d %s%s\n' % (maxname, filename, countwidth,
1388 adds+removes, pluses, minuses))
1388 adds+removes, pluses, minuses))
1389
1389
1390 if stats:
1390 if stats:
1391 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
1391 output.append(' %d files changed, %d insertions(+), %d deletions(-)\n'
1392 % (len(stats), totaladds, totalremoves))
1392 % (len(stats), totaladds, totalremoves))
1393
1393
1394 return ''.join(output)
1394 return ''.join(output)
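The two helpers above split cleanly: diffstatdata() parses, diffstat() formats. An illustrative run on a made-up patch (nothing here is part of the changeset):

    import sys

    lines = [
        'diff -r 000000000000 -r 111111111111 example.txt',
        '--- a/example.txt',
        '+++ b/example.txt',
        '@@ -1,2 +1,2 @@',
        '-old line',
        '+new line',
        ' unchanged',
    ]
    for fname, adds, removes in diffstatdata(lines):
        sys.stdout.write('%s: +%d -%d\n' % (fname, adds, removes))
    sys.stdout.write(diffstat(lines))
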
@@ -1,332 +1,332
1 # store.py - repository store handling for Mercurial
1 # store.py - repository store handling for Mercurial
2 #
2 #
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2, incorporated herein by reference.
6 # GNU General Public License version 2, incorporated herein by reference.
7
7
8 from i18n import _
8 from i18n import _
9 import osutil, util
9 import osutil, util
10 import os, stat
10 import os, stat
11
11
12 _sha = util.sha1
12 _sha = util.sha1
13
13
14 # This avoids a collision between a file named foo and a dir named
14 # This avoids a collision between a file named foo and a dir named
15 # foo.i or foo.d
15 # foo.i or foo.d
16 def encodedir(path):
16 def encodedir(path):
17 if not path.startswith('data/'):
17 if not path.startswith('data/'):
18 return path
18 return path
19 return (path
19 return (path
20 .replace(".hg/", ".hg.hg/")
20 .replace(".hg/", ".hg.hg/")
21 .replace(".i/", ".i.hg/")
21 .replace(".i/", ".i.hg/")
22 .replace(".d/", ".d.hg/"))
22 .replace(".d/", ".d.hg/"))
23
23
24 def decodedir(path):
24 def decodedir(path):
25 if not path.startswith('data/'):
25 if not path.startswith('data/'):
26 return path
26 return path
27 return (path
27 return (path
28 .replace(".d.hg/", ".d/")
28 .replace(".d.hg/", ".d/")
29 .replace(".i.hg/", ".i/")
29 .replace(".i.hg/", ".i/")
30 .replace(".hg.hg/", ".hg/"))
30 .replace(".hg.hg/", ".hg/"))
31
31
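The collision mentioned in the comment above is easiest to see with an example (illustrative, not part of the patch): a tracked directory named foo.i needs the store directory data/foo.i/, which would collide with the revlog data/foo.i of a tracked file foo, so such directory names get a .hg suffix.

    assert encodedir('data/foo.i/bar.i') == 'data/foo.i.hg/bar.i'
    assert decodedir('data/foo.i.hg/bar.i') == 'data/foo.i/bar.i'
    assert encodedir('other/foo.i/bar.i') == 'other/foo.i/bar.i'   # only data/ paths are touched
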
32 def _buildencodefun():
32 def _buildencodefun():
33 e = '_'
33 e = '_'
34 win_reserved = [ord(x) for x in '\\:*?"<>|']
34 win_reserved = [ord(x) for x in '\\:*?"<>|']
35 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
35 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
36 for x in (range(32) + range(126, 256) + win_reserved):
36 for x in (range(32) + range(126, 256) + win_reserved):
37 cmap[chr(x)] = "~%02x" % x
37 cmap[chr(x)] = "~%02x" % x
38 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
38 for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
39 cmap[chr(x)] = e + chr(x).lower()
39 cmap[chr(x)] = e + chr(x).lower()
40 dmap = {}
40 dmap = {}
41 for k, v in cmap.iteritems():
41 for k, v in cmap.iteritems():
42 dmap[v] = k
42 dmap[v] = k
43 def decode(s):
43 def decode(s):
44 i = 0
44 i = 0
45 while i < len(s):
45 while i < len(s):
46 for l in xrange(1, 4):
46 for l in xrange(1, 4):
47 try:
47 try:
48 yield dmap[s[i:i+l]]
48 yield dmap[s[i:i+l]]
49 i += l
49 i += l
50 break
50 break
51 except KeyError:
51 except KeyError:
52 pass
52 pass
53 else:
53 else:
54 raise KeyError
54 raise KeyError
55 return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
55 return (lambda s: "".join([cmap[c] for c in encodedir(s)]),
56 lambda s: decodedir("".join(list(decode(s)))))
56 lambda s: decodedir("".join(list(decode(s)))))
57
57
58 encodefilename, decodefilename = _buildencodefun()
58 encodefilename, decodefilename = _buildencodefun()
59
59
60 def _build_lower_encodefun():
60 def _build_lower_encodefun():
61 win_reserved = [ord(x) for x in '\\:*?"<>|']
61 win_reserved = [ord(x) for x in '\\:*?"<>|']
62 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
62 cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
63 for x in (range(32) + range(126, 256) + win_reserved):
63 for x in (range(32) + range(126, 256) + win_reserved):
64 cmap[chr(x)] = "~%02x" % x
64 cmap[chr(x)] = "~%02x" % x
65 for x in range(ord("A"), ord("Z")+1):
65 for x in range(ord("A"), ord("Z")+1):
66 cmap[chr(x)] = chr(x).lower()
66 cmap[chr(x)] = chr(x).lower()
67 return lambda s: "".join([cmap[c] for c in s])
67 return lambda s: "".join([cmap[c] for c in s])
68
68
69 lowerencode = _build_lower_encodefun()
69 lowerencode = _build_lower_encodefun()
70
70
71 _windows_reserved_filenames = '''con prn aux nul
71 _windows_reserved_filenames = '''con prn aux nul
72 com1 com2 com3 com4 com5 com6 com7 com8 com9
72 com1 com2 com3 com4 com5 com6 com7 com8 com9
73 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
73 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
74 def auxencode(path):
74 def auxencode(path):
75 res = []
75 res = []
76 for n in path.split('/'):
76 for n in path.split('/'):
77 if n:
77 if n:
78 base = n.split('.')[0]
78 base = n.split('.')[0]
79 if base and (base in _windows_reserved_filenames):
79 if base and (base in _windows_reserved_filenames):
80 # encode third letter ('aux' -> 'au~78')
80 # encode third letter ('aux' -> 'au~78')
81 ec = "~%02x" % ord(n[2])
81 ec = "~%02x" % ord(n[2])
82 n = n[0:2] + ec + n[3:]
82 n = n[0:2] + ec + n[3:]
83 if n[-1] in '. ':
83 if n[-1] in '. ':
84 # encode last period or space ('foo...' -> 'foo..~2e')
84 # encode last period or space ('foo...' -> 'foo..~2e')
85 n = n[:-1] + "~%02x" % ord(n[-1])
85 n = n[:-1] + "~%02x" % ord(n[-1])
86 res.append(n)
86 res.append(n)
87 return '/'.join(res)
87 return '/'.join(res)
88
88
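For illustration (not part of the changeset), the two transformations described in the comments above behave as follows:

    assert auxencode('data/aux.i') == 'data/au~78.i'     # reserved name masked
    assert auxencode('data/foo...') == 'data/foo..~2e'   # trailing period escaped
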
89 MAX_PATH_LEN_IN_HGSTORE = 120
89 MAX_PATH_LEN_IN_HGSTORE = 120
90 DIR_PREFIX_LEN = 8
90 DIR_PREFIX_LEN = 8
91 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
91 _MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4
92 def hybridencode(path):
92 def hybridencode(path):
93 '''encodes path with a length limit
93 '''encodes path with a length limit
94
94
95 Encodes all paths that begin with 'data/', according to the following.
95 Encodes all paths that begin with 'data/', according to the following.
96
96
97 Default encoding (reversible):
97 Default encoding (reversible):
98
98
99 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
99 Encodes all uppercase letters 'X' as '_x'. All reserved or illegal
100 characters are encoded as '~xx', where xx is the two digit hex code
100 characters are encoded as '~xx', where xx is the two digit hex code
101 of the character (see encodefilename).
101 of the character (see encodefilename).
102 Relevant path components consisting of Windows reserved filenames are
102 Relevant path components consisting of Windows reserved filenames are
103 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
103 masked by encoding the third character ('aux' -> 'au~78', see auxencode).
104
104
105 Hashed encoding (not reversible):
105 Hashed encoding (not reversible):
106
106
107 If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
107 If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a
108 non-reversible hybrid hashing of the path is done instead.
108 non-reversible hybrid hashing of the path is done instead.
109 This encoding uses up to DIR_PREFIX_LEN characters of all directory
109 This encoding uses up to DIR_PREFIX_LEN characters of all directory
110 levels of the lowerencoded path, but not more levels than can fit into
110 levels of the lowerencoded path, but not more levels than can fit into
111 _MAX_SHORTENED_DIRS_LEN.
111 _MAX_SHORTENED_DIRS_LEN.
112 Then follows the filler followed by the sha digest of the full path.
112 Then follows the filler followed by the sha digest of the full path.
113 The filler is the beginning of the basename of the lowerencoded path
113 The filler is the beginning of the basename of the lowerencoded path
114 (the basename is everything after the last path separator). The filler
114 (the basename is everything after the last path separator). The filler
115 is as long as possible, filling in characters from the basename until
115 is as long as possible, filling in characters from the basename until
116 the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
116 the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars
117 of the basename have been taken).
117 of the basename have been taken).
118 The extension (e.g. '.i' or '.d') is preserved.
118 The extension (e.g. '.i' or '.d') is preserved.
119
119
120 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
120 The string 'data/' at the beginning is replaced with 'dh/', if the hashed
121 encoding was used.
121 encoding was used.
122 '''
122 '''
123 if not path.startswith('data/'):
123 if not path.startswith('data/'):
124 return path
124 return path
125 # escape directories ending with .i and .d
125 # escape directories ending with .i and .d
126 path = encodedir(path)
126 path = encodedir(path)
127 ndpath = path[len('data/'):]
127 ndpath = path[len('data/'):]
128 res = 'data/' + auxencode(encodefilename(ndpath))
128 res = 'data/' + auxencode(encodefilename(ndpath))
129 if len(res) > MAX_PATH_LEN_IN_HGSTORE:
129 if len(res) > MAX_PATH_LEN_IN_HGSTORE:
130 digest = _sha(path).hexdigest()
130 digest = _sha(path).hexdigest()
131 aep = auxencode(lowerencode(ndpath))
131 aep = auxencode(lowerencode(ndpath))
132 _root, ext = os.path.splitext(aep)
132 _root, ext = os.path.splitext(aep)
133 parts = aep.split('/')
133 parts = aep.split('/')
134 basename = parts[-1]
134 basename = parts[-1]
135 sdirs = []
135 sdirs = []
136 for p in parts[:-1]:
136 for p in parts[:-1]:
137 d = p[:DIR_PREFIX_LEN]
137 d = p[:DIR_PREFIX_LEN]
138 if d[-1] in '. ':
138 if d[-1] in '. ':
139 # Windows can't access dirs ending in period or space
139 # Windows can't access dirs ending in period or space
140 d = d[:-1] + '_'
140 d = d[:-1] + '_'
141 t = '/'.join(sdirs) + '/' + d
141 t = '/'.join(sdirs) + '/' + d
142 if len(t) > _MAX_SHORTENED_DIRS_LEN:
142 if len(t) > _MAX_SHORTENED_DIRS_LEN:
143 break
143 break
144 sdirs.append(d)
144 sdirs.append(d)
145 dirs = '/'.join(sdirs)
145 dirs = '/'.join(sdirs)
146 if len(dirs) > 0:
146 if len(dirs) > 0:
147 dirs += '/'
147 dirs += '/'
148 res = 'dh/' + dirs + digest + ext
148 res = 'dh/' + dirs + digest + ext
149 space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
149 space_left = MAX_PATH_LEN_IN_HGSTORE - len(res)
150 if space_left > 0:
150 if space_left > 0:
151 filler = basename[:space_left]
151 filler = basename[:space_left]
152 res = 'dh/' + dirs + filler + digest + ext
152 res = 'dh/' + dirs + filler + digest + ext
153 return res
153 return res
154
154
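Both branches of the scheme above can be shown briefly; this is a hedged sketch with made-up paths, not part of the changeset:

    # default (reversible) encoding: uppercase letters and 'aux' rewritten in place
    assert hybridencode('data/FOO/aux.txt.i') == 'data/_f_o_o/au~78.txt.i'

    # hashed (non-reversible) encoding kicks in for over-long paths
    longpath = 'data/' + 'x' * 150 + '.i'
    enc = hybridencode(longpath)
    assert enc.startswith('dh/')
    assert len(enc) <= MAX_PATH_LEN_IN_HGSTORE
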
155 def _calcmode(path):
155 def _calcmode(path):
156 try:
156 try:
157 # files in .hg/ will be created using this mode
157 # files in .hg/ will be created using this mode
158 mode = os.stat(path).st_mode
158 mode = os.stat(path).st_mode
159 # avoid some useless chmods
159 # avoid some useless chmods
160 if (0777 & ~util.umask) == (0777 & mode):
160 if (0777 & ~util.umask) == (0777 & mode):
161 mode = None
161 mode = None
162 except OSError:
162 except OSError:
163 mode = None
163 mode = None
164 return mode
164 return mode
165
165
166 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
166 _data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
167
167
168 class basicstore:
168 class basicstore(object):
169 '''base class for local repository stores'''
169 '''base class for local repository stores'''
170 def __init__(self, path, opener, pathjoiner):
170 def __init__(self, path, opener, pathjoiner):
171 self.pathjoiner = pathjoiner
171 self.pathjoiner = pathjoiner
172 self.path = path
172 self.path = path
173 self.createmode = _calcmode(path)
173 self.createmode = _calcmode(path)
174 op = opener(self.path)
174 op = opener(self.path)
175 op.createmode = self.createmode
175 op.createmode = self.createmode
176 self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw)
176 self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw)
177
177
178 def join(self, f):
178 def join(self, f):
179 return self.pathjoiner(self.path, encodedir(f))
179 return self.pathjoiner(self.path, encodedir(f))
180
180
181 def _walk(self, relpath, recurse):
181 def _walk(self, relpath, recurse):
182 '''yields (unencoded, encoded, size)'''
182 '''yields (unencoded, encoded, size)'''
183 path = self.pathjoiner(self.path, relpath)
183 path = self.pathjoiner(self.path, relpath)
184 striplen = len(self.path) + len(os.sep)
184 striplen = len(self.path) + len(os.sep)
185 l = []
185 l = []
186 if os.path.isdir(path):
186 if os.path.isdir(path):
187 visit = [path]
187 visit = [path]
188 while visit:
188 while visit:
189 p = visit.pop()
189 p = visit.pop()
190 for f, kind, st in osutil.listdir(p, stat=True):
190 for f, kind, st in osutil.listdir(p, stat=True):
191 fp = self.pathjoiner(p, f)
191 fp = self.pathjoiner(p, f)
192 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
192 if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'):
193 n = util.pconvert(fp[striplen:])
193 n = util.pconvert(fp[striplen:])
194 l.append((decodedir(n), n, st.st_size))
194 l.append((decodedir(n), n, st.st_size))
195 elif kind == stat.S_IFDIR and recurse:
195 elif kind == stat.S_IFDIR and recurse:
196 visit.append(fp)
196 visit.append(fp)
197 return sorted(l)
197 return sorted(l)
198
198
199 def datafiles(self):
199 def datafiles(self):
200 return self._walk('data', True)
200 return self._walk('data', True)
201
201
202 def walk(self):
202 def walk(self):
203 '''yields (unencoded, encoded, size)'''
203 '''yields (unencoded, encoded, size)'''
204 # yield data files first
204 # yield data files first
205 for x in self.datafiles():
205 for x in self.datafiles():
206 yield x
206 yield x
207 # yield manifest before changelog
207 # yield manifest before changelog
208 for x in reversed(self._walk('', False)):
208 for x in reversed(self._walk('', False)):
209 yield x
209 yield x
210
210
211 def copylist(self):
211 def copylist(self):
212 return ['requires'] + _data.split()
212 return ['requires'] + _data.split()
213
213
214 class encodedstore(basicstore):
214 class encodedstore(basicstore):
215 def __init__(self, path, opener, pathjoiner):
215 def __init__(self, path, opener, pathjoiner):
216 self.pathjoiner = pathjoiner
216 self.pathjoiner = pathjoiner
217 self.path = self.pathjoiner(path, 'store')
217 self.path = self.pathjoiner(path, 'store')
218 self.createmode = _calcmode(self.path)
218 self.createmode = _calcmode(self.path)
219 op = opener(self.path)
219 op = opener(self.path)
220 op.createmode = self.createmode
220 op.createmode = self.createmode
221 self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
221 self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw)
222
222
223 def datafiles(self):
223 def datafiles(self):
224 for a, b, size in self._walk('data', True):
224 for a, b, size in self._walk('data', True):
225 try:
225 try:
226 a = decodefilename(a)
226 a = decodefilename(a)
227 except KeyError:
227 except KeyError:
228 a = None
228 a = None
229 yield a, b, size
229 yield a, b, size
230
230
231 def join(self, f):
231 def join(self, f):
232 return self.pathjoiner(self.path, encodefilename(f))
232 return self.pathjoiner(self.path, encodefilename(f))
233
233
234 def copylist(self):
234 def copylist(self):
235 return (['requires', '00changelog.i'] +
235 return (['requires', '00changelog.i'] +
236 [self.pathjoiner('store', f) for f in _data.split()])
236 [self.pathjoiner('store', f) for f in _data.split()])
237
237
238 class fncache(object):
238 class fncache(object):
239 # the filename used to be partially encoded
239 # the filename used to be partially encoded
240 # hence the encodedir/decodedir dance
240 # hence the encodedir/decodedir dance
241 def __init__(self, opener):
241 def __init__(self, opener):
242 self.opener = opener
242 self.opener = opener
243 self.entries = None
243 self.entries = None
244
244
245 def _load(self):
245 def _load(self):
246 '''fill the entries from the fncache file'''
246 '''fill the entries from the fncache file'''
247 self.entries = set()
247 self.entries = set()
248 try:
248 try:
249 fp = self.opener('fncache', mode='rb')
249 fp = self.opener('fncache', mode='rb')
250 except IOError:
250 except IOError:
251 # skip nonexistent file
251 # skip nonexistent file
252 return
252 return
253 for n, line in enumerate(fp):
253 for n, line in enumerate(fp):
254 if (len(line) < 2) or (line[-1] != '\n'):
254 if (len(line) < 2) or (line[-1] != '\n'):
255 t = _('invalid entry in fncache, line %s') % (n + 1)
255 t = _('invalid entry in fncache, line %s') % (n + 1)
256 raise util.Abort(t)
256 raise util.Abort(t)
257 self.entries.add(decodedir(line[:-1]))
257 self.entries.add(decodedir(line[:-1]))
258 fp.close()
258 fp.close()
259
259
260 def rewrite(self, files):
260 def rewrite(self, files):
261 fp = self.opener('fncache', mode='wb')
261 fp = self.opener('fncache', mode='wb')
262 for p in files:
262 for p in files:
263 fp.write(encodedir(p) + '\n')
263 fp.write(encodedir(p) + '\n')
264 fp.close()
264 fp.close()
265 self.entries = set(files)
265 self.entries = set(files)
266
266
267 def add(self, fn):
267 def add(self, fn):
268 if self.entries is None:
268 if self.entries is None:
269 self._load()
269 self._load()
270 self.opener('fncache', 'ab').write(encodedir(fn) + '\n')
270 self.opener('fncache', 'ab').write(encodedir(fn) + '\n')
271
271
272 def __contains__(self, fn):
272 def __contains__(self, fn):
273 if self.entries is None:
273 if self.entries is None:
274 self._load()
274 self._load()
275 return fn in self.entries
275 return fn in self.entries
276
276
277 def __iter__(self):
277 def __iter__(self):
278 if self.entries is None:
278 if self.entries is None:
279 self._load()
279 self._load()
280 return iter(self.entries)
280 return iter(self.entries)
281
281
282 class fncachestore(basicstore):
282 class fncachestore(basicstore):
283 def __init__(self, path, opener, pathjoiner):
283 def __init__(self, path, opener, pathjoiner):
284 self.pathjoiner = pathjoiner
284 self.pathjoiner = pathjoiner
285 self.path = self.pathjoiner(path, 'store')
285 self.path = self.pathjoiner(path, 'store')
286 self.createmode = _calcmode(self.path)
286 self.createmode = _calcmode(self.path)
287 self._op = opener(self.path)
287 self._op = opener(self.path)
288 self._op.createmode = self.createmode
288 self._op.createmode = self.createmode
289 self.fncache = fncache(self._op)
289 self.fncache = fncache(self._op)
290
290
291 def fncacheopener(path, mode='r', *args, **kw):
291 def fncacheopener(path, mode='r', *args, **kw):
292 if (mode not in ('r', 'rb')
292 if (mode not in ('r', 'rb')
293 and path.startswith('data/')
293 and path.startswith('data/')
294 and path not in self.fncache):
294 and path not in self.fncache):
295 self.fncache.add(path)
295 self.fncache.add(path)
296 return self._op(hybridencode(path), mode, *args, **kw)
296 return self._op(hybridencode(path), mode, *args, **kw)
297 self.opener = fncacheopener
297 self.opener = fncacheopener
298
298
299 def join(self, f):
299 def join(self, f):
300 return self.pathjoiner(self.path, hybridencode(f))
300 return self.pathjoiner(self.path, hybridencode(f))
301
301
302 def datafiles(self):
302 def datafiles(self):
303 rewrite = False
303 rewrite = False
304 existing = []
304 existing = []
305 pjoin = self.pathjoiner
305 pjoin = self.pathjoiner
306 spath = self.path
306 spath = self.path
307 for f in self.fncache:
307 for f in self.fncache:
308 ef = hybridencode(f)
308 ef = hybridencode(f)
309 try:
309 try:
310 st = os.stat(pjoin(spath, ef))
310 st = os.stat(pjoin(spath, ef))
311 yield f, ef, st.st_size
311 yield f, ef, st.st_size
312 existing.append(f)
312 existing.append(f)
313 except OSError:
313 except OSError:
314 # nonexistent entry
314 # nonexistent entry
315 rewrite = True
315 rewrite = True
316 if rewrite:
316 if rewrite:
317 # rewrite fncache to remove nonexistent entries
317 # rewrite fncache to remove nonexistent entries
318 # (may be caused by rollback / strip)
318 # (may be caused by rollback / strip)
319 self.fncache.rewrite(existing)
319 self.fncache.rewrite(existing)
320
320
321 def copylist(self):
321 def copylist(self):
322 d = _data + ' dh fncache'
322 d = _data + ' dh fncache'
323 return (['requires', '00changelog.i'] +
323 return (['requires', '00changelog.i'] +
324 [self.pathjoiner('store', f) for f in d.split()])
324 [self.pathjoiner('store', f) for f in d.split()])
325
325
326 def store(requirements, path, opener, pathjoiner=None):
326 def store(requirements, path, opener, pathjoiner=None):
327 pathjoiner = pathjoiner or os.path.join
327 pathjoiner = pathjoiner or os.path.join
328 if 'store' in requirements:
328 if 'store' in requirements:
329 if 'fncache' in requirements:
329 if 'fncache' in requirements:
330 return fncachestore(path, opener, pathjoiner)
330 return fncachestore(path, opener, pathjoiner)
331 return encodedstore(path, opener, pathjoiner)
331 return encodedstore(path, opener, pathjoiner)
332 return basicstore(path, opener, pathjoiner)
332 return basicstore(path, opener, pathjoiner)
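A hedged usage sketch for the factory above (not part of this changeset; the repository path is hypothetical). The requirements normally come from the repository's .hg/requires file, and util.opener (seen earlier in applydiff()) serves as the opener:

    from mercurial import store, util

    requirements = ['revlogv1', 'store', 'fncache']
    s = store.store(requirements, '/path/to/repo/.hg', util.opener)
    print s.__class__.__name__                # -> fncachestore
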
@@ -1,1247 +1,1247
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, random
19 import os, stat, time, calendar, random
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
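The comment above describes a small idiom worth spelling out: the stub pays the import cost once, then rebinds its own name so that later calls go straight to the imported function. A standalone sketch of the same pattern with hypothetical names, not part of the patch:

    def _lazy_md5(s):
        from hashlib import md5 as _md5   # the costly lookup happens only once
        global _lazy_md5
        _lazy_md5 = _md5                  # rebind: later calls bypass this stub
        return _md5(s)

    digest = _lazy_md5('abc').hexdigest()
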
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
42 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
44 return p.stdin, p.stdout
44 return p.stdin, p.stdout
45 def popen3(cmd):
45 def popen3(cmd):
46 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
46 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
48 stderr=subprocess.PIPE)
48 stderr=subprocess.PIPE)
49 return p.stdin, p.stdout, p.stderr
49 return p.stdin, p.stdout, p.stderr
50
50
51 def version():
51 def version():
52 """Return version information if available."""
52 """Return version information if available."""
53 try:
53 try:
54 import __version__
54 import __version__
55 return __version__.version
55 return __version__.version
56 except ImportError:
56 except ImportError:
57 return 'unknown'
57 return 'unknown'
58
58
59 # used by parsedate
59 # used by parsedate
60 defaultdateformats = (
60 defaultdateformats = (
61 '%Y-%m-%d %H:%M:%S',
61 '%Y-%m-%d %H:%M:%S',
62 '%Y-%m-%d %I:%M:%S%p',
62 '%Y-%m-%d %I:%M:%S%p',
63 '%Y-%m-%d %H:%M',
63 '%Y-%m-%d %H:%M',
64 '%Y-%m-%d %I:%M%p',
64 '%Y-%m-%d %I:%M%p',
65 '%Y-%m-%d',
65 '%Y-%m-%d',
66 '%m-%d',
66 '%m-%d',
67 '%m/%d',
67 '%m/%d',
68 '%m/%d/%y',
68 '%m/%d/%y',
69 '%m/%d/%Y',
69 '%m/%d/%Y',
70 '%a %b %d %H:%M:%S %Y',
70 '%a %b %d %H:%M:%S %Y',
71 '%a %b %d %I:%M:%S%p %Y',
71 '%a %b %d %I:%M:%S%p %Y',
72 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
72 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
73 '%b %d %H:%M:%S %Y',
73 '%b %d %H:%M:%S %Y',
74 '%b %d %I:%M:%S%p %Y',
74 '%b %d %I:%M:%S%p %Y',
75 '%b %d %H:%M:%S',
75 '%b %d %H:%M:%S',
76 '%b %d %I:%M:%S%p',
76 '%b %d %I:%M:%S%p',
77 '%b %d %H:%M',
77 '%b %d %H:%M',
78 '%b %d %I:%M%p',
78 '%b %d %I:%M%p',
79 '%b %d %Y',
79 '%b %d %Y',
80 '%b %d',
80 '%b %d',
81 '%H:%M:%S',
81 '%H:%M:%S',
82 '%I:%M:%SP',
82 '%I:%M:%SP',
83 '%H:%M',
83 '%H:%M',
84 '%I:%M%p',
84 '%I:%M%p',
85 )
85 )
86
86
87 extendeddateformats = defaultdateformats + (
87 extendeddateformats = defaultdateformats + (
88 "%Y",
88 "%Y",
89 "%Y-%m",
89 "%Y-%m",
90 "%b",
90 "%b",
91 "%b %Y",
91 "%b %Y",
92 )
92 )
93
93
94 def cachefunc(func):
94 def cachefunc(func):
95 '''cache the result of function calls'''
95 '''cache the result of function calls'''
96 # XXX doesn't handle keyword args
96 # XXX doesn't handle keyword args
97 cache = {}
97 cache = {}
98 if func.func_code.co_argcount == 1:
98 if func.func_code.co_argcount == 1:
99 # we gain a small amount of time because
99 # we gain a small amount of time because
100 # we don't need to pack/unpack the list
100 # we don't need to pack/unpack the list
101 def f(arg):
101 def f(arg):
102 if arg not in cache:
102 if arg not in cache:
103 cache[arg] = func(arg)
103 cache[arg] = func(arg)
104 return cache[arg]
104 return cache[arg]
105 else:
105 else:
106 def f(*args):
106 def f(*args):
107 if args not in cache:
107 if args not in cache:
108 cache[args] = func(*args)
108 cache[args] = func(*args)
109 return cache[args]
109 return cache[args]
110
110
111 return f
111 return f
112
112
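A usage sketch for cachefunc() above, illustrative only:

    def _square(n):
        print 'computing', n              # side effect makes the caching visible
        return n * n

    square = cachefunc(_square)
    square(3)                             # prints 'computing 3'
    square(3)                             # answered from the cache, no print
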
113 class propertycache(object):
113 class propertycache(object):
114 def __init__(self, func):
114 def __init__(self, func):
115 self.func = func
115 self.func = func
116 self.name = func.__name__
116 self.name = func.__name__
117 def __get__(self, obj, type=None):
117 def __get__(self, obj, type=None):
118 result = self.func(obj)
118 result = self.func(obj)
119 setattr(obj, self.name, result)
119 setattr(obj, self.name, result)
120 return result
120 return result
121
121
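# Editor's sketch of how the propertycache descriptor above is used: the wrapped
# function runs once, setattr() then stores the result on the instance, and
# later lookups bypass the (non-data) descriptor entirely. The cachedattr/Repo
# names are examples, not mercurial.util API.
class cachedattr(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        if obj is None:
            return self
        result = self.func(obj)
        setattr(obj, self.name, result)
        return result

class Repo(object):
    calls = 0
    @cachedattr
    def expensive(self):
        Repo.calls += 1
        return 42

r = Repo()
assert r.expensive == 42 and r.expensive == 42
assert Repo.calls == 1   # computed only once, then read from the instance dict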
122 def pipefilter(s, cmd):
122 def pipefilter(s, cmd):
123 '''filter string S through command CMD, returning its output'''
123 '''filter string S through command CMD, returning its output'''
124 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
124 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
125 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
125 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
126 pout, perr = p.communicate(s)
126 pout, perr = p.communicate(s)
127 return pout
127 return pout
128
128
129 def tempfilter(s, cmd):
129 def tempfilter(s, cmd):
130 '''filter string S through a pair of temporary files with CMD.
130 '''filter string S through a pair of temporary files with CMD.
131 CMD is used as a template to create the real command to be run,
131 CMD is used as a template to create the real command to be run,
132 with the strings INFILE and OUTFILE replaced by the real names of
132 with the strings INFILE and OUTFILE replaced by the real names of
133 the temporary files generated.'''
133 the temporary files generated.'''
134 inname, outname = None, None
134 inname, outname = None, None
135 try:
135 try:
136 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
136 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
137 fp = os.fdopen(infd, 'wb')
137 fp = os.fdopen(infd, 'wb')
138 fp.write(s)
138 fp.write(s)
139 fp.close()
139 fp.close()
140 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
140 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
141 os.close(outfd)
141 os.close(outfd)
142 cmd = cmd.replace('INFILE', inname)
142 cmd = cmd.replace('INFILE', inname)
143 cmd = cmd.replace('OUTFILE', outname)
143 cmd = cmd.replace('OUTFILE', outname)
144 code = os.system(cmd)
144 code = os.system(cmd)
145 if sys.platform == 'OpenVMS' and code & 1:
145 if sys.platform == 'OpenVMS' and code & 1:
146 code = 0
146 code = 0
147 if code: raise Abort(_("command '%s' failed: %s") %
147 if code: raise Abort(_("command '%s' failed: %s") %
148 (cmd, explain_exit(code)))
148 (cmd, explain_exit(code)))
149 return open(outname, 'rb').read()
149 return open(outname, 'rb').read()
150 finally:
150 finally:
151 try:
151 try:
152 if inname: os.unlink(inname)
152 if inname: os.unlink(inname)
153 except: pass
153 except: pass
154 try:
154 try:
155 if outname: os.unlink(outname)
155 if outname: os.unlink(outname)
156 except: pass
156 except: pass
157
157
158 filtertable = {
158 filtertable = {
159 'tempfile:': tempfilter,
159 'tempfile:': tempfilter,
160 'pipe:': pipefilter,
160 'pipe:': pipefilter,
161 }
161 }
162
162
163 def filter(s, cmd):
163 def filter(s, cmd):
164 "filter a string through a command that transforms its input to its output"
164 "filter a string through a command that transforms its input to its output"
165 for name, fn in filtertable.iteritems():
165 for name, fn in filtertable.iteritems():
166 if cmd.startswith(name):
166 if cmd.startswith(name):
167 return fn(s, cmd[len(name):].lstrip())
167 return fn(s, cmd[len(name):].lstrip())
168 return pipefilter(s, cmd)
168 return pipefilter(s, cmd)
169
169
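# Editor's standalone sketch of the "pipe:" filter style used by pipefilter():
# feed a string to a shell command on stdin and collect its stdout. The
# 'tr a-z A-Z' command is only an example and assumes a POSIX shell.
import subprocess

def shellfilter(data, cmd):
    p = subprocess.Popen(cmd, shell=True,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _err = p.communicate(data)
    return out

assert shellfilter(b'hello\n', 'tr a-z A-Z') == b'HELLO\n'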
170 def binary(s):
170 def binary(s):
171 """return true if a string is binary data"""
171 """return true if a string is binary data"""
172 return bool(s and '\0' in s)
172 return bool(s and '\0' in s)
173
173
174 def increasingchunks(source, min=1024, max=65536):
174 def increasingchunks(source, min=1024, max=65536):
175 '''return no less than min bytes per chunk while data remains,
175 '''return no less than min bytes per chunk while data remains,
176 doubling min after each chunk until it reaches max'''
176 doubling min after each chunk until it reaches max'''
177 def log2(x):
177 def log2(x):
178 if not x:
178 if not x:
179 return 0
179 return 0
180 i = 0
180 i = 0
181 while x:
181 while x:
182 x >>= 1
182 x >>= 1
183 i += 1
183 i += 1
184 return i - 1
184 return i - 1
185
185
186 buf = []
186 buf = []
187 blen = 0
187 blen = 0
188 for chunk in source:
188 for chunk in source:
189 buf.append(chunk)
189 buf.append(chunk)
190 blen += len(chunk)
190 blen += len(chunk)
191 if blen >= min:
191 if blen >= min:
192 if min < max:
192 if min < max:
193 min = min << 1
193 min = min << 1
194 nmin = 1 << log2(blen)
194 nmin = 1 << log2(blen)
195 if nmin > min:
195 if nmin > min:
196 min = nmin
196 min = nmin
197 if min > max:
197 if min > max:
198 min = max
198 min = max
199 yield ''.join(buf)
199 yield ''.join(buf)
200 blen = 0
200 blen = 0
201 buf = []
201 buf = []
202 if buf:
202 if buf:
203 yield ''.join(buf)
203 yield ''.join(buf)
204
204
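# Editor's demonstration of the chunk-growing behaviour of increasingchunks():
# many tiny input chunks are regrouped into progressively larger output chunks,
# doubling the minimum size up to a maximum. This is a simplified restatement
# of the same idea (it omits the log2 renormalisation), not the function above.
def growingchunks(source, minsize=1024, maxsize=65536):
    buf, blen = [], 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= minsize:
            if minsize < maxsize:
                minsize = min(maxsize, minsize * 2)
            yield b''.join(buf)
            buf, blen = [], 0
    if buf:
        yield b''.join(buf)

sizes = [len(c) for c in growingchunks(b'x' * 100 for _ in range(200))]
assert sizes[0] >= 1024 and sizes[1] >= 2048   # chunks grow as data keeps coming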
205 Abort = error.Abort
205 Abort = error.Abort
206
206
207 def always(fn): return True
207 def always(fn): return True
208 def never(fn): return False
208 def never(fn): return False
209
209
210 def pathto(root, n1, n2):
210 def pathto(root, n1, n2):
211 '''return the relative path from one place to another.
211 '''return the relative path from one place to another.
212 root should use os.sep to separate directories
212 root should use os.sep to separate directories
213 n1 should use os.sep to separate directories
213 n1 should use os.sep to separate directories
214 n2 should use "/" to separate directories
214 n2 should use "/" to separate directories
215 returns an os.sep-separated path.
215 returns an os.sep-separated path.
216
216
217 If n1 is a relative path, it's assumed it's
217 If n1 is a relative path, it's assumed it's
218 relative to root.
218 relative to root.
219 n2 should always be relative to root.
219 n2 should always be relative to root.
220 '''
220 '''
221 if not n1: return localpath(n2)
221 if not n1: return localpath(n2)
222 if os.path.isabs(n1):
222 if os.path.isabs(n1):
223 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
223 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
224 return os.path.join(root, localpath(n2))
224 return os.path.join(root, localpath(n2))
225 n2 = '/'.join((pconvert(root), n2))
225 n2 = '/'.join((pconvert(root), n2))
226 a, b = splitpath(n1), n2.split('/')
226 a, b = splitpath(n1), n2.split('/')
227 a.reverse()
227 a.reverse()
228 b.reverse()
228 b.reverse()
229 while a and b and a[-1] == b[-1]:
229 while a and b and a[-1] == b[-1]:
230 a.pop()
230 a.pop()
231 b.pop()
231 b.pop()
232 b.reverse()
232 b.reverse()
233 return os.sep.join((['..'] * len(a)) + b) or '.'
233 return os.sep.join((['..'] * len(a)) + b) or '.'
234
234
235 def canonpath(root, cwd, myname):
235 def canonpath(root, cwd, myname):
236 """return the canonical path of myname, given cwd and root"""
236 """return the canonical path of myname, given cwd and root"""
237 if root == os.sep:
237 if root == os.sep:
238 rootsep = os.sep
238 rootsep = os.sep
239 elif endswithsep(root):
239 elif endswithsep(root):
240 rootsep = root
240 rootsep = root
241 else:
241 else:
242 rootsep = root + os.sep
242 rootsep = root + os.sep
243 name = myname
243 name = myname
244 if not os.path.isabs(name):
244 if not os.path.isabs(name):
245 name = os.path.join(root, cwd, name)
245 name = os.path.join(root, cwd, name)
246 name = os.path.normpath(name)
246 name = os.path.normpath(name)
247 audit_path = path_auditor(root)
247 audit_path = path_auditor(root)
248 if name != rootsep and name.startswith(rootsep):
248 if name != rootsep and name.startswith(rootsep):
249 name = name[len(rootsep):]
249 name = name[len(rootsep):]
250 audit_path(name)
250 audit_path(name)
251 return pconvert(name)
251 return pconvert(name)
252 elif name == root:
252 elif name == root:
253 return ''
253 return ''
254 else:
254 else:
255 # Determine whether `name' is in the hierarchy at or beneath `root',
255 # Determine whether `name' is in the hierarchy at or beneath `root',
256 # by iterating name=dirname(name) until that causes no change (can't
256 # by iterating name=dirname(name) until that causes no change (can't
257 # check name == '/', because that doesn't work on windows). For each
257 # check name == '/', because that doesn't work on windows). For each
258 # `name', compare dev/inode numbers. If they match, the list `rel'
258 # `name', compare dev/inode numbers. If they match, the list `rel'
259 # holds the reversed list of components making up the relative file
259 # holds the reversed list of components making up the relative file
260 # name we want.
260 # name we want.
261 root_st = os.stat(root)
261 root_st = os.stat(root)
262 rel = []
262 rel = []
263 while True:
263 while True:
264 try:
264 try:
265 name_st = os.stat(name)
265 name_st = os.stat(name)
266 except OSError:
266 except OSError:
267 break
267 break
268 if samestat(name_st, root_st):
268 if samestat(name_st, root_st):
269 if not rel:
269 if not rel:
270 # name was actually the same as root (maybe a symlink)
270 # name was actually the same as root (maybe a symlink)
271 return ''
271 return ''
272 rel.reverse()
272 rel.reverse()
273 name = os.path.join(*rel)
273 name = os.path.join(*rel)
274 audit_path(name)
274 audit_path(name)
275 return pconvert(name)
275 return pconvert(name)
276 dirname, basename = os.path.split(name)
276 dirname, basename = os.path.split(name)
277 rel.append(basename)
277 rel.append(basename)
278 if dirname == name:
278 if dirname == name:
279 break
279 break
280 name = dirname
280 name = dirname
281
281
282 raise Abort('%s not under root' % myname)
282 raise Abort('%s not under root' % myname)
283
283
284 _hgexecutable = None
284 _hgexecutable = None
285
285
286 def main_is_frozen():
286 def main_is_frozen():
287 """return True if we are a frozen executable.
287 """return True if we are a frozen executable.
288
288
289 The code supports py2exe (most common, Windows only) and tools/freeze
289 The code supports py2exe (most common, Windows only) and tools/freeze
290 (portable, not much used).
290 (portable, not much used).
291 """
291 """
292 return (hasattr(sys, "frozen") or # new py2exe
292 return (hasattr(sys, "frozen") or # new py2exe
293 hasattr(sys, "importers") or # old py2exe
293 hasattr(sys, "importers") or # old py2exe
294 imp.is_frozen("__main__")) # tools/freeze
294 imp.is_frozen("__main__")) # tools/freeze
295
295
296 def hgexecutable():
296 def hgexecutable():
297 """return location of the 'hg' executable.
297 """return location of the 'hg' executable.
298
298
299 Defaults to $HG or 'hg' in the search path.
299 Defaults to $HG or 'hg' in the search path.
300 """
300 """
301 if _hgexecutable is None:
301 if _hgexecutable is None:
302 hg = os.environ.get('HG')
302 hg = os.environ.get('HG')
303 if hg:
303 if hg:
304 set_hgexecutable(hg)
304 set_hgexecutable(hg)
305 elif main_is_frozen():
305 elif main_is_frozen():
306 set_hgexecutable(sys.executable)
306 set_hgexecutable(sys.executable)
307 else:
307 else:
308 set_hgexecutable(find_exe('hg') or 'hg')
308 set_hgexecutable(find_exe('hg') or 'hg')
309 return _hgexecutable
309 return _hgexecutable
310
310
311 def set_hgexecutable(path):
311 def set_hgexecutable(path):
312 """set location of the 'hg' executable"""
312 """set location of the 'hg' executable"""
313 global _hgexecutable
313 global _hgexecutable
314 _hgexecutable = path
314 _hgexecutable = path
315
315
316 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
316 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
317 '''enhanced shell command execution.
317 '''enhanced shell command execution.
318 run with environment maybe modified, maybe in different dir.
318 run with environment maybe modified, maybe in different dir.
319
319
320 if command fails and onerr is None, return status. if ui object,
320 if command fails and onerr is None, return status. if ui object,
321 print error message and return status, else raise onerr object as
321 print error message and return status, else raise onerr object as
322 exception.'''
322 exception.'''
323 def py2shell(val):
323 def py2shell(val):
324 'convert python object into string that is useful to shell'
324 'convert python object into string that is useful to shell'
325 if val is None or val is False:
325 if val is None or val is False:
326 return '0'
326 return '0'
327 if val is True:
327 if val is True:
328 return '1'
328 return '1'
329 return str(val)
329 return str(val)
330 oldenv = {}
330 oldenv = {}
331 for k in environ:
331 for k in environ:
332 oldenv[k] = os.environ.get(k)
332 oldenv[k] = os.environ.get(k)
333 if cwd is not None:
333 if cwd is not None:
334 oldcwd = os.getcwd()
334 oldcwd = os.getcwd()
335 origcmd = cmd
335 origcmd = cmd
336 if os.name == 'nt':
336 if os.name == 'nt':
337 cmd = '"%s"' % cmd
337 cmd = '"%s"' % cmd
338 try:
338 try:
339 for k, v in environ.iteritems():
339 for k, v in environ.iteritems():
340 os.environ[k] = py2shell(v)
340 os.environ[k] = py2shell(v)
341 os.environ['HG'] = hgexecutable()
341 os.environ['HG'] = hgexecutable()
342 if cwd is not None and oldcwd != cwd:
342 if cwd is not None and oldcwd != cwd:
343 os.chdir(cwd)
343 os.chdir(cwd)
344 rc = os.system(cmd)
344 rc = os.system(cmd)
345 if sys.platform == 'OpenVMS' and rc & 1:
345 if sys.platform == 'OpenVMS' and rc & 1:
346 rc = 0
346 rc = 0
347 if rc and onerr:
347 if rc and onerr:
348 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
348 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
349 explain_exit(rc)[0])
349 explain_exit(rc)[0])
350 if errprefix:
350 if errprefix:
351 errmsg = '%s: %s' % (errprefix, errmsg)
351 errmsg = '%s: %s' % (errprefix, errmsg)
352 try:
352 try:
353 onerr.warn(errmsg + '\n')
353 onerr.warn(errmsg + '\n')
354 except AttributeError:
354 except AttributeError:
355 raise onerr(errmsg)
355 raise onerr(errmsg)
356 return rc
356 return rc
357 finally:
357 finally:
358 for k, v in oldenv.iteritems():
358 for k, v in oldenv.iteritems():
359 if v is None:
359 if v is None:
360 del os.environ[k]
360 del os.environ[k]
361 else:
361 else:
362 os.environ[k] = v
362 os.environ[k] = v
363 if cwd is not None and oldcwd != cwd:
363 if cwd is not None and oldcwd != cwd:
364 os.chdir(oldcwd)
364 os.chdir(oldcwd)
365
365
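# Editor's sketch of the pattern system() implements above: run a shell command
# with a temporarily extended environment and an optional working directory,
# here done with subprocess instead of mutating os.environ around os.system().
# The FOO variable and the test command are examples and assume a POSIX shell.
import os, subprocess

def run(cmd, environ=None, cwd=None):
    env = dict(os.environ)
    env.update(environ or {})
    return subprocess.call(cmd, shell=True, env=env, cwd=cwd)

rc = run('test "$FOO" = bar', environ={'FOO': 'bar'})
assert rc == 0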
366 def checksignature(func):
366 def checksignature(func):
367 '''wrap a function with code to check for calling errors'''
367 '''wrap a function with code to check for calling errors'''
368 def check(*args, **kwargs):
368 def check(*args, **kwargs):
369 try:
369 try:
370 return func(*args, **kwargs)
370 return func(*args, **kwargs)
371 except TypeError:
371 except TypeError:
372 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
372 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
373 raise error.SignatureError
373 raise error.SignatureError
374 raise
374 raise
375
375
376 return check
376 return check
377
377
378 # os.path.lexists is not available on python2.3
378 # os.path.lexists is not available on python2.3
379 def lexists(filename):
379 def lexists(filename):
380 "test whether a file with this name exists. does not follow symlinks"
380 "test whether a file with this name exists. does not follow symlinks"
381 try:
381 try:
382 os.lstat(filename)
382 os.lstat(filename)
383 except:
383 except:
384 return False
384 return False
385 return True
385 return True
386
386
387 def rename(src, dst):
387 def rename(src, dst):
388 """forcibly rename a file"""
388 """forcibly rename a file"""
389 try:
389 try:
390 os.rename(src, dst)
390 os.rename(src, dst)
391 except OSError, err: # FIXME: check err (EEXIST ?)
391 except OSError, err: # FIXME: check err (EEXIST ?)
392
392
393 # On windows, rename to existing file is not allowed, so we
393 # On windows, rename to existing file is not allowed, so we
394 # must delete destination first. But if a file is open, unlink
394 # must delete destination first. But if a file is open, unlink
395 # schedules it for delete but does not delete it. Rename
395 # schedules it for delete but does not delete it. Rename
396 # happens immediately even for open files, so we rename
396 # happens immediately even for open files, so we rename
397 # destination to a temporary name, then delete that. Then
397 # destination to a temporary name, then delete that. Then
398 # rename is safe to do.
398 # rename is safe to do.
399 # The temporary name is chosen at random to avoid the situation
399 # The temporary name is chosen at random to avoid the situation
400 # where a file is left lying around from a previous aborted run.
400 # where a file is left lying around from a previous aborted run.
401 # The usual race condition this introduces can't be avoided as
401 # The usual race condition this introduces can't be avoided as
402 # we need the name to rename into, and not the file itself. Due
402 # we need the name to rename into, and not the file itself. Due
403 # to the nature of the operation however, any races will at worst
403 # to the nature of the operation however, any races will at worst
404 # lead to the rename failing and the current operation aborting.
404 # lead to the rename failing and the current operation aborting.
405
405
406 def tempname(prefix):
406 def tempname(prefix):
407 for tries in xrange(10):
407 for tries in xrange(10):
408 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
408 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
409 if not os.path.exists(temp):
409 if not os.path.exists(temp):
410 return temp
410 return temp
411 raise IOError, (errno.EEXIST, "No usable temporary filename found")
411 raise IOError, (errno.EEXIST, "No usable temporary filename found")
412
412
413 temp = tempname(dst)
413 temp = tempname(dst)
414 os.rename(dst, temp)
414 os.rename(dst, temp)
415 os.unlink(temp)
415 os.unlink(temp)
416 os.rename(src, dst)
416 os.rename(src, dst)
417
417
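# Editor's minimal demo of the "replace the destination" behaviour rename()
# implements by hand for Windows: on POSIX, os.rename() already overwrites an
# existing destination, which is what the assertion below relies on. The
# temporary files exist only for this demonstration.
import os, tempfile

d = tempfile.mkdtemp()
src, dst = os.path.join(d, 'a'), os.path.join(d, 'b')
with open(src, 'w') as f:
    f.write('new')
with open(dst, 'w') as f:
    f.write('old')
os.rename(src, dst)   # on Windows this needs the rename-to-temp-then-unlink dance above
with open(dst) as f:
    assert f.read() == 'new'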
418 def unlink(f):
418 def unlink(f):
419 """unlink and remove the directory if it is empty"""
419 """unlink and remove the directory if it is empty"""
420 os.unlink(f)
420 os.unlink(f)
421 # try removing directories that might now be empty
421 # try removing directories that might now be empty
422 try:
422 try:
423 os.removedirs(os.path.dirname(f))
423 os.removedirs(os.path.dirname(f))
424 except OSError:
424 except OSError:
425 pass
425 pass
426
426
427 def copyfile(src, dest):
427 def copyfile(src, dest):
428 "copy a file, preserving mode and atime/mtime"
428 "copy a file, preserving mode and atime/mtime"
429 if os.path.islink(src):
429 if os.path.islink(src):
430 try:
430 try:
431 os.unlink(dest)
431 os.unlink(dest)
432 except:
432 except:
433 pass
433 pass
434 os.symlink(os.readlink(src), dest)
434 os.symlink(os.readlink(src), dest)
435 else:
435 else:
436 try:
436 try:
437 shutil.copyfile(src, dest)
437 shutil.copyfile(src, dest)
438 shutil.copystat(src, dest)
438 shutil.copystat(src, dest)
439 except shutil.Error, inst:
439 except shutil.Error, inst:
440 raise Abort(str(inst))
440 raise Abort(str(inst))
441
441
442 def copyfiles(src, dst, hardlink=None):
442 def copyfiles(src, dst, hardlink=None):
443 """Copy a directory tree using hardlinks if possible"""
443 """Copy a directory tree using hardlinks if possible"""
444
444
445 if hardlink is None:
445 if hardlink is None:
446 hardlink = (os.stat(src).st_dev ==
446 hardlink = (os.stat(src).st_dev ==
447 os.stat(os.path.dirname(dst)).st_dev)
447 os.stat(os.path.dirname(dst)).st_dev)
448
448
449 if os.path.isdir(src):
449 if os.path.isdir(src):
450 os.mkdir(dst)
450 os.mkdir(dst)
451 for name, kind in osutil.listdir(src):
451 for name, kind in osutil.listdir(src):
452 srcname = os.path.join(src, name)
452 srcname = os.path.join(src, name)
453 dstname = os.path.join(dst, name)
453 dstname = os.path.join(dst, name)
454 copyfiles(srcname, dstname, hardlink)
454 copyfiles(srcname, dstname, hardlink)
455 else:
455 else:
456 if hardlink:
456 if hardlink:
457 try:
457 try:
458 os_link(src, dst)
458 os_link(src, dst)
459 except (IOError, OSError):
459 except (IOError, OSError):
460 hardlink = False
460 hardlink = False
461 shutil.copy(src, dst)
461 shutil.copy(src, dst)
462 else:
462 else:
463 shutil.copy(src, dst)
463 shutil.copy(src, dst)
464
464
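# Editor's standalone sketch of the hardlink-or-copy fallback used by
# copyfiles(): try os.link() first and fall back to a plain copy when hardlinks
# are unsupported (no os.link, cross-device, FAT, ...). Paths below are created
# only for the demonstration.
import os, shutil, tempfile

def link_or_copy(src, dst):
    try:
        os.link(src, dst)
    except (AttributeError, OSError):
        shutil.copy(src, dst)

d = tempfile.mkdtemp()
src = os.path.join(d, 'src')
with open(src, 'w') as f:
    f.write('data')
link_or_copy(src, os.path.join(d, 'dst'))
assert os.path.exists(os.path.join(d, 'dst'))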
465 class path_auditor(object):
465 class path_auditor(object):
466 '''ensure that a filesystem path contains no banned components.
466 '''ensure that a filesystem path contains no banned components.
467 the following properties of a path are checked:
467 the following properties of a path are checked:
468
468
469 - under top-level .hg
469 - under top-level .hg
470 - starts at the root of a windows drive
470 - starts at the root of a windows drive
471 - contains ".."
471 - contains ".."
472 - traverses a symlink (e.g. a/symlink_here/b)
472 - traverses a symlink (e.g. a/symlink_here/b)
473 - inside a nested repository'''
473 - inside a nested repository'''
474
474
475 def __init__(self, root):
475 def __init__(self, root):
476 self.audited = set()
476 self.audited = set()
477 self.auditeddir = set()
477 self.auditeddir = set()
478 self.root = root
478 self.root = root
479
479
480 def __call__(self, path):
480 def __call__(self, path):
481 if path in self.audited:
481 if path in self.audited:
482 return
482 return
483 normpath = os.path.normcase(path)
483 normpath = os.path.normcase(path)
484 parts = splitpath(normpath)
484 parts = splitpath(normpath)
485 if (os.path.splitdrive(path)[0]
485 if (os.path.splitdrive(path)[0]
486 or parts[0].lower() in ('.hg', '.hg.', '')
486 or parts[0].lower() in ('.hg', '.hg.', '')
487 or os.pardir in parts):
487 or os.pardir in parts):
488 raise Abort(_("path contains illegal component: %s") % path)
488 raise Abort(_("path contains illegal component: %s") % path)
489 if '.hg' in path.lower():
489 if '.hg' in path.lower():
490 lparts = [p.lower() for p in parts]
490 lparts = [p.lower() for p in parts]
491 for p in '.hg', '.hg.':
491 for p in '.hg', '.hg.':
492 if p in lparts[1:]:
492 if p in lparts[1:]:
493 pos = lparts.index(p)
493 pos = lparts.index(p)
494 base = os.path.join(*parts[:pos])
494 base = os.path.join(*parts[:pos])
495 raise Abort(_('path %r is inside repo %r') % (path, base))
495 raise Abort(_('path %r is inside repo %r') % (path, base))
496 def check(prefix):
496 def check(prefix):
497 curpath = os.path.join(self.root, prefix)
497 curpath = os.path.join(self.root, prefix)
498 try:
498 try:
499 st = os.lstat(curpath)
499 st = os.lstat(curpath)
500 except OSError, err:
500 except OSError, err:
501 # EINVAL can be raised for invalid path syntax under win32.
501 # EINVAL can be raised for invalid path syntax under win32.
502 # Such errors must be ignored, since patterns can be checked too.
502 # Such errors must be ignored, since patterns can be checked too.
503 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
503 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
504 raise
504 raise
505 else:
505 else:
506 if stat.S_ISLNK(st.st_mode):
506 if stat.S_ISLNK(st.st_mode):
507 raise Abort(_('path %r traverses symbolic link %r') %
507 raise Abort(_('path %r traverses symbolic link %r') %
508 (path, prefix))
508 (path, prefix))
509 elif (stat.S_ISDIR(st.st_mode) and
509 elif (stat.S_ISDIR(st.st_mode) and
510 os.path.isdir(os.path.join(curpath, '.hg'))):
510 os.path.isdir(os.path.join(curpath, '.hg'))):
511 raise Abort(_('path %r is inside repo %r') %
511 raise Abort(_('path %r is inside repo %r') %
512 (path, prefix))
512 (path, prefix))
513 parts.pop()
513 parts.pop()
514 prefixes = []
514 prefixes = []
515 while parts:
515 while parts:
516 prefix = os.sep.join(parts)
516 prefix = os.sep.join(parts)
517 if prefix in self.auditeddir:
517 if prefix in self.auditeddir:
518 break
518 break
519 check(prefix)
519 check(prefix)
520 prefixes.append(prefix)
520 prefixes.append(prefix)
521 parts.pop()
521 parts.pop()
522
522
523 self.audited.add(path)
523 self.audited.add(path)
524 # only add prefixes to the cache after checking everything: we don't
524 # only add prefixes to the cache after checking everything: we don't
525 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
525 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
526 self.auditeddir.update(prefixes)
526 self.auditeddir.update(prefixes)
527
527
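# Editor's reduced sketch of the per-path checks path_auditor applies: reject
# absolute or drive-qualified names, ".." components, and anything under a
# top-level ".hg". It assumes '/'-separated repository paths and omits the
# symlink and nested-repository checks that the real class performs.
import os

def simpleaudit(path):
    if os.path.isabs(path) or os.path.splitdrive(path)[0]:
        raise ValueError('absolute path: %s' % path)
    parts = path.split('/')
    if os.pardir in parts or parts[0].lower() in ('.hg', '.hg.', ''):
        raise ValueError('illegal component in: %s' % path)
    return True

assert simpleaudit('foo/bar')
for bad in ('../etc/passwd', '.hg/hgrc', '/tmp/x'):
    try:
        simpleaudit(bad)
    except ValueError:
        pass
    else:
        raise AssertionError(bad)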
528 def nlinks(pathname):
528 def nlinks(pathname):
529 """Return number of hardlinks for the given file."""
529 """Return number of hardlinks for the given file."""
530 return os.lstat(pathname).st_nlink
530 return os.lstat(pathname).st_nlink
531
531
532 if hasattr(os, 'link'):
532 if hasattr(os, 'link'):
533 os_link = os.link
533 os_link = os.link
534 else:
534 else:
535 def os_link(src, dst):
535 def os_link(src, dst):
536 raise OSError(0, _("Hardlinks not supported"))
536 raise OSError(0, _("Hardlinks not supported"))
537
537
538 def lookup_reg(key, name=None, scope=None):
538 def lookup_reg(key, name=None, scope=None):
539 return None
539 return None
540
540
541 if os.name == 'nt':
541 if os.name == 'nt':
542 from windows import *
542 from windows import *
543 else:
543 else:
544 from posix import *
544 from posix import *
545
545
546 def makelock(info, pathname):
546 def makelock(info, pathname):
547 try:
547 try:
548 return os.symlink(info, pathname)
548 return os.symlink(info, pathname)
549 except OSError, why:
549 except OSError, why:
550 if why.errno == errno.EEXIST:
550 if why.errno == errno.EEXIST:
551 raise
551 raise
552 except AttributeError: # no symlink in os
552 except AttributeError: # no symlink in os
553 pass
553 pass
554
554
555 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
555 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
556 os.write(ld, info)
556 os.write(ld, info)
557 os.close(ld)
557 os.close(ld)
558
558
559 def readlock(pathname):
559 def readlock(pathname):
560 try:
560 try:
561 return os.readlink(pathname)
561 return os.readlink(pathname)
562 except OSError, why:
562 except OSError, why:
563 if why.errno not in (errno.EINVAL, errno.ENOSYS):
563 if why.errno not in (errno.EINVAL, errno.ENOSYS):
564 raise
564 raise
565 except AttributeError: # no symlink in os
565 except AttributeError: # no symlink in os
566 pass
566 pass
567 return posixfile(pathname).read()
567 return posixfile(pathname).read()
568
568
569 def fstat(fp):
569 def fstat(fp):
570 '''stat file object that may not have fileno method.'''
570 '''stat file object that may not have fileno method.'''
571 try:
571 try:
572 return os.fstat(fp.fileno())
572 return os.fstat(fp.fileno())
573 except AttributeError:
573 except AttributeError:
574 return os.stat(fp.name)
574 return os.stat(fp.name)
575
575
576 # File system features
576 # File system features
577
577
578 def checkcase(path):
578 def checkcase(path):
579 """
579 """
580 Check whether the given path is on a case-sensitive filesystem
580 Check whether the given path is on a case-sensitive filesystem
581
581
582 Requires a path (like /foo/.hg) ending with a foldable final
582 Requires a path (like /foo/.hg) ending with a foldable final
583 directory component.
583 directory component.
584 """
584 """
585 s1 = os.stat(path)
585 s1 = os.stat(path)
586 d, b = os.path.split(path)
586 d, b = os.path.split(path)
587 p2 = os.path.join(d, b.upper())
587 p2 = os.path.join(d, b.upper())
588 if path == p2:
588 if path == p2:
589 p2 = os.path.join(d, b.lower())
589 p2 = os.path.join(d, b.lower())
590 try:
590 try:
591 s2 = os.stat(p2)
591 s2 = os.stat(p2)
592 if s2 == s1:
592 if s2 == s1:
593 return False
593 return False
594 return True
594 return True
595 except:
595 except:
596 return True
596 return True
597
597
598 _fspathcache = {}
598 _fspathcache = {}
599 def fspath(name, root):
599 def fspath(name, root):
600 '''Get name in the case stored in the filesystem
600 '''Get name in the case stored in the filesystem
601
601
602 The name is either relative to root, or it is an absolute path starting
602 The name is either relative to root, or it is an absolute path starting
603 with root. Note that this function is unnecessary, and should not be
603 with root. Note that this function is unnecessary, and should not be
604 called, for case-sensitive filesystems (simply because it's expensive).
604 called, for case-sensitive filesystems (simply because it's expensive).
605 '''
605 '''
606 # If name is absolute, make it relative
606 # If name is absolute, make it relative
607 if name.lower().startswith(root.lower()):
607 if name.lower().startswith(root.lower()):
608 l = len(root)
608 l = len(root)
609 if name[l] == os.sep or name[l] == os.altsep:
609 if name[l] == os.sep or name[l] == os.altsep:
610 l = l + 1
610 l = l + 1
611 name = name[l:]
611 name = name[l:]
612
612
613 if not os.path.exists(os.path.join(root, name)):
613 if not os.path.exists(os.path.join(root, name)):
614 return None
614 return None
615
615
616 seps = os.sep
616 seps = os.sep
617 if os.altsep:
617 if os.altsep:
618 seps = seps + os.altsep
618 seps = seps + os.altsep
619 # Protect backslashes. This gets silly very quickly.
619 # Protect backslashes. This gets silly very quickly.
620 seps = seps.replace('\\','\\\\')
620 seps = seps.replace('\\','\\\\')
621 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
621 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
622 dir = os.path.normcase(os.path.normpath(root))
622 dir = os.path.normcase(os.path.normpath(root))
623 result = []
623 result = []
624 for part, sep in pattern.findall(name):
624 for part, sep in pattern.findall(name):
625 if sep:
625 if sep:
626 result.append(sep)
626 result.append(sep)
627 continue
627 continue
628
628
629 if dir not in _fspathcache:
629 if dir not in _fspathcache:
630 _fspathcache[dir] = os.listdir(dir)
630 _fspathcache[dir] = os.listdir(dir)
631 contents = _fspathcache[dir]
631 contents = _fspathcache[dir]
632
632
633 lpart = part.lower()
633 lpart = part.lower()
634 for n in contents:
634 for n in contents:
635 if n.lower() == lpart:
635 if n.lower() == lpart:
636 result.append(n)
636 result.append(n)
637 break
637 break
638 else:
638 else:
639 # Cannot happen, as the file exists!
639 # Cannot happen, as the file exists!
640 result.append(part)
640 result.append(part)
641 dir = os.path.join(dir, lpart)
641 dir = os.path.join(dir, lpart)
642
642
643 return ''.join(result)
643 return ''.join(result)
644
644
645 def checkexec(path):
645 def checkexec(path):
646 """
646 """
647 Check whether the given path is on a filesystem with UNIX-like exec flags
647 Check whether the given path is on a filesystem with UNIX-like exec flags
648
648
649 Requires a directory (like /foo/.hg)
649 Requires a directory (like /foo/.hg)
650 """
650 """
651
651
652 # VFAT on some Linux versions can flip mode but it doesn't persist
652 # VFAT on some Linux versions can flip mode but it doesn't persist
653 # across a FS remount. Frequently we can detect it if files are created
653 # across a FS remount. Frequently we can detect it if files are created
654 # with exec bit on.
654 # with exec bit on.
655
655
656 try:
656 try:
657 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
657 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
658 fh, fn = tempfile.mkstemp("", "", path)
658 fh, fn = tempfile.mkstemp("", "", path)
659 try:
659 try:
660 os.close(fh)
660 os.close(fh)
661 m = os.stat(fn).st_mode & 0777
661 m = os.stat(fn).st_mode & 0777
662 new_file_has_exec = m & EXECFLAGS
662 new_file_has_exec = m & EXECFLAGS
663 os.chmod(fn, m ^ EXECFLAGS)
663 os.chmod(fn, m ^ EXECFLAGS)
664 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
664 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
665 finally:
665 finally:
666 os.unlink(fn)
666 os.unlink(fn)
667 except (IOError, OSError):
667 except (IOError, OSError):
668 # we don't care, the user probably won't be able to commit anyway
668 # we don't care, the user probably won't be able to commit anyway
669 return False
669 return False
670 return not (new_file_has_exec or exec_flags_cannot_flip)
670 return not (new_file_has_exec or exec_flags_cannot_flip)
671
671
672 def checklink(path):
672 def checklink(path):
673 """check whether the given path is on a symlink-capable filesystem"""
673 """check whether the given path is on a symlink-capable filesystem"""
674 # mktemp is not racy because symlink creation will fail if the
674 # mktemp is not racy because symlink creation will fail if the
675 # file already exists
675 # file already exists
676 name = tempfile.mktemp(dir=path)
676 name = tempfile.mktemp(dir=path)
677 try:
677 try:
678 os.symlink(".", name)
678 os.symlink(".", name)
679 os.unlink(name)
679 os.unlink(name)
680 return True
680 return True
681 except (OSError, AttributeError):
681 except (OSError, AttributeError):
682 return False
682 return False
683
683
684 def needbinarypatch():
684 def needbinarypatch():
685 """return True if patches should be applied in binary mode by default."""
685 """return True if patches should be applied in binary mode by default."""
686 return os.name == 'nt'
686 return os.name == 'nt'
687
687
688 def endswithsep(path):
688 def endswithsep(path):
689 '''Check path ends with os.sep or os.altsep.'''
689 '''Check path ends with os.sep or os.altsep.'''
690 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
690 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
691
691
692 def splitpath(path):
692 def splitpath(path):
693 '''Split path by os.sep.
693 '''Split path by os.sep.
694 Note that this function does not use os.altsep because it is
694 Note that this function does not use os.altsep because it is
695 meant as an alternative to a simple "xxx.split(os.sep)".
695 meant as an alternative to a simple "xxx.split(os.sep)".
696 It is recommended to use os.path.normpath() before using this
696 It is recommended to use os.path.normpath() before using this
697 function if needed.'''
697 function if needed.'''
698 return path.split(os.sep)
698 return path.split(os.sep)
699
699
700 def gui():
700 def gui():
701 '''Are we running in a GUI?'''
701 '''Are we running in a GUI?'''
702 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
702 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
703
703
704 def mktempcopy(name, emptyok=False, createmode=None):
704 def mktempcopy(name, emptyok=False, createmode=None):
705 """Create a temporary file with the same contents from name
705 """Create a temporary file with the same contents from name
706
706
707 The permission bits are copied from the original file.
707 The permission bits are copied from the original file.
708
708
709 If the temporary file is going to be truncated immediately, you
709 If the temporary file is going to be truncated immediately, you
710 can use emptyok=True as an optimization.
710 can use emptyok=True as an optimization.
711
711
712 Returns the name of the temporary file.
712 Returns the name of the temporary file.
713 """
713 """
714 d, fn = os.path.split(name)
714 d, fn = os.path.split(name)
715 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
715 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
716 os.close(fd)
716 os.close(fd)
717 # Temporary files are created with mode 0600, which is usually not
717 # Temporary files are created with mode 0600, which is usually not
718 # what we want. If the original file already exists, just copy
718 # what we want. If the original file already exists, just copy
719 # its mode. Otherwise, manually obey umask.
719 # its mode. Otherwise, manually obey umask.
720 try:
720 try:
721 st_mode = os.lstat(name).st_mode & 0777
721 st_mode = os.lstat(name).st_mode & 0777
722 except OSError, inst:
722 except OSError, inst:
723 if inst.errno != errno.ENOENT:
723 if inst.errno != errno.ENOENT:
724 raise
724 raise
725 st_mode = createmode
725 st_mode = createmode
726 if st_mode is None:
726 if st_mode is None:
727 st_mode = ~umask
727 st_mode = ~umask
728 st_mode &= 0666
728 st_mode &= 0666
729 os.chmod(temp, st_mode)
729 os.chmod(temp, st_mode)
730 if emptyok:
730 if emptyok:
731 return temp
731 return temp
732 try:
732 try:
733 try:
733 try:
734 ifp = posixfile(name, "rb")
734 ifp = posixfile(name, "rb")
735 except IOError, inst:
735 except IOError, inst:
736 if inst.errno == errno.ENOENT:
736 if inst.errno == errno.ENOENT:
737 return temp
737 return temp
738 if not getattr(inst, 'filename', None):
738 if not getattr(inst, 'filename', None):
739 inst.filename = name
739 inst.filename = name
740 raise
740 raise
741 ofp = posixfile(temp, "wb")
741 ofp = posixfile(temp, "wb")
742 for chunk in filechunkiter(ifp):
742 for chunk in filechunkiter(ifp):
743 ofp.write(chunk)
743 ofp.write(chunk)
744 ifp.close()
744 ifp.close()
745 ofp.close()
745 ofp.close()
746 except:
746 except:
747 try: os.unlink(temp)
747 try: os.unlink(temp)
748 except: pass
748 except: pass
749 raise
749 raise
750 return temp
750 return temp
751
751
752 class atomictempfile:
752 class atomictempfile(object):
753 """file-like object that atomically updates a file
753 """file-like object that atomically updates a file
754
754
755 All writes will be redirected to a temporary copy of the original
755 All writes will be redirected to a temporary copy of the original
756 file. When rename is called, the copy is renamed to the original
756 file. When rename is called, the copy is renamed to the original
757 name, making the changes visible.
757 name, making the changes visible.
758 """
758 """
759 def __init__(self, name, mode, createmode):
759 def __init__(self, name, mode, createmode):
760 self.__name = name
760 self.__name = name
761 self._fp = None
761 self._fp = None
762 self.temp = mktempcopy(name, emptyok=('w' in mode),
762 self.temp = mktempcopy(name, emptyok=('w' in mode),
763 createmode=createmode)
763 createmode=createmode)
764 self._fp = posixfile(self.temp, mode)
764 self._fp = posixfile(self.temp, mode)
765
765
766 def __getattr__(self, name):
766 def __getattr__(self, name):
767 return getattr(self._fp, name)
767 return getattr(self._fp, name)
768
768
769 def rename(self):
769 def rename(self):
770 if not self.closed:
770 if not self.closed:
771 self._fp.close()
771 self._fp.close()
772 rename(self.temp, localpath(self.__name))
772 rename(self.temp, localpath(self.__name))
773
773
774 def __del__(self):
774 def __del__(self):
775 if not self.closed:
775 if not self.closed:
776 try:
776 try:
777 os.unlink(self.temp)
777 os.unlink(self.temp)
778 except: pass
778 except: pass
779 if self._fp:
779 if self._fp:
780 self._fp.close()
780 self._fp.close()
781
781
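# Editor's condensed sketch of the atomic-update pattern behind atomictempfile:
# write to a temporary file in the same directory, then rename it over the
# target so readers never see a half-written file. The atomicwrite name and the
# 'config' target are examples only; rename is atomic on POSIX, while Windows
# needs the extra dance implemented above.
import os, tempfile

def atomicwrite(name, data):
    d = os.path.dirname(name) or '.'
    fd, tmp = tempfile.mkstemp(prefix='.tmp-', dir=d)
    try:
        os.write(fd, data)
        os.close(fd)
        os.rename(tmp, name)
    except Exception:
        os.unlink(tmp)
        raise

target = os.path.join(tempfile.mkdtemp(), 'config')
atomicwrite(target, b'key = value\n')
with open(target, 'rb') as f:
    assert f.read() == b'key = value\n'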
782 def makedirs(name, mode=None):
782 def makedirs(name, mode=None):
783 """recursive directory creation with parent mode inheritance"""
783 """recursive directory creation with parent mode inheritance"""
784 try:
784 try:
785 os.mkdir(name)
785 os.mkdir(name)
786 if mode is not None:
786 if mode is not None:
787 os.chmod(name, mode)
787 os.chmod(name, mode)
788 return
788 return
789 except OSError, err:
789 except OSError, err:
790 if err.errno == errno.EEXIST:
790 if err.errno == errno.EEXIST:
791 return
791 return
792 if err.errno != errno.ENOENT:
792 if err.errno != errno.ENOENT:
793 raise
793 raise
794 parent = os.path.abspath(os.path.dirname(name))
794 parent = os.path.abspath(os.path.dirname(name))
795 makedirs(parent, mode)
795 makedirs(parent, mode)
796 makedirs(name, mode)
796 makedirs(name, mode)
797
797
798 class opener(object):
798 class opener(object):
799 """Open files relative to a base directory
799 """Open files relative to a base directory
800
800
801 This class is used to hide the details of COW semantics and
801 This class is used to hide the details of COW semantics and
802 remote file access from higher level code.
802 remote file access from higher level code.
803 """
803 """
804 def __init__(self, base, audit=True):
804 def __init__(self, base, audit=True):
805 self.base = base
805 self.base = base
806 if audit:
806 if audit:
807 self.audit_path = path_auditor(base)
807 self.audit_path = path_auditor(base)
808 else:
808 else:
809 self.audit_path = always
809 self.audit_path = always
810 self.createmode = None
810 self.createmode = None
811
811
812 def __getattr__(self, name):
812 def __getattr__(self, name):
813 if name == '_can_symlink':
813 if name == '_can_symlink':
814 self._can_symlink = checklink(self.base)
814 self._can_symlink = checklink(self.base)
815 return self._can_symlink
815 return self._can_symlink
816 raise AttributeError(name)
816 raise AttributeError(name)
817
817
818 def _fixfilemode(self, name):
818 def _fixfilemode(self, name):
819 if self.createmode is None:
819 if self.createmode is None:
820 return
820 return
821 os.chmod(name, self.createmode & 0666)
821 os.chmod(name, self.createmode & 0666)
822
822
823 def __call__(self, path, mode="r", text=False, atomictemp=False):
823 def __call__(self, path, mode="r", text=False, atomictemp=False):
824 self.audit_path(path)
824 self.audit_path(path)
825 f = os.path.join(self.base, path)
825 f = os.path.join(self.base, path)
826
826
827 if not text and "b" not in mode:
827 if not text and "b" not in mode:
828 mode += "b" # for that other OS
828 mode += "b" # for that other OS
829
829
830 nlink = -1
830 nlink = -1
831 if mode not in ("r", "rb"):
831 if mode not in ("r", "rb"):
832 try:
832 try:
833 nlink = nlinks(f)
833 nlink = nlinks(f)
834 except OSError:
834 except OSError:
835 nlink = 0
835 nlink = 0
836 d = os.path.dirname(f)
836 d = os.path.dirname(f)
837 if not os.path.isdir(d):
837 if not os.path.isdir(d):
838 makedirs(d, self.createmode)
838 makedirs(d, self.createmode)
839 if atomictemp:
839 if atomictemp:
840 return atomictempfile(f, mode, self.createmode)
840 return atomictempfile(f, mode, self.createmode)
841 if nlink > 1:
841 if nlink > 1:
842 rename(mktempcopy(f), f)
842 rename(mktempcopy(f), f)
843 fp = posixfile(f, mode)
843 fp = posixfile(f, mode)
844 if nlink == 0:
844 if nlink == 0:
845 self._fixfilemode(f)
845 self._fixfilemode(f)
846 return fp
846 return fp
847
847
848 def symlink(self, src, dst):
848 def symlink(self, src, dst):
849 self.audit_path(dst)
849 self.audit_path(dst)
850 linkname = os.path.join(self.base, dst)
850 linkname = os.path.join(self.base, dst)
851 try:
851 try:
852 os.unlink(linkname)
852 os.unlink(linkname)
853 except OSError:
853 except OSError:
854 pass
854 pass
855
855
856 dirname = os.path.dirname(linkname)
856 dirname = os.path.dirname(linkname)
857 if not os.path.exists(dirname):
857 if not os.path.exists(dirname):
858 makedirs(dirname, self.createmode)
858 makedirs(dirname, self.createmode)
859
859
860 if self._can_symlink:
860 if self._can_symlink:
861 try:
861 try:
862 os.symlink(src, linkname)
862 os.symlink(src, linkname)
863 except OSError, err:
863 except OSError, err:
864 raise OSError(err.errno, _('could not symlink to %r: %s') %
864 raise OSError(err.errno, _('could not symlink to %r: %s') %
865 (src, err.strerror), linkname)
865 (src, err.strerror), linkname)
866 else:
866 else:
867 f = self(dst, "w")
867 f = self(dst, "w")
868 f.write(src)
868 f.write(src)
869 f.close()
869 f.close()
870 self._fixfilemode(dst)
870 self._fixfilemode(dst)
871
871
872 class chunkbuffer(object):
872 class chunkbuffer(object):
873 """Allow arbitrary sized chunks of data to be efficiently read from an
873 """Allow arbitrary sized chunks of data to be efficiently read from an
874 iterator over chunks of arbitrary size."""
874 iterator over chunks of arbitrary size."""
875
875
876 def __init__(self, in_iter):
876 def __init__(self, in_iter):
877 """in_iter is the iterator that's iterating over the input chunks.
877 """in_iter is the iterator that's iterating over the input chunks.
878 self.targetsize is how big a buffer to try to maintain."""
878 self.targetsize is how big a buffer to try to maintain."""
879 self.iter = iter(in_iter)
879 self.iter = iter(in_iter)
880 self.buf = ''
880 self.buf = ''
881 self.targetsize = 2**16
881 self.targetsize = 2**16
882
882
883 def read(self, l):
883 def read(self, l):
884 """Read L bytes of data from the iterator of chunks of data.
884 """Read L bytes of data from the iterator of chunks of data.
885 Returns less than L bytes if the iterator runs dry."""
885 Returns less than L bytes if the iterator runs dry."""
886 if l > len(self.buf) and self.iter:
886 if l > len(self.buf) and self.iter:
887 # Clamp to a multiple of self.targetsize
887 # Clamp to a multiple of self.targetsize
888 targetsize = max(l, self.targetsize)
888 targetsize = max(l, self.targetsize)
889 collector = cStringIO.StringIO()
889 collector = cStringIO.StringIO()
890 collector.write(self.buf)
890 collector.write(self.buf)
891 collected = len(self.buf)
891 collected = len(self.buf)
892 for chunk in self.iter:
892 for chunk in self.iter:
893 collector.write(chunk)
893 collector.write(chunk)
894 collected += len(chunk)
894 collected += len(chunk)
895 if collected >= targetsize:
895 if collected >= targetsize:
896 break
896 break
897 if collected < targetsize:
897 if collected < targetsize:
898 self.iter = False
898 self.iter = False
899 self.buf = collector.getvalue()
899 self.buf = collector.getvalue()
900 if len(self.buf) == l:
900 if len(self.buf) == l:
901 s, self.buf = str(self.buf), ''
901 s, self.buf = str(self.buf), ''
902 else:
902 else:
903 s, self.buf = self.buf[:l], buffer(self.buf, l)
903 s, self.buf = self.buf[:l], buffer(self.buf, l)
904 return s
904 return s
905
905
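# Editor's usage sketch for the rechunking idea behind chunkbuffer: wrap an
# iterator that yields blocks of arbitrary size so callers can read exact byte
# counts. This standalone reader keeps a single bytes buffer rather than the
# cStringIO collector used above; the rechunker name is an example.
class rechunker(object):
    def __init__(self, in_iter):
        self.iter = iter(in_iter)
        self.buf = b''
    def read(self, l):
        while len(self.buf) < l:
            try:
                self.buf += next(self.iter)
            except StopIteration:
                break
        s, self.buf = self.buf[:l], self.buf[l:]
        return s

r = rechunker([b'ab', b'cdefg', b'h'])
assert r.read(3) == b'abc'
assert r.read(10) == b'defgh'   # shorter than requested once the source runs dry
assert r.read(1) == b''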
906 def filechunkiter(f, size=65536, limit=None):
906 def filechunkiter(f, size=65536, limit=None):
907 """Create a generator that produces the data in the file size
907 """Create a generator that produces the data in the file size
908 (default 65536) bytes at a time, up to optional limit (default is
908 (default 65536) bytes at a time, up to optional limit (default is
909 to read all data). Chunks may be less than size bytes if the
909 to read all data). Chunks may be less than size bytes if the
910 chunk is the last chunk in the file, or the file is a socket or
910 chunk is the last chunk in the file, or the file is a socket or
911 some other type of file that sometimes reads less data than is
911 some other type of file that sometimes reads less data than is
912 requested."""
912 requested."""
913 assert size >= 0
913 assert size >= 0
914 assert limit is None or limit >= 0
914 assert limit is None or limit >= 0
915 while True:
915 while True:
916 if limit is None: nbytes = size
916 if limit is None: nbytes = size
917 else: nbytes = min(limit, size)
917 else: nbytes = min(limit, size)
918 s = nbytes and f.read(nbytes)
918 s = nbytes and f.read(nbytes)
919 if not s: break
919 if not s: break
920 if limit: limit -= len(s)
920 if limit: limit -= len(s)
921 yield s
921 yield s
922
922
923 def makedate():
923 def makedate():
924 lt = time.localtime()
924 lt = time.localtime()
925 if lt[8] == 1 and time.daylight:
925 if lt[8] == 1 and time.daylight:
926 tz = time.altzone
926 tz = time.altzone
927 else:
927 else:
928 tz = time.timezone
928 tz = time.timezone
929 return time.mktime(lt), tz
929 return time.mktime(lt), tz
930
930
931 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
931 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
932 """represent a (unixtime, offset) tuple as a localized time.
932 """represent a (unixtime, offset) tuple as a localized time.
933 unixtime is seconds since the epoch, and offset is the time zone's
933 unixtime is seconds since the epoch, and offset is the time zone's
934 number of seconds away from UTC. if timezone is false, do not
934 number of seconds away from UTC. if timezone is false, do not
935 append time zone to string."""
935 append time zone to string."""
936 t, tz = date or makedate()
936 t, tz = date or makedate()
937 if "%1" in format or "%2" in format:
937 if "%1" in format or "%2" in format:
938 sign = (tz > 0) and "-" or "+"
938 sign = (tz > 0) and "-" or "+"
939 minutes = abs(tz) / 60
939 minutes = abs(tz) / 60
940 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
940 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
941 format = format.replace("%2", "%02d" % (minutes % 60))
941 format = format.replace("%2", "%02d" % (minutes % 60))
942 s = time.strftime(format, time.gmtime(float(t) - tz))
942 s = time.strftime(format, time.gmtime(float(t) - tz))
943 return s
943 return s
944
944
945 def shortdate(date=None):
945 def shortdate(date=None):
946 """turn (timestamp, tzoff) tuple into iso 8631 date."""
946 """turn (timestamp, tzoff) tuple into iso 8631 date."""
947 return datestr(date, format='%Y-%m-%d')
947 return datestr(date, format='%Y-%m-%d')
948
948
949 def strdate(string, format, defaults=[]):
949 def strdate(string, format, defaults=[]):
950 """parse a localized time string and return a (unixtime, offset) tuple.
950 """parse a localized time string and return a (unixtime, offset) tuple.
951 if the string cannot be parsed, ValueError is raised."""
951 if the string cannot be parsed, ValueError is raised."""
952 def timezone(string):
952 def timezone(string):
953 tz = string.split()[-1]
953 tz = string.split()[-1]
954 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
954 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
955 sign = (tz[0] == "+") and 1 or -1
955 sign = (tz[0] == "+") and 1 or -1
956 hours = int(tz[1:3])
956 hours = int(tz[1:3])
957 minutes = int(tz[3:5])
957 minutes = int(tz[3:5])
958 return -sign * (hours * 60 + minutes) * 60
958 return -sign * (hours * 60 + minutes) * 60
959 if tz == "GMT" or tz == "UTC":
959 if tz == "GMT" or tz == "UTC":
960 return 0
960 return 0
961 return None
961 return None
962
962
963 # NOTE: unixtime = localunixtime + offset
963 # NOTE: unixtime = localunixtime + offset
964 offset, date = timezone(string), string
964 offset, date = timezone(string), string
965 if offset is not None:
965 if offset is not None:
966 date = " ".join(string.split()[:-1])
966 date = " ".join(string.split()[:-1])
967
967
968 # add missing elements from defaults
968 # add missing elements from defaults
969 for part in defaults:
969 for part in defaults:
970 found = [True for p in part if ("%"+p) in format]
970 found = [True for p in part if ("%"+p) in format]
971 if not found:
971 if not found:
972 date += "@" + defaults[part]
972 date += "@" + defaults[part]
973 format += "@%" + part[0]
973 format += "@%" + part[0]
974
974
975 timetuple = time.strptime(date, format)
975 timetuple = time.strptime(date, format)
976 localunixtime = int(calendar.timegm(timetuple))
976 localunixtime = int(calendar.timegm(timetuple))
977 if offset is None:
977 if offset is None:
978 # local timezone
978 # local timezone
979 unixtime = int(time.mktime(timetuple))
979 unixtime = int(time.mktime(timetuple))
980 offset = unixtime - localunixtime
980 offset = unixtime - localunixtime
981 else:
981 else:
982 unixtime = localunixtime + offset
982 unixtime = localunixtime + offset
983 return unixtime, offset
983 return unixtime, offset
984
984
985 def parsedate(date, formats=None, defaults=None):
985 def parsedate(date, formats=None, defaults=None):
986 """parse a localized date/time string and return a (unixtime, offset) tuple.
986 """parse a localized date/time string and return a (unixtime, offset) tuple.
987
987
988 The date may be a "unixtime offset" string or in one of the specified
988 The date may be a "unixtime offset" string or in one of the specified
989 formats. If the date already is a (unixtime, offset) tuple, it is returned.
989 formats. If the date already is a (unixtime, offset) tuple, it is returned.
990 """
990 """
991 if not date:
991 if not date:
992 return 0, 0
992 return 0, 0
993 if isinstance(date, tuple) and len(date) == 2:
993 if isinstance(date, tuple) and len(date) == 2:
994 return date
994 return date
995 if not formats:
995 if not formats:
996 formats = defaultdateformats
996 formats = defaultdateformats
997 date = date.strip()
997 date = date.strip()
998 try:
998 try:
999 when, offset = map(int, date.split(' '))
999 when, offset = map(int, date.split(' '))
1000 except ValueError:
1000 except ValueError:
1001 # fill out defaults
1001 # fill out defaults
1002 if not defaults:
1002 if not defaults:
1003 defaults = {}
1003 defaults = {}
1004 now = makedate()
1004 now = makedate()
1005 for part in "d mb yY HI M S".split():
1005 for part in "d mb yY HI M S".split():
1006 if part not in defaults:
1006 if part not in defaults:
1007 if part[0] in "HMS":
1007 if part[0] in "HMS":
1008 defaults[part] = "00"
1008 defaults[part] = "00"
1009 else:
1009 else:
1010 defaults[part] = datestr(now, "%" + part[0])
1010 defaults[part] = datestr(now, "%" + part[0])
1011
1011
1012 for format in formats:
1012 for format in formats:
1013 try:
1013 try:
1014 when, offset = strdate(date, format, defaults)
1014 when, offset = strdate(date, format, defaults)
1015 except (ValueError, OverflowError):
1015 except (ValueError, OverflowError):
1016 pass
1016 pass
1017 else:
1017 else:
1018 break
1018 break
1019 else:
1019 else:
1020 raise Abort(_('invalid date: %r ') % date)
1020 raise Abort(_('invalid date: %r ') % date)
1021 # validate explicit (probably user-specified) date and
1021 # validate explicit (probably user-specified) date and
1022 # time zone offset. values must fit in signed 32 bits for
1022 # time zone offset. values must fit in signed 32 bits for
1023 # current 32-bit linux runtimes. timezones go from UTC-12
1023 # current 32-bit linux runtimes. timezones go from UTC-12
1024 # to UTC+14
1024 # to UTC+14
1025 if abs(when) > 0x7fffffff:
1025 if abs(when) > 0x7fffffff:
1026 raise Abort(_('date exceeds 32 bits: %d') % when)
1026 raise Abort(_('date exceeds 32 bits: %d') % when)
1027 if offset < -50400 or offset > 43200:
1027 if offset < -50400 or offset > 43200:
1028 raise Abort(_('impossible time zone offset: %d') % offset)
1028 raise Abort(_('impossible time zone offset: %d') % offset)
1029 return when, offset
1029 return when, offset
1030
1030
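# Editor's minimal sketch of the two date forms parsedate() accepts: the
# internal "unixtime offset" pair, and a human-readable string matched against
# a list of strptime formats. This helper handles just those two cases, assumes
# UTC for the string form, and skips the default-filling logic above.
import calendar, time

def simpleparsedate(date, formats=('%Y-%m-%d %H:%M:%S', '%Y-%m-%d')):
    try:
        when, offset = map(int, date.split(' '))
        return when, offset
    except ValueError:
        pass
    for fmt in formats:
        try:
            timetuple = time.strptime(date, fmt)
        except ValueError:
            continue
        return int(calendar.timegm(timetuple)), 0
    raise ValueError('invalid date: %r' % date)

assert simpleparsedate('1234567890 0') == (1234567890, 0)
assert simpleparsedate('2009-02-13 23:31:30') == (1234567890, 0)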
1031 def matchdate(date):
1031 def matchdate(date):
1032 """Return a function that matches a given date match specifier
1032 """Return a function that matches a given date match specifier
1033
1033
1034 Formats include:
1034 Formats include:
1035
1035
1036 '{date}' match a given date to the accuracy provided
1036 '{date}' match a given date to the accuracy provided
1037
1037
1038 '<{date}' on or before a given date
1038 '<{date}' on or before a given date
1039
1039
1040 '>{date}' on or after a given date
1040 '>{date}' on or after a given date
1041
1041
1042 """
1042 """
1043
1043
1044 def lower(date):
1044 def lower(date):
1045 d = dict(mb="1", d="1")
1045 d = dict(mb="1", d="1")
1046 return parsedate(date, extendeddateformats, d)[0]
1046 return parsedate(date, extendeddateformats, d)[0]
1047
1047
1048 def upper(date):
1048 def upper(date):
1049 d = dict(mb="12", HI="23", M="59", S="59")
1049 d = dict(mb="12", HI="23", M="59", S="59")
1050 for days in "31 30 29".split():
1050 for days in "31 30 29".split():
1051 try:
1051 try:
1052 d["d"] = days
1052 d["d"] = days
1053 return parsedate(date, extendeddateformats, d)[0]
1053 return parsedate(date, extendeddateformats, d)[0]
1054 except:
1054 except:
1055 pass
1055 pass
1056 d["d"] = "28"
1056 d["d"] = "28"
1057 return parsedate(date, extendeddateformats, d)[0]
1057 return parsedate(date, extendeddateformats, d)[0]
1058
1058
1059 date = date.strip()
1059 date = date.strip()
1060 if date[0] == "<":
1060 if date[0] == "<":
1061 when = upper(date[1:])
1061 when = upper(date[1:])
1062 return lambda x: x <= when
1062 return lambda x: x <= when
1063 elif date[0] == ">":
1063 elif date[0] == ">":
1064 when = lower(date[1:])
1064 when = lower(date[1:])
1065 return lambda x: x >= when
1065 return lambda x: x >= when
1066 elif date[0] == "-":
1066 elif date[0] == "-":
1067 try:
1067 try:
1068 days = int(date[1:])
1068 days = int(date[1:])
1069 except ValueError:
1069 except ValueError:
1070 raise Abort(_("invalid day spec: %s") % date[1:])
1070 raise Abort(_("invalid day spec: %s") % date[1:])
1071 when = makedate()[0] - days * 3600 * 24
1071 when = makedate()[0] - days * 3600 * 24
1072 return lambda x: x >= when
1072 return lambda x: x >= when
1073 elif " to " in date:
1073 elif " to " in date:
1074 a, b = date.split(" to ")
1074 a, b = date.split(" to ")
1075 start, stop = lower(a), upper(b)
1075 start, stop = lower(a), upper(b)
1076 return lambda x: x >= start and x <= stop
1076 return lambda x: x >= start and x <= stop
1077 else:
1077 else:
1078 start, stop = lower(date), upper(date)
1078 start, stop = lower(date), upper(date)
1079 return lambda x: x >= start and x <= stop
1079 return lambda x: x >= start and x <= stop
1080
1080
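# Editor's usage sketch for the matcher returned by matchdate(): the specifier
# becomes a predicate over unix timestamps. For simplicity this version uses a
# ">{unixtime}" / "<{unixtime}" grammar instead of the full date parsing above.
def simplematchdate(spec):
    if spec.startswith('>'):
        when = int(spec[1:])
        return lambda x: x >= when
    if spec.startswith('<'):
        when = int(spec[1:])
        return lambda x: x <= when
    when = int(spec)
    return lambda x: x == when

after = simplematchdate('>1000')
assert after(1500) and not after(500)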
1081 def shortuser(user):
1081 def shortuser(user):
1082 """Return a short representation of a user name or email address."""
1082 """Return a short representation of a user name or email address."""
1083 f = user.find('@')
1083 f = user.find('@')
1084 if f >= 0:
1084 if f >= 0:
1085 user = user[:f]
1085 user = user[:f]
1086 f = user.find('<')
1086 f = user.find('<')
1087 if f >= 0:
1087 if f >= 0:
1088 user = user[f+1:]
1088 user = user[f+1:]
1089 f = user.find(' ')
1089 f = user.find(' ')
1090 if f >= 0:
1090 if f >= 0:
1091 user = user[:f]
1091 user = user[:f]
1092 f = user.find('.')
1092 f = user.find('.')
1093 if f >= 0:
1093 if f >= 0:
1094 user = user[:f]
1094 user = user[:f]
1095 return user
1095 return user
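
# Usage sketch (illustrative): shortuser() reduces a "Name <address>" string
# to a short, login-like token, e.g.:
#
#   shortuser('John Doe <john.doe@example.com>')   # -> 'john'
#   shortuser('foo@bar.com')                       # -> 'foo'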

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<')+1:r]
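
# Usage sketch (illustrative): email() extracts the part between angle
# brackets, or returns the string unchanged when there are none:
#
#   email('John Doe <jdoe@example.com>')   # -> 'jdoe@example.com'
#   email('jdoe@example.com')              # -> 'jdoe@example.com'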

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])
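
# Usage sketch (illustrative): the "..." counts toward the limit, so the
# result is never longer than maxlength:
#
#   ellipsis('short text')                  # -> 'short text' (unchanged)
#   ellipsis('abcdefghij', maxlength=8)     # -> 'abcde...'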

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
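
# Usage sketch (illustrative): walkrepos() is a generator, and with
# followsym=True symlinked directories are deduplicated via os.path.samestat.
# Assuming a hypothetical /srv/hg tree:
#
#   for repo in walkrepos('/srv/hg', followsym=True, recurse=True):
#       print repo    # each path containing a .hg directory, plus any
#                     # .hg/patches queue repository nested inside it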

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
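
# Usage sketch (illustrative): the result is memoized in the module-level
# _rcpath, so HGRCPATH is only consulted on the first call:
#
#   os.environ['HGRCPATH'] = ''   # hypothetical: empty HGRCPATH
#   rcpath()                      # -> [] (only .hg/hgrc of the repo is read)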

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
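
# Usage sketch (illustrative): the first (multiplier, divisor) pair the value
# reaches picks the format, so precision shrinks as the number grows:
#
#   bytecount(100)          # -> '100 bytes'
#   bytecount(2048)         # -> '2.00 KB'
#   bytecount(150 * 1024)   # -> '150 KB'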

def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line
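
# Usage sketch (illustrative): iterlines() flattens an iterable of text
# chunks into individual lines:
#
#   list(iterlines(['a\nb\n', 'c']))   # -> ['a', 'b', 'c']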