Use lexists() instead of exists() where appropriate
Patrick Mezard
r12344:b6173aee stable
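
The change below swaps os.path.exists() for os.path.lexists() in gnuarch_source.getfile(). The difference matters for symbolic links: exists() follows the link and returns False when the link's target is missing, while lexists() reports on the link entry itself, so a dangling symlink is not mistaken for a deleted file (the IOError path in getfile). A minimal standalone illustration of that distinction, using hypothetical file names on a POSIX system:

import os

os.symlink('missing-target', 'dangling-link')   # link whose target does not exist

print(os.path.exists('dangling-link'))    # False: follows the link to the missing target
print(os.path.lexists('dangling-link'))   # True: checks only the link entry itself
print(os.path.lexists('really-deleted'))  # False: a genuinely absent path is still missing

os.remove('dangling-link')
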
@@ -1,338 +1,338 @@
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import encoding, util
11 from mercurial import encoding, util
12 import os, shutil, tempfile, stat
12 import os, shutil, tempfile, stat
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
17 class gnuarch_rev(object):
17 class gnuarch_rev(object):
18 def __init__(self, rev):
18 def __init__(self, rev):
19 self.rev = rev
19 self.rev = rev
20 self.summary = ''
20 self.summary = ''
21 self.date = None
21 self.date = None
22 self.author = ''
22 self.author = ''
23 self.continuationof = None
23 self.continuationof = None
24 self.add_files = []
24 self.add_files = []
25 self.mod_files = []
25 self.mod_files = []
26 self.del_files = []
26 self.del_files = []
27 self.ren_files = {}
27 self.ren_files = {}
28 self.ren_dirs = {}
28 self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 % path)
35 % path)
36
36
37 # Could use checktool, but we want to check for baz or tla.
37 # Could use checktool, but we want to check for baz or tla.
38 self.execmd = None
38 self.execmd = None
39 if util.find_exe('baz'):
39 if util.find_exe('baz'):
40 self.execmd = 'baz'
40 self.execmd = 'baz'
41 else:
41 else:
42 if util.find_exe('tla'):
42 if util.find_exe('tla'):
43 self.execmd = 'tla'
43 self.execmd = 'tla'
44 else:
44 else:
45 raise util.Abort(_('cannot find a GNU Arch tool'))
45 raise util.Abort(_('cannot find a GNU Arch tool'))
46
46
47 commandline.__init__(self, ui, self.execmd)
47 commandline.__init__(self, ui, self.execmd)
48
48
49 self.path = os.path.realpath(path)
49 self.path = os.path.realpath(path)
50 self.tmppath = None
50 self.tmppath = None
51
51
52 self.treeversion = None
52 self.treeversion = None
53 self.lastrev = None
53 self.lastrev = None
54 self.changes = {}
54 self.changes = {}
55 self.parents = {}
55 self.parents = {}
56 self.tags = {}
56 self.tags = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.encoding = encoding.encoding
58 self.encoding = encoding.encoding
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieveing revisions for %s'
92 self.checkexit(status, 'failed retrieveing revisions for %s'
93 % treeversion)
93 % treeversion)
94
94
95 # No new iteration unless a revision has a continuation-of header
95 # No new iteration unless a revision has a continuation-of header
96 treeversion = None
96 treeversion = None
97
97
98 for l in output:
98 for l in output:
99 rev = l.strip()
99 rev = l.strip()
100 self.changes[rev] = self.gnuarch_rev(rev)
100 self.changes[rev] = self.gnuarch_rev(rev)
101 self.parents[rev] = []
101 self.parents[rev] = []
102
102
103 # Read author, date and summary
103 # Read author, date and summary
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 if status:
105 if status:
106 catlog = self.run0('cat-archive-log', rev)
106 catlog = self.run0('cat-archive-log', rev)
107 self._parsecatlog(catlog, rev)
107 self._parsecatlog(catlog, rev)
108
108
109 # Populate the parents map
109 # Populate the parents map
110 self.parents[child].append(rev)
110 self.parents[child].append(rev)
111
111
112 # Keep track of the current revision as the child of the next
112 # Keep track of the current revision as the child of the next
113 # revision scanned
113 # revision scanned
114 child = rev
114 child = rev
115
115
116 # Check if we have to follow the usual incremental history
116 # Check if we have to follow the usual incremental history
117 # or if we have to 'jump' to a different treeversion given
117 # or if we have to 'jump' to a different treeversion given
118 # by the continuation-of header.
118 # by the continuation-of header.
119 if self.changes[rev].continuationof:
119 if self.changes[rev].continuationof:
120 treeversion = '--'.join(
120 treeversion = '--'.join(
121 self.changes[rev].continuationof.split('--')[:-1])
121 self.changes[rev].continuationof.split('--')[:-1])
122 break
122 break
123
123
124 # If we reached a base-0 revision w/o any continuation-of
124 # If we reached a base-0 revision w/o any continuation-of
125 # header, it means the tree history ends here.
125 # header, it means the tree history ends here.
126 if rev[-6:] == 'base-0':
126 if rev[-6:] == 'base-0':
127 break
127 break
128
128
129 def after(self):
129 def after(self):
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev != self.lastrev:
137 if rev != self.lastrev:
138 raise util.Abort(_('internal calling inconsistency'))
138 raise util.Abort(_('internal calling inconsistency'))
139
139
140 # Raise IOError if necessary (i.e. deleted files).
140 # Raise IOError if necessary (i.e. deleted files).
141 if not os.path.exists(os.path.join(self.tmppath, name)):
141 if not os.path.lexists(os.path.join(self.tmppath, name)):
142 raise IOError
142 raise IOError
143
143
144 return self._getfile(name, rev)
144 return self._getfile(name, rev)
145
145
146 def getchanges(self, rev):
146 def getchanges(self, rev):
147 self._update(rev)
147 self._update(rev)
148 changes = []
148 changes = []
149 copies = {}
149 copies = {}
150
150
151 for f in self.changes[rev].add_files:
151 for f in self.changes[rev].add_files:
152 changes.append((f, rev))
152 changes.append((f, rev))
153
153
154 for f in self.changes[rev].mod_files:
154 for f in self.changes[rev].mod_files:
155 changes.append((f, rev))
155 changes.append((f, rev))
156
156
157 for f in self.changes[rev].del_files:
157 for f in self.changes[rev].del_files:
158 changes.append((f, rev))
158 changes.append((f, rev))
159
159
160 for src in self.changes[rev].ren_files:
160 for src in self.changes[rev].ren_files:
161 to = self.changes[rev].ren_files[src]
161 to = self.changes[rev].ren_files[src]
162 changes.append((src, rev))
162 changes.append((src, rev))
163 changes.append((to, rev))
163 changes.append((to, rev))
164 copies[to] = src
164 copies[to] = src
165
165
166 for src in self.changes[rev].ren_dirs:
166 for src in self.changes[rev].ren_dirs:
167 to = self.changes[rev].ren_dirs[src]
167 to = self.changes[rev].ren_dirs[src]
168 chgs, cps = self._rendirchanges(src, to)
168 chgs, cps = self._rendirchanges(src, to)
169 changes += [(f, rev) for f in chgs]
169 changes += [(f, rev) for f in chgs]
170 copies.update(cps)
170 copies.update(cps)
171
171
172 self.lastrev = rev
172 self.lastrev = rev
173 return sorted(set(changes)), copies
173 return sorted(set(changes)), copies
174
174
175 def getcommit(self, rev):
175 def getcommit(self, rev):
176 changes = self.changes[rev]
176 changes = self.changes[rev]
177 return commit(author=changes.author, date=changes.date,
177 return commit(author=changes.author, date=changes.date,
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
178 desc=changes.summary, parents=self.parents[rev], rev=rev)
179
179
180 def gettags(self):
180 def gettags(self):
181 return self.tags
181 return self.tags
182
182
183 def _execute(self, cmd, *args, **kwargs):
183 def _execute(self, cmd, *args, **kwargs):
184 cmdline = [self.execmd, cmd]
184 cmdline = [self.execmd, cmd]
185 cmdline += args
185 cmdline += args
186 cmdline = [util.shellquote(arg) for arg in cmdline]
186 cmdline = [util.shellquote(arg) for arg in cmdline]
187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
187 cmdline += ['>', util.nulldev, '2>', util.nulldev]
188 cmdline = util.quotecommand(' '.join(cmdline))
188 cmdline = util.quotecommand(' '.join(cmdline))
189 self.ui.debug(cmdline, '\n')
189 self.ui.debug(cmdline, '\n')
190 return os.system(cmdline)
190 return os.system(cmdline)
191
191
192 def _update(self, rev):
192 def _update(self, rev):
193 self.ui.debug('applying revision %s...\n' % rev)
193 self.ui.debug('applying revision %s...\n' % rev)
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
194 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 rev)
195 rev)
196 if status:
196 if status:
197 # Something went wrong while merging (baz or tla
197 # Something went wrong while merging (baz or tla
198 # issue?), get latest revision and try from there
198 # issue?), get latest revision and try from there
199 shutil.rmtree(self.tmppath, ignore_errors=True)
199 shutil.rmtree(self.tmppath, ignore_errors=True)
200 self._obtainrevision(rev)
200 self._obtainrevision(rev)
201 else:
201 else:
202 old_rev = self.parents[rev][0]
202 old_rev = self.parents[rev][0]
203 self.ui.debug('computing changeset between %s and %s...\n'
203 self.ui.debug('computing changeset between %s and %s...\n'
204 % (old_rev, rev))
204 % (old_rev, rev))
205 self._parsechangeset(changeset, rev)
205 self._parsechangeset(changeset, rev)
206
206
207 def _getfile(self, name, rev):
207 def _getfile(self, name, rev):
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
208 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 if stat.S_ISLNK(mode):
209 if stat.S_ISLNK(mode):
210 data = os.readlink(os.path.join(self.tmppath, name))
210 data = os.readlink(os.path.join(self.tmppath, name))
211 mode = mode and 'l' or ''
211 mode = mode and 'l' or ''
212 else:
212 else:
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
213 data = open(os.path.join(self.tmppath, name), 'rb').read()
214 mode = (mode & 0111) and 'x' or ''
214 mode = (mode & 0111) and 'x' or ''
215 return data, mode
215 return data, mode
216
216
217 def _exclude(self, name):
217 def _exclude(self, name):
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
218 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
219 for exc in exclude:
219 for exc in exclude:
220 if name.find(exc) != -1:
220 if name.find(exc) != -1:
221 return True
221 return True
222 return False
222 return False
223
223
224 def _readcontents(self, path):
224 def _readcontents(self, path):
225 files = []
225 files = []
226 contents = os.listdir(path)
226 contents = os.listdir(path)
227 while len(contents) > 0:
227 while len(contents) > 0:
228 c = contents.pop()
228 c = contents.pop()
229 p = os.path.join(path, c)
229 p = os.path.join(path, c)
230 # os.walk could be used, but here we avoid internal GNU
230 # os.walk could be used, but here we avoid internal GNU
231 # Arch files and directories, thus saving a lot time.
231 # Arch files and directories, thus saving a lot time.
232 if not self._exclude(p):
232 if not self._exclude(p):
233 if os.path.isdir(p):
233 if os.path.isdir(p):
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
234 contents += [os.path.join(c, f) for f in os.listdir(p)]
235 else:
235 else:
236 files.append(c)
236 files.append(c)
237 return files
237 return files
238
238
239 def _rendirchanges(self, src, dest):
239 def _rendirchanges(self, src, dest):
240 changes = []
240 changes = []
241 copies = {}
241 copies = {}
242 files = self._readcontents(os.path.join(self.tmppath, dest))
242 files = self._readcontents(os.path.join(self.tmppath, dest))
243 for f in files:
243 for f in files:
244 s = os.path.join(src, f)
244 s = os.path.join(src, f)
245 d = os.path.join(dest, f)
245 d = os.path.join(dest, f)
246 changes.append(s)
246 changes.append(s)
247 changes.append(d)
247 changes.append(d)
248 copies[d] = s
248 copies[d] = s
249 return changes, copies
249 return changes, copies
250
250
251 def _obtainrevision(self, rev):
251 def _obtainrevision(self, rev):
252 self.ui.debug('obtaining revision %s...\n' % rev)
252 self.ui.debug('obtaining revision %s...\n' % rev)
253 output = self._execute('get', rev, self.tmppath)
253 output = self._execute('get', rev, self.tmppath)
254 self.checkexit(output)
254 self.checkexit(output)
255 self.ui.debug('analyzing revision %s...\n' % rev)
255 self.ui.debug('analyzing revision %s...\n' % rev)
256 files = self._readcontents(self.tmppath)
256 files = self._readcontents(self.tmppath)
257 self.changes[rev].add_files += files
257 self.changes[rev].add_files += files
258
258
259 def _stripbasepath(self, path):
259 def _stripbasepath(self, path):
260 if path.startswith('./'):
260 if path.startswith('./'):
261 return path[2:]
261 return path[2:]
262 return path
262 return path
263
263
264 def _parsecatlog(self, data, rev):
264 def _parsecatlog(self, data, rev):
265 try:
265 try:
266 catlog = self.catlogparser.parsestr(data)
266 catlog = self.catlogparser.parsestr(data)
267
267
268 # Commit date
268 # Commit date
269 self.changes[rev].date = util.datestr(
269 self.changes[rev].date = util.datestr(
270 util.strdate(catlog['Standard-date'],
270 util.strdate(catlog['Standard-date'],
271 '%Y-%m-%d %H:%M:%S'))
271 '%Y-%m-%d %H:%M:%S'))
272
272
273 # Commit author
273 # Commit author
274 self.changes[rev].author = self.recode(catlog['Creator'])
274 self.changes[rev].author = self.recode(catlog['Creator'])
275
275
276 # Commit description
276 # Commit description
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
277 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
278 catlog.get_payload()))
278 catlog.get_payload()))
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
279 self.changes[rev].summary = self.recode(self.changes[rev].summary)
280
280
281 # Commit revision origin when dealing with a branch or tag
281 # Commit revision origin when dealing with a branch or tag
282 if 'Continuation-of' in catlog:
282 if 'Continuation-of' in catlog:
283 self.changes[rev].continuationof = self.recode(
283 self.changes[rev].continuationof = self.recode(
284 catlog['Continuation-of'])
284 catlog['Continuation-of'])
285 except Exception:
285 except Exception:
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
286 raise util.Abort(_('could not parse cat-log of %s') % rev)
287
287
288 def _parsechangeset(self, data, rev):
288 def _parsechangeset(self, data, rev):
289 for l in data:
289 for l in data:
290 l = l.strip()
290 l = l.strip()
291 # Added file (ignore added directory)
291 # Added file (ignore added directory)
292 if l.startswith('A') and not l.startswith('A/'):
292 if l.startswith('A') and not l.startswith('A/'):
293 file = self._stripbasepath(l[1:].strip())
293 file = self._stripbasepath(l[1:].strip())
294 if not self._exclude(file):
294 if not self._exclude(file):
295 self.changes[rev].add_files.append(file)
295 self.changes[rev].add_files.append(file)
296 # Deleted file (ignore deleted directory)
296 # Deleted file (ignore deleted directory)
297 elif l.startswith('D') and not l.startswith('D/'):
297 elif l.startswith('D') and not l.startswith('D/'):
298 file = self._stripbasepath(l[1:].strip())
298 file = self._stripbasepath(l[1:].strip())
299 if not self._exclude(file):
299 if not self._exclude(file):
300 self.changes[rev].del_files.append(file)
300 self.changes[rev].del_files.append(file)
301 # Modified binary file
301 # Modified binary file
302 elif l.startswith('Mb'):
302 elif l.startswith('Mb'):
303 file = self._stripbasepath(l[2:].strip())
303 file = self._stripbasepath(l[2:].strip())
304 if not self._exclude(file):
304 if not self._exclude(file):
305 self.changes[rev].mod_files.append(file)
305 self.changes[rev].mod_files.append(file)
306 # Modified link
306 # Modified link
307 elif l.startswith('M->'):
307 elif l.startswith('M->'):
308 file = self._stripbasepath(l[3:].strip())
308 file = self._stripbasepath(l[3:].strip())
309 if not self._exclude(file):
309 if not self._exclude(file):
310 self.changes[rev].mod_files.append(file)
310 self.changes[rev].mod_files.append(file)
311 # Modified file
311 # Modified file
312 elif l.startswith('M'):
312 elif l.startswith('M'):
313 file = self._stripbasepath(l[1:].strip())
313 file = self._stripbasepath(l[1:].strip())
314 if not self._exclude(file):
314 if not self._exclude(file):
315 self.changes[rev].mod_files.append(file)
315 self.changes[rev].mod_files.append(file)
316 # Renamed file (or link)
316 # Renamed file (or link)
317 elif l.startswith('=>'):
317 elif l.startswith('=>'):
318 files = l[2:].strip().split(' ')
318 files = l[2:].strip().split(' ')
319 if len(files) == 1:
319 if len(files) == 1:
320 files = l[2:].strip().split('\t')
320 files = l[2:].strip().split('\t')
321 src = self._stripbasepath(files[0])
321 src = self._stripbasepath(files[0])
322 dst = self._stripbasepath(files[1])
322 dst = self._stripbasepath(files[1])
323 if not self._exclude(src) and not self._exclude(dst):
323 if not self._exclude(src) and not self._exclude(dst):
324 self.changes[rev].ren_files[src] = dst
324 self.changes[rev].ren_files[src] = dst
325 # Conversion from file to link or from link to file (modified)
325 # Conversion from file to link or from link to file (modified)
326 elif l.startswith('ch'):
326 elif l.startswith('ch'):
327 file = self._stripbasepath(l[2:].strip())
327 file = self._stripbasepath(l[2:].strip())
328 if not self._exclude(file):
328 if not self._exclude(file):
329 self.changes[rev].mod_files.append(file)
329 self.changes[rev].mod_files.append(file)
330 # Renamed directory
330 # Renamed directory
331 elif l.startswith('/>'):
331 elif l.startswith('/>'):
332 dirs = l[2:].strip().split(' ')
332 dirs = l[2:].strip().split(' ')
333 if len(dirs) == 1:
333 if len(dirs) == 1:
334 dirs = l[2:].strip().split('\t')
334 dirs = l[2:].strip().split('\t')
335 src = self._stripbasepath(dirs[0])
335 src = self._stripbasepath(dirs[0])
336 dst = self._stripbasepath(dirs[1])
336 dst = self._stripbasepath(dirs[1])
337 if not self._exclude(src) and not self._exclude(dst):
337 if not self._exclude(src) and not self._exclude(dst):
338 self.changes[rev].ren_dirs[src] = dst
338 self.changes[rev].ren_dirs[src] = dst
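
As additional context for the gnuarch.py hunk above: _getfile() reads a symlink's target with os.lstat()/os.readlink() instead of opening the file, so a link whose target has disappeared still has retrievable content for the converter. A rough standalone sketch of that flow (read_entry is an illustrative name, not part of the extension's API):

import os, stat

def read_entry(path):
    # Existence must be tested with lexists() so that a dangling symlink
    # is not reported as deleted before its target string can be read.
    if not os.path.lexists(path):
        raise IOError
    mode = os.lstat(path).st_mode
    if stat.S_ISLNK(mode):
        return os.readlink(path), 'l'
    data = open(path, 'rb').read()
    return data, (mode & 0o111) and 'x' or ''
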
@@ -1,3025 +1,3025 @@
1 # mq.py - patch queues for mercurial
1 # mq.py - patch queues for mercurial
2 #
2 #
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
3 # Copyright 2005, 2006 Chris Mason <mason@suse.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''manage a stack of patches
8 '''manage a stack of patches
9
9
10 This extension lets you work with a stack of patches in a Mercurial
10 This extension lets you work with a stack of patches in a Mercurial
11 repository. It manages two stacks of patches - all known patches, and
11 repository. It manages two stacks of patches - all known patches, and
12 applied patches (subset of known patches).
12 applied patches (subset of known patches).
13
13
14 Known patches are represented as patch files in the .hg/patches
14 Known patches are represented as patch files in the .hg/patches
15 directory. Applied patches are both patch files and changesets.
15 directory. Applied patches are both patch files and changesets.
16
16
17 Common tasks (use :hg:`help command` for more details)::
17 Common tasks (use :hg:`help command` for more details)::
18
18
19 create new patch qnew
19 create new patch qnew
20 import existing patch qimport
20 import existing patch qimport
21
21
22 print patch series qseries
22 print patch series qseries
23 print applied patches qapplied
23 print applied patches qapplied
24
24
25 add known patch to applied stack qpush
25 add known patch to applied stack qpush
26 remove patch from applied stack qpop
26 remove patch from applied stack qpop
27 refresh contents of top applied patch qrefresh
27 refresh contents of top applied patch qrefresh
28
28
29 By default, mq will automatically use git patches when required to
29 By default, mq will automatically use git patches when required to
30 avoid losing file mode changes, copy records, binary files or empty
30 avoid losing file mode changes, copy records, binary files or empty
31 files creations or deletions. This behaviour can be configured with::
31 files creations or deletions. This behaviour can be configured with::
32
32
33 [mq]
33 [mq]
34 git = auto/keep/yes/no
34 git = auto/keep/yes/no
35
35
36 If set to 'keep', mq will obey the [diff] section configuration while
36 If set to 'keep', mq will obey the [diff] section configuration while
37 preserving existing git patches upon qrefresh. If set to 'yes' or
37 preserving existing git patches upon qrefresh. If set to 'yes' or
38 'no', mq will override the [diff] section and always generate git or
38 'no', mq will override the [diff] section and always generate git or
39 regular patches, possibly losing data in the second case.
39 regular patches, possibly losing data in the second case.
40
40
41 You will by default be managing a patch queue named "patches". You can
41 You will by default be managing a patch queue named "patches". You can
42 create other, independent patch queues with the :hg:`qqueue` command.
42 create other, independent patch queues with the :hg:`qqueue` command.
43 '''
43 '''
44
44
45 from mercurial.i18n import _
45 from mercurial.i18n import _
46 from mercurial.node import bin, hex, short, nullid, nullrev
46 from mercurial.node import bin, hex, short, nullid, nullrev
47 from mercurial.lock import release
47 from mercurial.lock import release
48 from mercurial import commands, cmdutil, hg, patch, util
48 from mercurial import commands, cmdutil, hg, patch, util
49 from mercurial import repair, extensions, url, error
49 from mercurial import repair, extensions, url, error
50 import os, sys, re, errno
50 import os, sys, re, errno
51
51
52 commands.norepo += " qclone"
52 commands.norepo += " qclone"
53
53
54 # Patch names looks like unix-file names.
54 # Patch names looks like unix-file names.
55 # They must be joinable with queue directory and result in the patch path.
55 # They must be joinable with queue directory and result in the patch path.
56 normname = util.normpath
56 normname = util.normpath
57
57
58 class statusentry(object):
58 class statusentry(object):
59 def __init__(self, node, name):
59 def __init__(self, node, name):
60 self.node, self.name = node, name
60 self.node, self.name = node, name
61 def __repr__(self):
61 def __repr__(self):
62 return hex(self.node) + ':' + self.name
62 return hex(self.node) + ':' + self.name
63
63
64 class patchheader(object):
64 class patchheader(object):
65 def __init__(self, pf, plainmode=False):
65 def __init__(self, pf, plainmode=False):
66 def eatdiff(lines):
66 def eatdiff(lines):
67 while lines:
67 while lines:
68 l = lines[-1]
68 l = lines[-1]
69 if (l.startswith("diff -") or
69 if (l.startswith("diff -") or
70 l.startswith("Index:") or
70 l.startswith("Index:") or
71 l.startswith("===========")):
71 l.startswith("===========")):
72 del lines[-1]
72 del lines[-1]
73 else:
73 else:
74 break
74 break
75 def eatempty(lines):
75 def eatempty(lines):
76 while lines:
76 while lines:
77 if not lines[-1].strip():
77 if not lines[-1].strip():
78 del lines[-1]
78 del lines[-1]
79 else:
79 else:
80 break
80 break
81
81
82 message = []
82 message = []
83 comments = []
83 comments = []
84 user = None
84 user = None
85 date = None
85 date = None
86 parent = None
86 parent = None
87 format = None
87 format = None
88 subject = None
88 subject = None
89 diffstart = 0
89 diffstart = 0
90
90
91 for line in file(pf):
91 for line in file(pf):
92 line = line.rstrip()
92 line = line.rstrip()
93 if (line.startswith('diff --git')
93 if (line.startswith('diff --git')
94 or (diffstart and line.startswith('+++ '))):
94 or (diffstart and line.startswith('+++ '))):
95 diffstart = 2
95 diffstart = 2
96 break
96 break
97 diffstart = 0 # reset
97 diffstart = 0 # reset
98 if line.startswith("--- "):
98 if line.startswith("--- "):
99 diffstart = 1
99 diffstart = 1
100 continue
100 continue
101 elif format == "hgpatch":
101 elif format == "hgpatch":
102 # parse values when importing the result of an hg export
102 # parse values when importing the result of an hg export
103 if line.startswith("# User "):
103 if line.startswith("# User "):
104 user = line[7:]
104 user = line[7:]
105 elif line.startswith("# Date "):
105 elif line.startswith("# Date "):
106 date = line[7:]
106 date = line[7:]
107 elif line.startswith("# Parent "):
107 elif line.startswith("# Parent "):
108 parent = line[9:]
108 parent = line[9:]
109 elif not line.startswith("# ") and line:
109 elif not line.startswith("# ") and line:
110 message.append(line)
110 message.append(line)
111 format = None
111 format = None
112 elif line == '# HG changeset patch':
112 elif line == '# HG changeset patch':
113 message = []
113 message = []
114 format = "hgpatch"
114 format = "hgpatch"
115 elif (format != "tagdone" and (line.startswith("Subject: ") or
115 elif (format != "tagdone" and (line.startswith("Subject: ") or
116 line.startswith("subject: "))):
116 line.startswith("subject: "))):
117 subject = line[9:]
117 subject = line[9:]
118 format = "tag"
118 format = "tag"
119 elif (format != "tagdone" and (line.startswith("From: ") or
119 elif (format != "tagdone" and (line.startswith("From: ") or
120 line.startswith("from: "))):
120 line.startswith("from: "))):
121 user = line[6:]
121 user = line[6:]
122 format = "tag"
122 format = "tag"
123 elif (format != "tagdone" and (line.startswith("Date: ") or
123 elif (format != "tagdone" and (line.startswith("Date: ") or
124 line.startswith("date: "))):
124 line.startswith("date: "))):
125 date = line[6:]
125 date = line[6:]
126 format = "tag"
126 format = "tag"
127 elif format == "tag" and line == "":
127 elif format == "tag" and line == "":
128 # when looking for tags (subject: from: etc) they
128 # when looking for tags (subject: from: etc) they
129 # end once you find a blank line in the source
129 # end once you find a blank line in the source
130 format = "tagdone"
130 format = "tagdone"
131 elif message or line:
131 elif message or line:
132 message.append(line)
132 message.append(line)
133 comments.append(line)
133 comments.append(line)
134
134
135 eatdiff(message)
135 eatdiff(message)
136 eatdiff(comments)
136 eatdiff(comments)
137 eatempty(message)
137 eatempty(message)
138 eatempty(comments)
138 eatempty(comments)
139
139
140 # make sure message isn't empty
140 # make sure message isn't empty
141 if format and format.startswith("tag") and subject:
141 if format and format.startswith("tag") and subject:
142 message.insert(0, "")
142 message.insert(0, "")
143 message.insert(0, subject)
143 message.insert(0, subject)
144
144
145 self.message = message
145 self.message = message
146 self.comments = comments
146 self.comments = comments
147 self.user = user
147 self.user = user
148 self.date = date
148 self.date = date
149 self.parent = parent
149 self.parent = parent
150 self.haspatch = diffstart > 1
150 self.haspatch = diffstart > 1
151 self.plainmode = plainmode
151 self.plainmode = plainmode
152
152
153 def setuser(self, user):
153 def setuser(self, user):
154 if not self.updateheader(['From: ', '# User '], user):
154 if not self.updateheader(['From: ', '# User '], user):
155 try:
155 try:
156 patchheaderat = self.comments.index('# HG changeset patch')
156 patchheaderat = self.comments.index('# HG changeset patch')
157 self.comments.insert(patchheaderat + 1, '# User ' + user)
157 self.comments.insert(patchheaderat + 1, '# User ' + user)
158 except ValueError:
158 except ValueError:
159 if self.plainmode or self._hasheader(['Date: ']):
159 if self.plainmode or self._hasheader(['Date: ']):
160 self.comments = ['From: ' + user] + self.comments
160 self.comments = ['From: ' + user] + self.comments
161 else:
161 else:
162 tmp = ['# HG changeset patch', '# User ' + user, '']
162 tmp = ['# HG changeset patch', '# User ' + user, '']
163 self.comments = tmp + self.comments
163 self.comments = tmp + self.comments
164 self.user = user
164 self.user = user
165
165
166 def setdate(self, date):
166 def setdate(self, date):
167 if not self.updateheader(['Date: ', '# Date '], date):
167 if not self.updateheader(['Date: ', '# Date '], date):
168 try:
168 try:
169 patchheaderat = self.comments.index('# HG changeset patch')
169 patchheaderat = self.comments.index('# HG changeset patch')
170 self.comments.insert(patchheaderat + 1, '# Date ' + date)
170 self.comments.insert(patchheaderat + 1, '# Date ' + date)
171 except ValueError:
171 except ValueError:
172 if self.plainmode or self._hasheader(['From: ']):
172 if self.plainmode or self._hasheader(['From: ']):
173 self.comments = ['Date: ' + date] + self.comments
173 self.comments = ['Date: ' + date] + self.comments
174 else:
174 else:
175 tmp = ['# HG changeset patch', '# Date ' + date, '']
175 tmp = ['# HG changeset patch', '# Date ' + date, '']
176 self.comments = tmp + self.comments
176 self.comments = tmp + self.comments
177 self.date = date
177 self.date = date
178
178
179 def setparent(self, parent):
179 def setparent(self, parent):
180 if not self.updateheader(['# Parent '], parent):
180 if not self.updateheader(['# Parent '], parent):
181 try:
181 try:
182 patchheaderat = self.comments.index('# HG changeset patch')
182 patchheaderat = self.comments.index('# HG changeset patch')
183 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
183 self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
184 except ValueError:
184 except ValueError:
185 pass
185 pass
186 self.parent = parent
186 self.parent = parent
187
187
188 def setmessage(self, message):
188 def setmessage(self, message):
189 if self.comments:
189 if self.comments:
190 self._delmsg()
190 self._delmsg()
191 self.message = [message]
191 self.message = [message]
192 self.comments += self.message
192 self.comments += self.message
193
193
194 def updateheader(self, prefixes, new):
194 def updateheader(self, prefixes, new):
195 '''Update all references to a field in the patch header.
195 '''Update all references to a field in the patch header.
196 Return whether the field is present.'''
196 Return whether the field is present.'''
197 res = False
197 res = False
198 for prefix in prefixes:
198 for prefix in prefixes:
199 for i in xrange(len(self.comments)):
199 for i in xrange(len(self.comments)):
200 if self.comments[i].startswith(prefix):
200 if self.comments[i].startswith(prefix):
201 self.comments[i] = prefix + new
201 self.comments[i] = prefix + new
202 res = True
202 res = True
203 break
203 break
204 return res
204 return res
205
205
206 def _hasheader(self, prefixes):
206 def _hasheader(self, prefixes):
207 '''Check if a header starts with any of the given prefixes.'''
207 '''Check if a header starts with any of the given prefixes.'''
208 for prefix in prefixes:
208 for prefix in prefixes:
209 for comment in self.comments:
209 for comment in self.comments:
210 if comment.startswith(prefix):
210 if comment.startswith(prefix):
211 return True
211 return True
212 return False
212 return False
213
213
214 def __str__(self):
214 def __str__(self):
215 if not self.comments:
215 if not self.comments:
216 return ''
216 return ''
217 return '\n'.join(self.comments) + '\n\n'
217 return '\n'.join(self.comments) + '\n\n'
218
218
219 def _delmsg(self):
219 def _delmsg(self):
220 '''Remove existing message, keeping the rest of the comments fields.
220 '''Remove existing message, keeping the rest of the comments fields.
221 If comments contains 'subject: ', message will prepend
221 If comments contains 'subject: ', message will prepend
222 the field and a blank line.'''
222 the field and a blank line.'''
223 if self.message:
223 if self.message:
224 subj = 'subject: ' + self.message[0].lower()
224 subj = 'subject: ' + self.message[0].lower()
225 for i in xrange(len(self.comments)):
225 for i in xrange(len(self.comments)):
226 if subj == self.comments[i].lower():
226 if subj == self.comments[i].lower():
227 del self.comments[i]
227 del self.comments[i]
228 self.message = self.message[2:]
228 self.message = self.message[2:]
229 break
229 break
230 ci = 0
230 ci = 0
231 for mi in self.message:
231 for mi in self.message:
232 while mi != self.comments[ci]:
232 while mi != self.comments[ci]:
233 ci += 1
233 ci += 1
234 del self.comments[ci]
234 del self.comments[ci]
235
235
236 class queue(object):
236 class queue(object):
237 def __init__(self, ui, path, patchdir=None):
237 def __init__(self, ui, path, patchdir=None):
238 self.basepath = path
238 self.basepath = path
239 try:
239 try:
240 fh = open(os.path.join(path, 'patches.queue'))
240 fh = open(os.path.join(path, 'patches.queue'))
241 cur = fh.read().rstrip()
241 cur = fh.read().rstrip()
242 if not cur:
242 if not cur:
243 curpath = os.path.join(path, 'patches')
243 curpath = os.path.join(path, 'patches')
244 else:
244 else:
245 curpath = os.path.join(path, 'patches-' + cur)
245 curpath = os.path.join(path, 'patches-' + cur)
246 except IOError:
246 except IOError:
247 curpath = os.path.join(path, 'patches')
247 curpath = os.path.join(path, 'patches')
248 self.path = patchdir or curpath
248 self.path = patchdir or curpath
249 self.opener = util.opener(self.path)
249 self.opener = util.opener(self.path)
250 self.ui = ui
250 self.ui = ui
251 self.applied_dirty = 0
251 self.applied_dirty = 0
252 self.series_dirty = 0
252 self.series_dirty = 0
253 self.added = []
253 self.added = []
254 self.series_path = "series"
254 self.series_path = "series"
255 self.status_path = "status"
255 self.status_path = "status"
256 self.guards_path = "guards"
256 self.guards_path = "guards"
257 self.active_guards = None
257 self.active_guards = None
258 self.guards_dirty = False
258 self.guards_dirty = False
259 # Handle mq.git as a bool with extended values
259 # Handle mq.git as a bool with extended values
260 try:
260 try:
261 gitmode = ui.configbool('mq', 'git', None)
261 gitmode = ui.configbool('mq', 'git', None)
262 if gitmode is None:
262 if gitmode is None:
263 raise error.ConfigError()
263 raise error.ConfigError()
264 self.gitmode = gitmode and 'yes' or 'no'
264 self.gitmode = gitmode and 'yes' or 'no'
265 except error.ConfigError:
265 except error.ConfigError:
266 self.gitmode = ui.config('mq', 'git', 'auto').lower()
266 self.gitmode = ui.config('mq', 'git', 'auto').lower()
267 self.plainmode = ui.configbool('mq', 'plain', False)
267 self.plainmode = ui.configbool('mq', 'plain', False)
268
268
269 @util.propertycache
269 @util.propertycache
270 def applied(self):
270 def applied(self):
271 if os.path.exists(self.join(self.status_path)):
271 if os.path.exists(self.join(self.status_path)):
272 def parse(l):
272 def parse(l):
273 n, name = l.split(':', 1)
273 n, name = l.split(':', 1)
274 return statusentry(bin(n), name)
274 return statusentry(bin(n), name)
275 lines = self.opener(self.status_path).read().splitlines()
275 lines = self.opener(self.status_path).read().splitlines()
276 return [parse(l) for l in lines]
276 return [parse(l) for l in lines]
277 return []
277 return []
278
278
279 @util.propertycache
279 @util.propertycache
280 def full_series(self):
280 def full_series(self):
281 if os.path.exists(self.join(self.series_path)):
281 if os.path.exists(self.join(self.series_path)):
282 return self.opener(self.series_path).read().splitlines()
282 return self.opener(self.series_path).read().splitlines()
283 return []
283 return []
284
284
285 @util.propertycache
285 @util.propertycache
286 def series(self):
286 def series(self):
287 self.parse_series()
287 self.parse_series()
288 return self.series
288 return self.series
289
289
290 @util.propertycache
290 @util.propertycache
291 def series_guards(self):
291 def series_guards(self):
292 self.parse_series()
292 self.parse_series()
293 return self.series_guards
293 return self.series_guards
294
294
295 def invalidate(self):
295 def invalidate(self):
296 for a in 'applied full_series series series_guards'.split():
296 for a in 'applied full_series series series_guards'.split():
297 if a in self.__dict__:
297 if a in self.__dict__:
298 delattr(self, a)
298 delattr(self, a)
299 self.applied_dirty = 0
299 self.applied_dirty = 0
300 self.series_dirty = 0
300 self.series_dirty = 0
301 self.guards_dirty = False
301 self.guards_dirty = False
302 self.active_guards = None
302 self.active_guards = None
303
303
304 def diffopts(self, opts={}, patchfn=None):
304 def diffopts(self, opts={}, patchfn=None):
305 diffopts = patch.diffopts(self.ui, opts)
305 diffopts = patch.diffopts(self.ui, opts)
306 if self.gitmode == 'auto':
306 if self.gitmode == 'auto':
307 diffopts.upgrade = True
307 diffopts.upgrade = True
308 elif self.gitmode == 'keep':
308 elif self.gitmode == 'keep':
309 pass
309 pass
310 elif self.gitmode in ('yes', 'no'):
310 elif self.gitmode in ('yes', 'no'):
311 diffopts.git = self.gitmode == 'yes'
311 diffopts.git = self.gitmode == 'yes'
312 else:
312 else:
313 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
313 raise util.Abort(_('mq.git option can be auto/keep/yes/no'
314 ' got %s') % self.gitmode)
314 ' got %s') % self.gitmode)
315 if patchfn:
315 if patchfn:
316 diffopts = self.patchopts(diffopts, patchfn)
316 diffopts = self.patchopts(diffopts, patchfn)
317 return diffopts
317 return diffopts
318
318
319 def patchopts(self, diffopts, *patches):
319 def patchopts(self, diffopts, *patches):
320 """Return a copy of input diff options with git set to true if
320 """Return a copy of input diff options with git set to true if
321 referenced patch is a git patch and should be preserved as such.
321 referenced patch is a git patch and should be preserved as such.
322 """
322 """
323 diffopts = diffopts.copy()
323 diffopts = diffopts.copy()
324 if not diffopts.git and self.gitmode == 'keep':
324 if not diffopts.git and self.gitmode == 'keep':
325 for patchfn in patches:
325 for patchfn in patches:
326 patchf = self.opener(patchfn, 'r')
326 patchf = self.opener(patchfn, 'r')
327 # if the patch was a git patch, refresh it as a git patch
327 # if the patch was a git patch, refresh it as a git patch
328 for line in patchf:
328 for line in patchf:
329 if line.startswith('diff --git'):
329 if line.startswith('diff --git'):
330 diffopts.git = True
330 diffopts.git = True
331 break
331 break
332 patchf.close()
332 patchf.close()
333 return diffopts
333 return diffopts
334
334
335 def join(self, *p):
335 def join(self, *p):
336 return os.path.join(self.path, *p)
336 return os.path.join(self.path, *p)
337
337
338 def find_series(self, patch):
338 def find_series(self, patch):
339 def matchpatch(l):
339 def matchpatch(l):
340 l = l.split('#', 1)[0]
340 l = l.split('#', 1)[0]
341 return l.strip() == patch
341 return l.strip() == patch
342 for index, l in enumerate(self.full_series):
342 for index, l in enumerate(self.full_series):
343 if matchpatch(l):
343 if matchpatch(l):
344 return index
344 return index
345 return None
345 return None
346
346
347 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
347 guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
348
348
349 def parse_series(self):
349 def parse_series(self):
350 self.series = []
350 self.series = []
351 self.series_guards = []
351 self.series_guards = []
352 for l in self.full_series:
352 for l in self.full_series:
353 h = l.find('#')
353 h = l.find('#')
354 if h == -1:
354 if h == -1:
355 patch = l
355 patch = l
356 comment = ''
356 comment = ''
357 elif h == 0:
357 elif h == 0:
358 continue
358 continue
359 else:
359 else:
360 patch = l[:h]
360 patch = l[:h]
361 comment = l[h:]
361 comment = l[h:]
362 patch = patch.strip()
362 patch = patch.strip()
363 if patch:
363 if patch:
364 if patch in self.series:
364 if patch in self.series:
365 raise util.Abort(_('%s appears more than once in %s') %
365 raise util.Abort(_('%s appears more than once in %s') %
366 (patch, self.join(self.series_path)))
366 (patch, self.join(self.series_path)))
367 self.series.append(patch)
367 self.series.append(patch)
368 self.series_guards.append(self.guard_re.findall(comment))
368 self.series_guards.append(self.guard_re.findall(comment))
369
369
370 def check_guard(self, guard):
370 def check_guard(self, guard):
371 if not guard:
371 if not guard:
372 return _('guard cannot be an empty string')
372 return _('guard cannot be an empty string')
373 bad_chars = '# \t\r\n\f'
373 bad_chars = '# \t\r\n\f'
374 first = guard[0]
374 first = guard[0]
375 if first in '-+':
375 if first in '-+':
376 return (_('guard %r starts with invalid character: %r') %
376 return (_('guard %r starts with invalid character: %r') %
377 (guard, first))
377 (guard, first))
378 for c in bad_chars:
378 for c in bad_chars:
379 if c in guard:
379 if c in guard:
380 return _('invalid character in guard %r: %r') % (guard, c)
380 return _('invalid character in guard %r: %r') % (guard, c)
381
381
382 def set_active(self, guards):
382 def set_active(self, guards):
383 for guard in guards:
383 for guard in guards:
384 bad = self.check_guard(guard)
384 bad = self.check_guard(guard)
385 if bad:
385 if bad:
386 raise util.Abort(bad)
386 raise util.Abort(bad)
387 guards = sorted(set(guards))
387 guards = sorted(set(guards))
388 self.ui.debug('active guards: %s\n' % ' '.join(guards))
388 self.ui.debug('active guards: %s\n' % ' '.join(guards))
389 self.active_guards = guards
389 self.active_guards = guards
390 self.guards_dirty = True
390 self.guards_dirty = True
391
391
392 def active(self):
392 def active(self):
393 if self.active_guards is None:
393 if self.active_guards is None:
394 self.active_guards = []
394 self.active_guards = []
395 try:
395 try:
396 guards = self.opener(self.guards_path).read().split()
396 guards = self.opener(self.guards_path).read().split()
397 except IOError, err:
397 except IOError, err:
398 if err.errno != errno.ENOENT:
398 if err.errno != errno.ENOENT:
399 raise
399 raise
400 guards = []
400 guards = []
401 for i, guard in enumerate(guards):
401 for i, guard in enumerate(guards):
402 bad = self.check_guard(guard)
402 bad = self.check_guard(guard)
403 if bad:
403 if bad:
404 self.ui.warn('%s:%d: %s\n' %
404 self.ui.warn('%s:%d: %s\n' %
405 (self.join(self.guards_path), i + 1, bad))
405 (self.join(self.guards_path), i + 1, bad))
406 else:
406 else:
407 self.active_guards.append(guard)
407 self.active_guards.append(guard)
408 return self.active_guards
408 return self.active_guards
409
409
410 def set_guards(self, idx, guards):
410 def set_guards(self, idx, guards):
411 for g in guards:
411 for g in guards:
412 if len(g) < 2:
412 if len(g) < 2:
413 raise util.Abort(_('guard %r too short') % g)
413 raise util.Abort(_('guard %r too short') % g)
414 if g[0] not in '-+':
414 if g[0] not in '-+':
415 raise util.Abort(_('guard %r starts with invalid char') % g)
415 raise util.Abort(_('guard %r starts with invalid char') % g)
416 bad = self.check_guard(g[1:])
416 bad = self.check_guard(g[1:])
417 if bad:
417 if bad:
418 raise util.Abort(bad)
418 raise util.Abort(bad)
419 drop = self.guard_re.sub('', self.full_series[idx])
419 drop = self.guard_re.sub('', self.full_series[idx])
420 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
420 self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
421 self.parse_series()
421 self.parse_series()
422 self.series_dirty = True
422 self.series_dirty = True
423
423
424 def pushable(self, idx):
424 def pushable(self, idx):
425 if isinstance(idx, str):
425 if isinstance(idx, str):
426 idx = self.series.index(idx)
426 idx = self.series.index(idx)
427 patchguards = self.series_guards[idx]
427 patchguards = self.series_guards[idx]
428 if not patchguards:
428 if not patchguards:
429 return True, None
429 return True, None
430 guards = self.active()
430 guards = self.active()
431 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
431 exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
432 if exactneg:
432 if exactneg:
433 return False, exactneg[0]
433 return False, exactneg[0]
434 pos = [g for g in patchguards if g[0] == '+']
434 pos = [g for g in patchguards if g[0] == '+']
435 exactpos = [g for g in pos if g[1:] in guards]
435 exactpos = [g for g in pos if g[1:] in guards]
436 if pos:
436 if pos:
437 if exactpos:
437 if exactpos:
438 return True, exactpos[0]
438 return True, exactpos[0]
439 return False, pos
439 return False, pos
440 return True, ''
440 return True, ''
441
441
442 def explain_pushable(self, idx, all_patches=False):
442 def explain_pushable(self, idx, all_patches=False):
443 write = all_patches and self.ui.write or self.ui.warn
443 write = all_patches and self.ui.write or self.ui.warn
444 if all_patches or self.ui.verbose:
444 if all_patches or self.ui.verbose:
445 if isinstance(idx, str):
445 if isinstance(idx, str):
446 idx = self.series.index(idx)
446 idx = self.series.index(idx)
447 pushable, why = self.pushable(idx)
447 pushable, why = self.pushable(idx)
448 if all_patches and pushable:
448 if all_patches and pushable:
449 if why is None:
449 if why is None:
450 write(_('allowing %s - no guards in effect\n') %
450 write(_('allowing %s - no guards in effect\n') %
451 self.series[idx])
451 self.series[idx])
452 else:
452 else:
453 if not why:
453 if not why:
454 write(_('allowing %s - no matching negative guards\n') %
454 write(_('allowing %s - no matching negative guards\n') %
455 self.series[idx])
455 self.series[idx])
456 else:
456 else:
457 write(_('allowing %s - guarded by %r\n') %
457 write(_('allowing %s - guarded by %r\n') %
458 (self.series[idx], why))
458 (self.series[idx], why))
459 if not pushable:
459 if not pushable:
460 if why:
460 if why:
461 write(_('skipping %s - guarded by %r\n') %
461 write(_('skipping %s - guarded by %r\n') %
462 (self.series[idx], why))
462 (self.series[idx], why))
463 else:
463 else:
464 write(_('skipping %s - no matching guards\n') %
464 write(_('skipping %s - no matching guards\n') %
465 self.series[idx])
465 self.series[idx])
466
466
467 def save_dirty(self):
467 def save_dirty(self):
468 def write_list(items, path):
468 def write_list(items, path):
469 fp = self.opener(path, 'w')
469 fp = self.opener(path, 'w')
470 for i in items:
470 for i in items:
471 fp.write("%s\n" % i)
471 fp.write("%s\n" % i)
472 fp.close()
472 fp.close()
473 if self.applied_dirty:
473 if self.applied_dirty:
474 write_list(map(str, self.applied), self.status_path)
474 write_list(map(str, self.applied), self.status_path)
475 if self.series_dirty:
475 if self.series_dirty:
476 write_list(self.full_series, self.series_path)
476 write_list(self.full_series, self.series_path)
477 if self.guards_dirty:
477 if self.guards_dirty:
478 write_list(self.active_guards, self.guards_path)
478 write_list(self.active_guards, self.guards_path)
479 if self.added:
479 if self.added:
480 qrepo = self.qrepo()
480 qrepo = self.qrepo()
481 if qrepo:
481 if qrepo:
482 qrepo[None].add(self.added)
482 qrepo[None].add(self.added)
483 self.added = []
483 self.added = []
484
484
485 def removeundo(self, repo):
485 def removeundo(self, repo):
486 undo = repo.sjoin('undo')
486 undo = repo.sjoin('undo')
487 if not os.path.exists(undo):
487 if not os.path.exists(undo):
488 return
488 return
489 try:
489 try:
490 os.unlink(undo)
490 os.unlink(undo)
491 except OSError, inst:
491 except OSError, inst:
492 self.ui.warn(_('error removing undo: %s\n') % str(inst))
492 self.ui.warn(_('error removing undo: %s\n') % str(inst))
493
493
494 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
494 def printdiff(self, repo, diffopts, node1, node2=None, files=None,
495 fp=None, changes=None, opts={}):
495 fp=None, changes=None, opts={}):
496 stat = opts.get('stat')
496 stat = opts.get('stat')
497 m = cmdutil.match(repo, files, opts)
497 m = cmdutil.match(repo, files, opts)
498 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
498 cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
499 changes, stat, fp)
499 changes, stat, fp)
500
500
501 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
501 def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
502 # first try just applying the patch
502 # first try just applying the patch
503 (err, n) = self.apply(repo, [patch], update_status=False,
503 (err, n) = self.apply(repo, [patch], update_status=False,
504 strict=True, merge=rev)
504 strict=True, merge=rev)
505
505
506 if err == 0:
506 if err == 0:
507 return (err, n)
507 return (err, n)
508
508
509 if n is None:
509 if n is None:
510 raise util.Abort(_("apply failed for patch %s") % patch)
510 raise util.Abort(_("apply failed for patch %s") % patch)
511
511
512 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
512 self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
513
513
514 # apply failed, strip away that rev and merge.
514 # apply failed, strip away that rev and merge.
515 hg.clean(repo, head)
515 hg.clean(repo, head)
516 self.strip(repo, n, update=False, backup='strip')
516 self.strip(repo, n, update=False, backup='strip')
517
517
518 ctx = repo[rev]
518 ctx = repo[rev]
519 ret = hg.merge(repo, rev)
519 ret = hg.merge(repo, rev)
520 if ret:
520 if ret:
521 raise util.Abort(_("update returned %d") % ret)
521 raise util.Abort(_("update returned %d") % ret)
522 n = repo.commit(ctx.description(), ctx.user(), force=True)
522 n = repo.commit(ctx.description(), ctx.user(), force=True)
523 if n is None:
523 if n is None:
524 raise util.Abort(_("repo commit failed"))
524 raise util.Abort(_("repo commit failed"))
525 try:
525 try:
526 ph = patchheader(mergeq.join(patch), self.plainmode)
526 ph = patchheader(mergeq.join(patch), self.plainmode)
527 except:
527 except:
528 raise util.Abort(_("unable to read %s") % patch)
528 raise util.Abort(_("unable to read %s") % patch)
529
529
530 diffopts = self.patchopts(diffopts, patch)
530 diffopts = self.patchopts(diffopts, patch)
531 patchf = self.opener(patch, "w")
531 patchf = self.opener(patch, "w")
532 comments = str(ph)
532 comments = str(ph)
533 if comments:
533 if comments:
534 patchf.write(comments)
534 patchf.write(comments)
535 self.printdiff(repo, diffopts, head, n, fp=patchf)
535 self.printdiff(repo, diffopts, head, n, fp=patchf)
536 patchf.close()
536 patchf.close()
537 self.removeundo(repo)
537 self.removeundo(repo)
538 return (0, n)
538 return (0, n)
539
539
540 def qparents(self, repo, rev=None):
540 def qparents(self, repo, rev=None):
541 if rev is None:
541 if rev is None:
542 (p1, p2) = repo.dirstate.parents()
542 (p1, p2) = repo.dirstate.parents()
543 if p2 == nullid:
543 if p2 == nullid:
544 return p1
544 return p1
545 if not self.applied:
545 if not self.applied:
546 return None
546 return None
547 return self.applied[-1].node
547 return self.applied[-1].node
548 p1, p2 = repo.changelog.parents(rev)
548 p1, p2 = repo.changelog.parents(rev)
549 if p2 != nullid and p2 in [x.node for x in self.applied]:
549 if p2 != nullid and p2 in [x.node for x in self.applied]:
550 return p2
550 return p2
551 return p1
551 return p1
552
552
553 def mergepatch(self, repo, mergeq, series, diffopts):
553 def mergepatch(self, repo, mergeq, series, diffopts):
554 if not self.applied:
554 if not self.applied:
555 # each of the patches merged in will have two parents. This
555 # each of the patches merged in will have two parents. This
556 # can confuse the qrefresh, qdiff, and strip code because it
556 # can confuse the qrefresh, qdiff, and strip code because it
557 # needs to know which parent is actually in the patch queue.
557 # needs to know which parent is actually in the patch queue.
558 # so, we insert a merge marker with only one parent. This way
558 # so, we insert a merge marker with only one parent. This way
559 # the first patch in the queue is never a merge patch
559 # the first patch in the queue is never a merge patch
560 #
560 #
561 pname = ".hg.patches.merge.marker"
561 pname = ".hg.patches.merge.marker"
562 n = repo.commit('[mq]: merge marker', force=True)
562 n = repo.commit('[mq]: merge marker', force=True)
563 self.removeundo(repo)
563 self.removeundo(repo)
564 self.applied.append(statusentry(n, pname))
564 self.applied.append(statusentry(n, pname))
565 self.applied_dirty = 1
565 self.applied_dirty = 1
566
566
567 head = self.qparents(repo)
567 head = self.qparents(repo)
568
568
569 for patch in series:
569 for patch in series:
570 patch = mergeq.lookup(patch, strict=True)
570 patch = mergeq.lookup(patch, strict=True)
571 if not patch:
571 if not patch:
572 self.ui.warn(_("patch %s does not exist\n") % patch)
572 self.ui.warn(_("patch %s does not exist\n") % patch)
573 return (1, None)
573 return (1, None)
574 pushable, reason = self.pushable(patch)
574 pushable, reason = self.pushable(patch)
575 if not pushable:
575 if not pushable:
576 self.explain_pushable(patch, all_patches=True)
576 self.explain_pushable(patch, all_patches=True)
577 continue
577 continue
578 info = mergeq.isapplied(patch)
578 info = mergeq.isapplied(patch)
579 if not info:
579 if not info:
580 self.ui.warn(_("patch %s is not applied\n") % patch)
580 self.ui.warn(_("patch %s is not applied\n") % patch)
581 return (1, None)
581 return (1, None)
582 rev = info[1]
582 rev = info[1]
583 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
583 err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
584 if head:
584 if head:
585 self.applied.append(statusentry(head, patch))
585 self.applied.append(statusentry(head, patch))
586 self.applied_dirty = 1
586 self.applied_dirty = 1
587 if err:
587 if err:
588 return (err, head)
588 return (err, head)
589 self.save_dirty()
589 self.save_dirty()
590 return (0, head)
590 return (0, head)
591
591
592 def patch(self, repo, patchfile):
592 def patch(self, repo, patchfile):
593 '''Apply patchfile to the working directory.
593 '''Apply patchfile to the working directory.
594 patchfile: name of patch file'''
594 patchfile: name of patch file'''
595 files = {}
595 files = {}
596 try:
596 try:
597 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
597 fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
598 files=files, eolmode=None)
598 files=files, eolmode=None)
599 except Exception, inst:
599 except Exception, inst:
600 self.ui.note(str(inst) + '\n')
600 self.ui.note(str(inst) + '\n')
601 if not self.ui.verbose:
601 if not self.ui.verbose:
602 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
602 self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
603 return (False, files, False)
603 return (False, files, False)
604
604
605 return (True, files, fuzz)
605 return (True, files, fuzz)
606
606
607 def apply(self, repo, series, list=False, update_status=True,
607 def apply(self, repo, series, list=False, update_status=True,
608 strict=False, patchdir=None, merge=None, all_files=None):
608 strict=False, patchdir=None, merge=None, all_files=None):
609 wlock = lock = tr = None
609 wlock = lock = tr = None
610 try:
610 try:
611 wlock = repo.wlock()
611 wlock = repo.wlock()
612 lock = repo.lock()
612 lock = repo.lock()
613 tr = repo.transaction("qpush")
613 tr = repo.transaction("qpush")
614 try:
614 try:
615 ret = self._apply(repo, series, list, update_status,
615 ret = self._apply(repo, series, list, update_status,
616 strict, patchdir, merge, all_files=all_files)
616 strict, patchdir, merge, all_files=all_files)
617 tr.close()
617 tr.close()
618 self.save_dirty()
618 self.save_dirty()
619 return ret
619 return ret
620 except:
620 except:
621 try:
621 try:
622 tr.abort()
622 tr.abort()
623 finally:
623 finally:
624 repo.invalidate()
624 repo.invalidate()
625 repo.dirstate.invalidate()
625 repo.dirstate.invalidate()
626 raise
626 raise
627 finally:
627 finally:
628 release(tr, lock, wlock)
628 release(tr, lock, wlock)
629 self.removeundo(repo)
629 self.removeundo(repo)
630
630
631 def _apply(self, repo, series, list=False, update_status=True,
631 def _apply(self, repo, series, list=False, update_status=True,
632 strict=False, patchdir=None, merge=None, all_files=None):
632 strict=False, patchdir=None, merge=None, all_files=None):
633 '''returns (error, hash)
633 '''returns (error, hash)
634 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
634 error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
635 # TODO unify with commands.py
635 # TODO unify with commands.py
636 if not patchdir:
636 if not patchdir:
637 patchdir = self.path
637 patchdir = self.path
638 err = 0
638 err = 0
639 n = None
639 n = None
640 for patchname in series:
640 for patchname in series:
641 pushable, reason = self.pushable(patchname)
641 pushable, reason = self.pushable(patchname)
642 if not pushable:
642 if not pushable:
643 self.explain_pushable(patchname, all_patches=True)
643 self.explain_pushable(patchname, all_patches=True)
644 continue
644 continue
645 self.ui.status(_("applying %s\n") % patchname)
645 self.ui.status(_("applying %s\n") % patchname)
646 pf = os.path.join(patchdir, patchname)
646 pf = os.path.join(patchdir, patchname)
647
647
648 try:
648 try:
649 ph = patchheader(self.join(patchname), self.plainmode)
649 ph = patchheader(self.join(patchname), self.plainmode)
650 except:
650 except:
651 self.ui.warn(_("unable to read %s\n") % patchname)
651 self.ui.warn(_("unable to read %s\n") % patchname)
652 err = 1
652 err = 1
653 break
653 break
654
654
655 message = ph.message
655 message = ph.message
656 if not message:
656 if not message:
657 message = "imported patch %s\n" % patchname
657 message = "imported patch %s\n" % patchname
658 else:
658 else:
659 if list:
659 if list:
660 message.append("\nimported patch %s" % patchname)
660 message.append("\nimported patch %s" % patchname)
661 message = '\n'.join(message)
661 message = '\n'.join(message)
662
662
663 if ph.haspatch:
663 if ph.haspatch:
664 (patcherr, files, fuzz) = self.patch(repo, pf)
664 (patcherr, files, fuzz) = self.patch(repo, pf)
665 if all_files is not None:
665 if all_files is not None:
666 all_files.update(files)
666 all_files.update(files)
667 patcherr = not patcherr
667 patcherr = not patcherr
668 else:
668 else:
669 self.ui.warn(_("patch %s is empty\n") % patchname)
669 self.ui.warn(_("patch %s is empty\n") % patchname)
670 patcherr, files, fuzz = 0, [], 0
670 patcherr, files, fuzz = 0, [], 0
671
671
672 if merge and files:
672 if merge and files:
673 # Mark as removed/merged and update dirstate parent info
673 # Mark as removed/merged and update dirstate parent info
674 removed = []
674 removed = []
675 merged = []
675 merged = []
676 for f in files:
676 for f in files:
677 if os.path.exists(repo.wjoin(f)):
677 if os.path.lexists(repo.wjoin(f)):
678 merged.append(f)
678 merged.append(f)
679 else:
679 else:
680 removed.append(f)
680 removed.append(f)
681 for f in removed:
681 for f in removed:
682 repo.dirstate.remove(f)
682 repo.dirstate.remove(f)
683 for f in merged:
683 for f in merged:
684 repo.dirstate.merge(f)
684 repo.dirstate.merge(f)
685 p1, p2 = repo.dirstate.parents()
685 p1, p2 = repo.dirstate.parents()
686 repo.dirstate.setparents(p1, merge)
686 repo.dirstate.setparents(p1, merge)
687
687
688 files = patch.updatedir(self.ui, repo, files)
688 files = patch.updatedir(self.ui, repo, files)
689 match = cmdutil.matchfiles(repo, files or [])
689 match = cmdutil.matchfiles(repo, files or [])
690 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
690 n = repo.commit(message, ph.user, ph.date, match=match, force=True)
691
691
692 if n is None:
692 if n is None:
693 raise util.Abort(_("repo commit failed"))
693 raise util.Abort(_("repo commit failed"))
694
694
695 if update_status:
695 if update_status:
696 self.applied.append(statusentry(n, patchname))
696 self.applied.append(statusentry(n, patchname))
697
697
698 if patcherr:
698 if patcherr:
699 self.ui.warn(_("patch failed, rejects left in working dir\n"))
699 self.ui.warn(_("patch failed, rejects left in working dir\n"))
700 err = 2
700 err = 2
701 break
701 break
702
702
703 if fuzz and strict:
703 if fuzz and strict:
704 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
704 self.ui.warn(_("fuzz found when applying patch, stopping\n"))
705 err = 3
705 err = 3
706 break
706 break
707 return (err, n)
707 return (err, n)
708
708
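# Illustrative sketch (not part of mq.py): one way a caller might map the
# (error, hash) pair documented in _apply()'s docstring above to messages.
# The helper name and the message wording are assumptions for illustration.
def report_apply_result(err, node):
    messages = {
        0: "applied cleanly",
        1: "unable to read patch",
        2: "patch failed to apply",
        3: "patch applied with fuzz",
    }
    return "%s (tip: %s)" % (messages.get(err, "unknown error"), node)
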
709 def _cleanup(self, patches, numrevs, keep=False):
709 def _cleanup(self, patches, numrevs, keep=False):
710 if not keep:
710 if not keep:
711 r = self.qrepo()
711 r = self.qrepo()
712 if r:
712 if r:
713 r[None].remove(patches, True)
713 r[None].remove(patches, True)
714 else:
714 else:
715 for p in patches:
715 for p in patches:
716 os.unlink(self.join(p))
716 os.unlink(self.join(p))
717
717
718 if numrevs:
718 if numrevs:
719 del self.applied[:numrevs]
719 del self.applied[:numrevs]
720 self.applied_dirty = 1
720 self.applied_dirty = 1
721
721
722 for i in sorted([self.find_series(p) for p in patches], reverse=True):
722 for i in sorted([self.find_series(p) for p in patches], reverse=True):
723 del self.full_series[i]
723 del self.full_series[i]
724 self.parse_series()
724 self.parse_series()
725 self.series_dirty = 1
725 self.series_dirty = 1
726
726
727 def _revpatches(self, repo, revs):
727 def _revpatches(self, repo, revs):
728 firstrev = repo[self.applied[0].node].rev()
728 firstrev = repo[self.applied[0].node].rev()
729 patches = []
729 patches = []
730 for i, rev in enumerate(revs):
730 for i, rev in enumerate(revs):
731
731
732 if rev < firstrev:
732 if rev < firstrev:
733 raise util.Abort(_('revision %d is not managed') % rev)
733 raise util.Abort(_('revision %d is not managed') % rev)
734
734
735 ctx = repo[rev]
735 ctx = repo[rev]
736 base = self.applied[i].node
736 base = self.applied[i].node
737 if ctx.node() != base:
737 if ctx.node() != base:
738 msg = _('cannot delete revision %d above applied patches')
738 msg = _('cannot delete revision %d above applied patches')
739 raise util.Abort(msg % rev)
739 raise util.Abort(msg % rev)
740
740
741 patch = self.applied[i].name
741 patch = self.applied[i].name
742 for fmt in ('[mq]: %s', 'imported patch %s'):
742 for fmt in ('[mq]: %s', 'imported patch %s'):
743 if ctx.description() == fmt % patch:
743 if ctx.description() == fmt % patch:
744 msg = _('patch %s finalized without changeset message\n')
744 msg = _('patch %s finalized without changeset message\n')
745 repo.ui.status(msg % patch)
745 repo.ui.status(msg % patch)
746 break
746 break
747
747
748 patches.append(patch)
748 patches.append(patch)
749 return patches
749 return patches
750
750
751 def finish(self, repo, revs):
751 def finish(self, repo, revs):
752 patches = self._revpatches(repo, sorted(revs))
752 patches = self._revpatches(repo, sorted(revs))
753 self._cleanup(patches, len(patches))
753 self._cleanup(patches, len(patches))
754
754
755 def delete(self, repo, patches, opts):
755 def delete(self, repo, patches, opts):
756 if not patches and not opts.get('rev'):
756 if not patches and not opts.get('rev'):
757 raise util.Abort(_('qdelete requires at least one revision or '
757 raise util.Abort(_('qdelete requires at least one revision or '
758 'patch name'))
758 'patch name'))
759
759
760 realpatches = []
760 realpatches = []
761 for patch in patches:
761 for patch in patches:
762 patch = self.lookup(patch, strict=True)
762 patch = self.lookup(patch, strict=True)
763 info = self.isapplied(patch)
763 info = self.isapplied(patch)
764 if info:
764 if info:
765 raise util.Abort(_("cannot delete applied patch %s") % patch)
765 raise util.Abort(_("cannot delete applied patch %s") % patch)
766 if patch not in self.series:
766 if patch not in self.series:
767 raise util.Abort(_("patch %s not in series file") % patch)
767 raise util.Abort(_("patch %s not in series file") % patch)
768 realpatches.append(patch)
768 realpatches.append(patch)
769
769
770 numrevs = 0
770 numrevs = 0
771 if opts.get('rev'):
771 if opts.get('rev'):
772 if not self.applied:
772 if not self.applied:
773 raise util.Abort(_('no patches applied'))
773 raise util.Abort(_('no patches applied'))
774 revs = cmdutil.revrange(repo, opts['rev'])
774 revs = cmdutil.revrange(repo, opts['rev'])
775 if len(revs) > 1 and revs[0] > revs[1]:
775 if len(revs) > 1 and revs[0] > revs[1]:
776 revs.reverse()
776 revs.reverse()
777 revpatches = self._revpatches(repo, revs)
777 revpatches = self._revpatches(repo, revs)
778 realpatches += revpatches
778 realpatches += revpatches
779 numrevs = len(revpatches)
779 numrevs = len(revpatches)
780
780
781 self._cleanup(realpatches, numrevs, opts.get('keep'))
781 self._cleanup(realpatches, numrevs, opts.get('keep'))
782
782
783 def check_toppatch(self, repo):
783 def check_toppatch(self, repo):
784 if self.applied:
784 if self.applied:
785 top = self.applied[-1].node
785 top = self.applied[-1].node
786 patch = self.applied[-1].name
786 patch = self.applied[-1].name
787 pp = repo.dirstate.parents()
787 pp = repo.dirstate.parents()
788 if top not in pp:
788 if top not in pp:
789 raise util.Abort(_("working directory revision is not qtip"))
789 raise util.Abort(_("working directory revision is not qtip"))
790 return top, patch
790 return top, patch
791 return None, None
791 return None, None
792
792
793 def check_localchanges(self, repo, force=False, refresh=True):
793 def check_localchanges(self, repo, force=False, refresh=True):
794 m, a, r, d = repo.status()[:4]
794 m, a, r, d = repo.status()[:4]
795 if (m or a or r or d) and not force:
795 if (m or a or r or d) and not force:
796 if refresh:
796 if refresh:
797 raise util.Abort(_("local changes found, refresh first"))
797 raise util.Abort(_("local changes found, refresh first"))
798 else:
798 else:
799 raise util.Abort(_("local changes found"))
799 raise util.Abort(_("local changes found"))
800 return m, a, r, d
800 return m, a, r, d
801
801
802 _reserved = ('series', 'status', 'guards')
802 _reserved = ('series', 'status', 'guards')
803 def check_reserved_name(self, name):
803 def check_reserved_name(self, name):
804 if (name in self._reserved or name.startswith('.hg')
804 if (name in self._reserved or name.startswith('.hg')
805 or name.startswith('.mq') or '#' in name or ':' in name):
805 or name.startswith('.mq') or '#' in name or ':' in name):
806 raise util.Abort(_('"%s" cannot be used as the name of a patch')
806 raise util.Abort(_('"%s" cannot be used as the name of a patch')
807 % name)
807 % name)
808
808
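# Illustrative sketch (not part of mq.py): the rule enforced by
# check_reserved_name() above, restated as a standalone predicate so the
# rejected cases are easy to see. The function name is hypothetical.
def is_reserved_patch_name(name, reserved=('series', 'status', 'guards')):
    return (name in reserved or name.startswith('.hg')
            or name.startswith('.mq') or '#' in name or ':' in name)

# is_reserved_patch_name('series')       -> True  (reserved queue file)
# is_reserved_patch_name('.hgignore')    -> True  (starts with .hg)
# is_reserved_patch_name('fix:encoding') -> True  (contains ':')
# is_reserved_patch_name('fix-encoding') -> False (allowed)
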
809 def new(self, repo, patchfn, *pats, **opts):
809 def new(self, repo, patchfn, *pats, **opts):
810 """options:
810 """options:
811 msg: a string or a no-argument function returning a string
811 msg: a string or a no-argument function returning a string
812 """
812 """
813 msg = opts.get('msg')
813 msg = opts.get('msg')
814 user = opts.get('user')
814 user = opts.get('user')
815 date = opts.get('date')
815 date = opts.get('date')
816 if date:
816 if date:
817 date = util.parsedate(date)
817 date = util.parsedate(date)
818 diffopts = self.diffopts({'git': opts.get('git')})
818 diffopts = self.diffopts({'git': opts.get('git')})
819 self.check_reserved_name(patchfn)
819 self.check_reserved_name(patchfn)
820 if os.path.exists(self.join(patchfn)):
820 if os.path.exists(self.join(patchfn)):
821 raise util.Abort(_('patch "%s" already exists') % patchfn)
821 raise util.Abort(_('patch "%s" already exists') % patchfn)
822 if opts.get('include') or opts.get('exclude') or pats:
822 if opts.get('include') or opts.get('exclude') or pats:
823 match = cmdutil.match(repo, pats, opts)
823 match = cmdutil.match(repo, pats, opts)
824 # detect missing files in pats
824 # detect missing files in pats
825 def badfn(f, msg):
825 def badfn(f, msg):
826 raise util.Abort('%s: %s' % (f, msg))
826 raise util.Abort('%s: %s' % (f, msg))
827 match.bad = badfn
827 match.bad = badfn
828 m, a, r, d = repo.status(match=match)[:4]
828 m, a, r, d = repo.status(match=match)[:4]
829 else:
829 else:
830 m, a, r, d = self.check_localchanges(repo, force=True)
830 m, a, r, d = self.check_localchanges(repo, force=True)
831 match = cmdutil.matchfiles(repo, m + a + r)
831 match = cmdutil.matchfiles(repo, m + a + r)
832 if len(repo[None].parents()) > 1:
832 if len(repo[None].parents()) > 1:
833 raise util.Abort(_('cannot manage merge changesets'))
833 raise util.Abort(_('cannot manage merge changesets'))
834 commitfiles = m + a + r
834 commitfiles = m + a + r
835 self.check_toppatch(repo)
835 self.check_toppatch(repo)
836 insert = self.full_series_end()
836 insert = self.full_series_end()
837 wlock = repo.wlock()
837 wlock = repo.wlock()
838 try:
838 try:
839 # if patch file write fails, abort early
839 # if patch file write fails, abort early
840 p = self.opener(patchfn, "w")
840 p = self.opener(patchfn, "w")
841 try:
841 try:
842 if self.plainmode:
842 if self.plainmode:
843 if user:
843 if user:
844 p.write("From: " + user + "\n")
844 p.write("From: " + user + "\n")
845 if not date:
845 if not date:
846 p.write("\n")
846 p.write("\n")
847 if date:
847 if date:
848 p.write("Date: %d %d\n\n" % date)
848 p.write("Date: %d %d\n\n" % date)
849 else:
849 else:
850 p.write("# HG changeset patch\n")
850 p.write("# HG changeset patch\n")
851 p.write("# Parent "
851 p.write("# Parent "
852 + hex(repo[None].parents()[0].node()) + "\n")
852 + hex(repo[None].parents()[0].node()) + "\n")
853 if user:
853 if user:
854 p.write("# User " + user + "\n")
854 p.write("# User " + user + "\n")
855 if date:
855 if date:
856 p.write("# Date %s %s\n\n" % date)
856 p.write("# Date %s %s\n\n" % date)
857 if hasattr(msg, '__call__'):
857 if hasattr(msg, '__call__'):
858 msg = msg()
858 msg = msg()
859 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
859 commitmsg = msg and msg or ("[mq]: %s" % patchfn)
860 n = repo.commit(commitmsg, user, date, match=match, force=True)
860 n = repo.commit(commitmsg, user, date, match=match, force=True)
861 if n is None:
861 if n is None:
862 raise util.Abort(_("repo commit failed"))
862 raise util.Abort(_("repo commit failed"))
863 try:
863 try:
864 self.full_series[insert:insert] = [patchfn]
864 self.full_series[insert:insert] = [patchfn]
865 self.applied.append(statusentry(n, patchfn))
865 self.applied.append(statusentry(n, patchfn))
866 self.parse_series()
866 self.parse_series()
867 self.series_dirty = 1
867 self.series_dirty = 1
868 self.applied_dirty = 1
868 self.applied_dirty = 1
869 if msg:
869 if msg:
870 msg = msg + "\n\n"
870 msg = msg + "\n\n"
871 p.write(msg)
871 p.write(msg)
872 if commitfiles:
872 if commitfiles:
873 parent = self.qparents(repo, n)
873 parent = self.qparents(repo, n)
874 chunks = patch.diff(repo, node1=parent, node2=n,
874 chunks = patch.diff(repo, node1=parent, node2=n,
875 match=match, opts=diffopts)
875 match=match, opts=diffopts)
876 for chunk in chunks:
876 for chunk in chunks:
877 p.write(chunk)
877 p.write(chunk)
878 p.close()
878 p.close()
879 wlock.release()
879 wlock.release()
880 wlock = None
880 wlock = None
881 r = self.qrepo()
881 r = self.qrepo()
882 if r:
882 if r:
883 r[None].add([patchfn])
883 r[None].add([patchfn])
884 except:
884 except:
885 repo.rollback()
885 repo.rollback()
886 raise
886 raise
887 except Exception:
887 except Exception:
888 patchpath = self.join(patchfn)
888 patchpath = self.join(patchfn)
889 try:
889 try:
890 os.unlink(patchpath)
890 os.unlink(patchpath)
891 except:
891 except:
892 self.ui.warn(_('error unlinking %s\n') % patchpath)
892 self.ui.warn(_('error unlinking %s\n') % patchpath)
893 raise
893 raise
894 self.removeundo(repo)
894 self.removeundo(repo)
895 finally:
895 finally:
896 release(wlock)
896 release(wlock)
897
897
898 def strip(self, repo, rev, update=True, backup="all", force=None):
898 def strip(self, repo, rev, update=True, backup="all", force=None):
899 wlock = lock = None
899 wlock = lock = None
900 try:
900 try:
901 wlock = repo.wlock()
901 wlock = repo.wlock()
902 lock = repo.lock()
902 lock = repo.lock()
903
903
904 if update:
904 if update:
905 self.check_localchanges(repo, force=force, refresh=False)
905 self.check_localchanges(repo, force=force, refresh=False)
906 urev = self.qparents(repo, rev)
906 urev = self.qparents(repo, rev)
907 hg.clean(repo, urev)
907 hg.clean(repo, urev)
908 repo.dirstate.write()
908 repo.dirstate.write()
909
909
910 self.removeundo(repo)
910 self.removeundo(repo)
911 repair.strip(self.ui, repo, rev, backup)
911 repair.strip(self.ui, repo, rev, backup)
912 # strip may have unbundled a set of backed up revisions after
912 # strip may have unbundled a set of backed up revisions after
913 # the actual strip
913 # the actual strip
914 self.removeundo(repo)
914 self.removeundo(repo)
915 finally:
915 finally:
916 release(lock, wlock)
916 release(lock, wlock)
917
917
918 def isapplied(self, patch):
918 def isapplied(self, patch):
919 """returns (index, rev, patch)"""
919 """returns (index, rev, patch)"""
920 for i, a in enumerate(self.applied):
920 for i, a in enumerate(self.applied):
921 if a.name == patch:
921 if a.name == patch:
922 return (i, a.node, a.name)
922 return (i, a.node, a.name)
923 return None
923 return None
924
924
925 # if the exact patch name does not exist, we try a few
925 # if the exact patch name does not exist, we try a few
926 # variations. If strict is passed, we try only #1
926 # variations. If strict is passed, we try only #1
927 #
927 #
928 # 1) a number to indicate an offset in the series file
928 # 1) a number to indicate an offset in the series file
929 # 2) a unique substring of the patch name was given
929 # 2) a unique substring of the patch name was given
930 # 3) patchname[-+]num to indicate an offset in the series file
930 # 3) patchname[-+]num to indicate an offset in the series file
931 def lookup(self, patch, strict=False):
931 def lookup(self, patch, strict=False):
932 patch = patch and str(patch)
932 patch = patch and str(patch)
933
933
934 def partial_name(s):
934 def partial_name(s):
935 if s in self.series:
935 if s in self.series:
936 return s
936 return s
937 matches = [x for x in self.series if s in x]
937 matches = [x for x in self.series if s in x]
938 if len(matches) > 1:
938 if len(matches) > 1:
939 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
939 self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
940 for m in matches:
940 for m in matches:
941 self.ui.warn(' %s\n' % m)
941 self.ui.warn(' %s\n' % m)
942 return None
942 return None
943 if matches:
943 if matches:
944 return matches[0]
944 return matches[0]
945 if self.series and self.applied:
945 if self.series and self.applied:
946 if s == 'qtip':
946 if s == 'qtip':
947 return self.series[self.series_end(True)-1]
947 return self.series[self.series_end(True)-1]
948 if s == 'qbase':
948 if s == 'qbase':
949 return self.series[0]
949 return self.series[0]
950 return None
950 return None
951
951
952 if patch is None:
952 if patch is None:
953 return None
953 return None
954 if patch in self.series:
954 if patch in self.series:
955 return patch
955 return patch
956
956
957 if not os.path.isfile(self.join(patch)):
957 if not os.path.isfile(self.join(patch)):
958 try:
958 try:
959 sno = int(patch)
959 sno = int(patch)
960 except (ValueError, OverflowError):
960 except (ValueError, OverflowError):
961 pass
961 pass
962 else:
962 else:
963 if -len(self.series) <= sno < len(self.series):
963 if -len(self.series) <= sno < len(self.series):
964 return self.series[sno]
964 return self.series[sno]
965
965
966 if not strict:
966 if not strict:
967 res = partial_name(patch)
967 res = partial_name(patch)
968 if res:
968 if res:
969 return res
969 return res
970 minus = patch.rfind('-')
970 minus = patch.rfind('-')
971 if minus >= 0:
971 if minus >= 0:
972 res = partial_name(patch[:minus])
972 res = partial_name(patch[:minus])
973 if res:
973 if res:
974 i = self.series.index(res)
974 i = self.series.index(res)
975 try:
975 try:
976 off = int(patch[minus + 1:] or 1)
976 off = int(patch[minus + 1:] or 1)
977 except (ValueError, OverflowError):
977 except (ValueError, OverflowError):
978 pass
978 pass
979 else:
979 else:
980 if i - off >= 0:
980 if i - off >= 0:
981 return self.series[i - off]
981 return self.series[i - off]
982 plus = patch.rfind('+')
982 plus = patch.rfind('+')
983 if plus >= 0:
983 if plus >= 0:
984 res = partial_name(patch[:plus])
984 res = partial_name(patch[:plus])
985 if res:
985 if res:
986 i = self.series.index(res)
986 i = self.series.index(res)
987 try:
987 try:
988 off = int(patch[plus + 1:] or 1)
988 off = int(patch[plus + 1:] or 1)
989 except (ValueError, OverflowError):
989 except (ValueError, OverflowError):
990 pass
990 pass
991 else:
991 else:
992 if i + off < len(self.series):
992 if i + off < len(self.series):
993 return self.series[i + off]
993 return self.series[i + off]
994 raise util.Abort(_("patch %s not in series") % patch)
994 raise util.Abort(_("patch %s not in series") % patch)
995
995
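# Illustrative sketch (not part of mq.py): a simplified model of the
# name-resolution rules implemented in lookup() above, using a hypothetical
# series list. Only the numeric-index and patchname[-+]num cases are
# modelled; bounds checking and substring matching are omitted.
def resolve(series, ref):
    if ref in series:
        return ref
    try:                               # plain number: index into the series
        return series[int(ref)]
    except ValueError:
        pass
    for sep in ('-', '+'):             # patchname-N / patchname+N offsets
        pos = ref.rfind(sep)
        if pos > 0 and ref[:pos] in series:
            i = series.index(ref[:pos])
            off = int(ref[pos + 1:] or 1)
            return series[i - off] if sep == '-' else series[i + off]
    return None

# series = ['a.patch', 'b.patch', 'c.patch']
# resolve(series, '1')         -> 'b.patch'
# resolve(series, 'c.patch-2') -> 'a.patch'
# resolve(series, 'a.patch+1') -> 'b.patch'
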
996 def push(self, repo, patch=None, force=False, list=False,
996 def push(self, repo, patch=None, force=False, list=False,
997 mergeq=None, all=False, move=False):
997 mergeq=None, all=False, move=False):
998 diffopts = self.diffopts()
998 diffopts = self.diffopts()
999 wlock = repo.wlock()
999 wlock = repo.wlock()
1000 try:
1000 try:
1001 heads = []
1001 heads = []
1002 for b, ls in repo.branchmap().iteritems():
1002 for b, ls in repo.branchmap().iteritems():
1003 heads += ls
1003 heads += ls
1004 if not heads:
1004 if not heads:
1005 heads = [nullid]
1005 heads = [nullid]
1006 if repo.dirstate.parents()[0] not in heads:
1006 if repo.dirstate.parents()[0] not in heads:
1007 self.ui.status(_("(working directory not at a head)\n"))
1007 self.ui.status(_("(working directory not at a head)\n"))
1008
1008
1009 if not self.series:
1009 if not self.series:
1010 self.ui.warn(_('no patches in series\n'))
1010 self.ui.warn(_('no patches in series\n'))
1011 return 0
1011 return 0
1012
1012
1013 patch = self.lookup(patch)
1013 patch = self.lookup(patch)
1014 # Suppose our series file is: A B C and the current 'top'
1014 # Suppose our series file is: A B C and the current 'top'
1015 # patch is B. qpush C should be performed (moving forward)
1015 # patch is B. qpush C should be performed (moving forward)
1016 # qpush B is a NOP (no change) qpush A is an error (can't
1016 # qpush B is a NOP (no change) qpush A is an error (can't
1017 # go backwards with qpush)
1017 # go backwards with qpush)
1018 if patch:
1018 if patch:
1019 info = self.isapplied(patch)
1019 info = self.isapplied(patch)
1020 if info:
1020 if info:
1021 if info[0] < len(self.applied) - 1:
1021 if info[0] < len(self.applied) - 1:
1022 raise util.Abort(
1022 raise util.Abort(
1023 _("cannot push to a previous patch: %s") % patch)
1023 _("cannot push to a previous patch: %s") % patch)
1024 self.ui.warn(
1024 self.ui.warn(
1025 _('qpush: %s is already at the top\n') % patch)
1025 _('qpush: %s is already at the top\n') % patch)
1026 return 0
1026 return 0
1027 pushable, reason = self.pushable(patch)
1027 pushable, reason = self.pushable(patch)
1028 if not pushable:
1028 if not pushable:
1029 if reason:
1029 if reason:
1030 reason = _('guarded by %r') % reason
1030 reason = _('guarded by %r') % reason
1031 else:
1031 else:
1032 reason = _('no matching guards')
1032 reason = _('no matching guards')
1033 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1033 self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
1034 return 1
1034 return 1
1035 elif all:
1035 elif all:
1036 patch = self.series[-1]
1036 patch = self.series[-1]
1037 if self.isapplied(patch):
1037 if self.isapplied(patch):
1038 self.ui.warn(_('all patches are currently applied\n'))
1038 self.ui.warn(_('all patches are currently applied\n'))
1039 return 0
1039 return 0
1040
1040
1041 # Following the above example, starting at 'top' of B:
1041 # Following the above example, starting at 'top' of B:
1042 # qpush should be performed (pushes C), but a subsequent
1042 # qpush should be performed (pushes C), but a subsequent
1043 # qpush without an argument is an error (nothing to
1043 # qpush without an argument is an error (nothing to
1044 # apply). This allows a loop of "...while hg qpush..." to
1044 # apply). This allows a loop of "...while hg qpush..." to
1045 # work as it detects an error when done
1045 # work as it detects an error when done
1046 start = self.series_end()
1046 start = self.series_end()
1047 if start == len(self.series):
1047 if start == len(self.series):
1048 self.ui.warn(_('patch series already fully applied\n'))
1048 self.ui.warn(_('patch series already fully applied\n'))
1049 return 1
1049 return 1
1050 if not force:
1050 if not force:
1051 self.check_localchanges(repo)
1051 self.check_localchanges(repo)
1052
1052
1053 if move:
1053 if move:
1054 if not patch:
1054 if not patch:
1055 raise util.Abort(_("please specify the patch to move"))
1055 raise util.Abort(_("please specify the patch to move"))
1056 for i, rpn in enumerate(self.full_series[start:]):
1056 for i, rpn in enumerate(self.full_series[start:]):
1057 # strip markers for patch guards
1057 # strip markers for patch guards
1058 if self.guard_re.split(rpn, 1)[0] == patch:
1058 if self.guard_re.split(rpn, 1)[0] == patch:
1059 break
1059 break
1060 index = start + i
1060 index = start + i
1061 assert index < len(self.full_series)
1061 assert index < len(self.full_series)
1062 fullpatch = self.full_series[index]
1062 fullpatch = self.full_series[index]
1063 del self.full_series[index]
1063 del self.full_series[index]
1064 self.full_series.insert(start, fullpatch)
1064 self.full_series.insert(start, fullpatch)
1065 self.parse_series()
1065 self.parse_series()
1066 self.series_dirty = 1
1066 self.series_dirty = 1
1067
1067
1068 self.applied_dirty = 1
1068 self.applied_dirty = 1
1069 if start > 0:
1069 if start > 0:
1070 self.check_toppatch(repo)
1070 self.check_toppatch(repo)
1071 if not patch:
1071 if not patch:
1072 patch = self.series[start]
1072 patch = self.series[start]
1073 end = start + 1
1073 end = start + 1
1074 else:
1074 else:
1075 end = self.series.index(patch, start) + 1
1075 end = self.series.index(patch, start) + 1
1076
1076
1077 s = self.series[start:end]
1077 s = self.series[start:end]
1078 all_files = set()
1078 all_files = set()
1079 try:
1079 try:
1080 if mergeq:
1080 if mergeq:
1081 ret = self.mergepatch(repo, mergeq, s, diffopts)
1081 ret = self.mergepatch(repo, mergeq, s, diffopts)
1082 else:
1082 else:
1083 ret = self.apply(repo, s, list, all_files=all_files)
1083 ret = self.apply(repo, s, list, all_files=all_files)
1084 except:
1084 except:
1085 self.ui.warn(_('cleaning up working directory...'))
1085 self.ui.warn(_('cleaning up working directory...'))
1086 node = repo.dirstate.parents()[0]
1086 node = repo.dirstate.parents()[0]
1087 hg.revert(repo, node, None)
1087 hg.revert(repo, node, None)
1088 # only remove unknown files that we know we touched or
1088 # only remove unknown files that we know we touched or
1089 # created while patching
1089 # created while patching
1090 for f in all_files:
1090 for f in all_files:
1091 if f not in repo.dirstate:
1091 if f not in repo.dirstate:
1092 try:
1092 try:
1093 util.unlink(repo.wjoin(f))
1093 util.unlink(repo.wjoin(f))
1094 except OSError, inst:
1094 except OSError, inst:
1095 if inst.errno != errno.ENOENT:
1095 if inst.errno != errno.ENOENT:
1096 raise
1096 raise
1097 self.ui.warn(_('done\n'))
1097 self.ui.warn(_('done\n'))
1098 raise
1098 raise
1099
1099
1100 if not self.applied:
1100 if not self.applied:
1101 return ret[0]
1101 return ret[0]
1102 top = self.applied[-1].name
1102 top = self.applied[-1].name
1103 if ret[0] and ret[0] > 1:
1103 if ret[0] and ret[0] > 1:
1104 msg = _("errors during apply, please fix and refresh %s\n")
1104 msg = _("errors during apply, please fix and refresh %s\n")
1105 self.ui.write(msg % top)
1105 self.ui.write(msg % top)
1106 else:
1106 else:
1107 self.ui.write(_("now at: %s\n") % top)
1107 self.ui.write(_("now at: %s\n") % top)
1108 return ret[0]
1108 return ret[0]
1109
1109
1110 finally:
1110 finally:
1111 wlock.release()
1111 wlock.release()
1112
1112
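# Illustrative sketch (not part of mq.py): the direction rule described by
# the "series file is: A B C" comment inside push() above, modelled on a
# hypothetical queue where the applied patches are a prefix of the series.
def classify_qpush(series, applied, target):
    pos, top = series.index(target), len(applied) - 1
    if pos < top:
        return 'error: cannot push to a previous patch'
    if pos == top:
        return 'no-op: already at the top'
    return 'push forward'

# series  = ['A', 'B', 'C']; applied = ['A', 'B']   (top is B)
# classify_qpush(series, applied, 'C') -> 'push forward'
# classify_qpush(series, applied, 'B') -> 'no-op: already at the top'
# classify_qpush(series, applied, 'A') -> 'error: cannot push to a previous patch'
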
1113 def pop(self, repo, patch=None, force=False, update=True, all=False):
1113 def pop(self, repo, patch=None, force=False, update=True, all=False):
1114 wlock = repo.wlock()
1114 wlock = repo.wlock()
1115 try:
1115 try:
1116 if patch:
1116 if patch:
1117 # index, rev, patch
1117 # index, rev, patch
1118 info = self.isapplied(patch)
1118 info = self.isapplied(patch)
1119 if not info:
1119 if not info:
1120 patch = self.lookup(patch)
1120 patch = self.lookup(patch)
1121 info = self.isapplied(patch)
1121 info = self.isapplied(patch)
1122 if not info:
1122 if not info:
1123 raise util.Abort(_("patch %s is not applied") % patch)
1123 raise util.Abort(_("patch %s is not applied") % patch)
1124
1124
1125 if not self.applied:
1125 if not self.applied:
1126 # Allow qpop -a to work repeatedly,
1126 # Allow qpop -a to work repeatedly,
1127 # but not qpop without an argument
1127 # but not qpop without an argument
1128 self.ui.warn(_("no patches applied\n"))
1128 self.ui.warn(_("no patches applied\n"))
1129 return not all
1129 return not all
1130
1130
1131 if all:
1131 if all:
1132 start = 0
1132 start = 0
1133 elif patch:
1133 elif patch:
1134 start = info[0] + 1
1134 start = info[0] + 1
1135 else:
1135 else:
1136 start = len(self.applied) - 1
1136 start = len(self.applied) - 1
1137
1137
1138 if start >= len(self.applied):
1138 if start >= len(self.applied):
1139 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1139 self.ui.warn(_("qpop: %s is already at the top\n") % patch)
1140 return
1140 return
1141
1141
1142 if not update:
1142 if not update:
1143 parents = repo.dirstate.parents()
1143 parents = repo.dirstate.parents()
1144 rr = [x.node for x in self.applied]
1144 rr = [x.node for x in self.applied]
1145 for p in parents:
1145 for p in parents:
1146 if p in rr:
1146 if p in rr:
1147 self.ui.warn(_("qpop: forcing dirstate update\n"))
1147 self.ui.warn(_("qpop: forcing dirstate update\n"))
1148 update = True
1148 update = True
1149 else:
1149 else:
1150 parents = [p.node() for p in repo[None].parents()]
1150 parents = [p.node() for p in repo[None].parents()]
1151 needupdate = False
1151 needupdate = False
1152 for entry in self.applied[start:]:
1152 for entry in self.applied[start:]:
1153 if entry.node in parents:
1153 if entry.node in parents:
1154 needupdate = True
1154 needupdate = True
1155 break
1155 break
1156 update = needupdate
1156 update = needupdate
1157
1157
1158 if not force and update:
1158 if not force and update:
1159 self.check_localchanges(repo)
1159 self.check_localchanges(repo)
1160
1160
1161 self.applied_dirty = 1
1161 self.applied_dirty = 1
1162 end = len(self.applied)
1162 end = len(self.applied)
1163 rev = self.applied[start].node
1163 rev = self.applied[start].node
1164 if update:
1164 if update:
1165 top = self.check_toppatch(repo)[0]
1165 top = self.check_toppatch(repo)[0]
1166
1166
1167 try:
1167 try:
1168 heads = repo.changelog.heads(rev)
1168 heads = repo.changelog.heads(rev)
1169 except error.LookupError:
1169 except error.LookupError:
1170 node = short(rev)
1170 node = short(rev)
1171 raise util.Abort(_('trying to pop unknown node %s') % node)
1171 raise util.Abort(_('trying to pop unknown node %s') % node)
1172
1172
1173 if heads != [self.applied[-1].node]:
1173 if heads != [self.applied[-1].node]:
1174 raise util.Abort(_("popping would remove a revision not "
1174 raise util.Abort(_("popping would remove a revision not "
1175 "managed by this patch queue"))
1175 "managed by this patch queue"))
1176
1176
1177 # we know there are no local changes, so we can make a simplified
1177 # we know there are no local changes, so we can make a simplified
1178 # form of hg.update.
1178 # form of hg.update.
1179 if update:
1179 if update:
1180 qp = self.qparents(repo, rev)
1180 qp = self.qparents(repo, rev)
1181 ctx = repo[qp]
1181 ctx = repo[qp]
1182 m, a, r, d = repo.status(qp, top)[:4]
1182 m, a, r, d = repo.status(qp, top)[:4]
1183 if d:
1183 if d:
1184 raise util.Abort(_("deletions found between repo revs"))
1184 raise util.Abort(_("deletions found between repo revs"))
1185 for f in a:
1185 for f in a:
1186 try:
1186 try:
1187 util.unlink(repo.wjoin(f))
1187 util.unlink(repo.wjoin(f))
1188 except OSError, e:
1188 except OSError, e:
1189 if e.errno != errno.ENOENT:
1189 if e.errno != errno.ENOENT:
1190 raise
1190 raise
1191 repo.dirstate.forget(f)
1191 repo.dirstate.forget(f)
1192 for f in m + r:
1192 for f in m + r:
1193 fctx = ctx[f]
1193 fctx = ctx[f]
1194 repo.wwrite(f, fctx.data(), fctx.flags())
1194 repo.wwrite(f, fctx.data(), fctx.flags())
1195 repo.dirstate.normal(f)
1195 repo.dirstate.normal(f)
1196 repo.dirstate.setparents(qp, nullid)
1196 repo.dirstate.setparents(qp, nullid)
1197 for patch in reversed(self.applied[start:end]):
1197 for patch in reversed(self.applied[start:end]):
1198 self.ui.status(_("popping %s\n") % patch.name)
1198 self.ui.status(_("popping %s\n") % patch.name)
1199 del self.applied[start:end]
1199 del self.applied[start:end]
1200 self.strip(repo, rev, update=False, backup='strip')
1200 self.strip(repo, rev, update=False, backup='strip')
1201 if self.applied:
1201 if self.applied:
1202 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1202 self.ui.write(_("now at: %s\n") % self.applied[-1].name)
1203 else:
1203 else:
1204 self.ui.write(_("patch queue now empty\n"))
1204 self.ui.write(_("patch queue now empty\n"))
1205 finally:
1205 finally:
1206 wlock.release()
1206 wlock.release()
1207
1207
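# Illustrative sketch (not part of mq.py): the "return not all" convention at
# the top of pop() above. Popping an empty queue is only reported as a failure
# exit status when a specific patch (or a bare qpop) was requested.
def empty_queue_status(all):
    return not all    # False (success) for 'qpop -a', True (failure) otherwise

# empty_queue_status(all=True)  -> False   # repeated 'hg qpop -a' still succeeds
# empty_queue_status(all=False) -> True    # bare 'hg qpop' reports an error
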
1208 def diff(self, repo, pats, opts):
1208 def diff(self, repo, pats, opts):
1209 top, patch = self.check_toppatch(repo)
1209 top, patch = self.check_toppatch(repo)
1210 if not top:
1210 if not top:
1211 self.ui.write(_("no patches applied\n"))
1211 self.ui.write(_("no patches applied\n"))
1212 return
1212 return
1213 qp = self.qparents(repo, top)
1213 qp = self.qparents(repo, top)
1214 if opts.get('reverse'):
1214 if opts.get('reverse'):
1215 node1, node2 = None, qp
1215 node1, node2 = None, qp
1216 else:
1216 else:
1217 node1, node2 = qp, None
1217 node1, node2 = qp, None
1218 diffopts = self.diffopts(opts, patch)
1218 diffopts = self.diffopts(opts, patch)
1219 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1219 self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts)
1220
1220
1221 def refresh(self, repo, pats=None, **opts):
1221 def refresh(self, repo, pats=None, **opts):
1222 if not self.applied:
1222 if not self.applied:
1223 self.ui.write(_("no patches applied\n"))
1223 self.ui.write(_("no patches applied\n"))
1224 return 1
1224 return 1
1225 msg = opts.get('msg', '').rstrip()
1225 msg = opts.get('msg', '').rstrip()
1226 newuser = opts.get('user')
1226 newuser = opts.get('user')
1227 newdate = opts.get('date')
1227 newdate = opts.get('date')
1228 if newdate:
1228 if newdate:
1229 newdate = '%d %d' % util.parsedate(newdate)
1229 newdate = '%d %d' % util.parsedate(newdate)
1230 wlock = repo.wlock()
1230 wlock = repo.wlock()
1231
1231
1232 try:
1232 try:
1233 self.check_toppatch(repo)
1233 self.check_toppatch(repo)
1234 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1234 (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
1235 if repo.changelog.heads(top) != [top]:
1235 if repo.changelog.heads(top) != [top]:
1236 raise util.Abort(_("cannot refresh a revision with children"))
1236 raise util.Abort(_("cannot refresh a revision with children"))
1237
1237
1238 cparents = repo.changelog.parents(top)
1238 cparents = repo.changelog.parents(top)
1239 patchparent = self.qparents(repo, top)
1239 patchparent = self.qparents(repo, top)
1240 ph = patchheader(self.join(patchfn), self.plainmode)
1240 ph = patchheader(self.join(patchfn), self.plainmode)
1241 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1241 diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
1242 if msg:
1242 if msg:
1243 ph.setmessage(msg)
1243 ph.setmessage(msg)
1244 if newuser:
1244 if newuser:
1245 ph.setuser(newuser)
1245 ph.setuser(newuser)
1246 if newdate:
1246 if newdate:
1247 ph.setdate(newdate)
1247 ph.setdate(newdate)
1248 ph.setparent(hex(patchparent))
1248 ph.setparent(hex(patchparent))
1249
1249
1250 # only commit new patch when write is complete
1250 # only commit new patch when write is complete
1251 patchf = self.opener(patchfn, 'w', atomictemp=True)
1251 patchf = self.opener(patchfn, 'w', atomictemp=True)
1252
1252
1253 comments = str(ph)
1253 comments = str(ph)
1254 if comments:
1254 if comments:
1255 patchf.write(comments)
1255 patchf.write(comments)
1256
1256
1257 # update the dirstate in place, strip off the qtip commit
1257 # update the dirstate in place, strip off the qtip commit
1258 # and then commit.
1258 # and then commit.
1259 #
1259 #
1260 # this should really read:
1260 # this should really read:
1261 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1261 # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
1262 # but we do it backwards to take advantage of manifest/chlog
1262 # but we do it backwards to take advantage of manifest/chlog
1263 # caching against the next repo.status call
1263 # caching against the next repo.status call
1264 mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
1264 mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
1265 changes = repo.changelog.read(top)
1265 changes = repo.changelog.read(top)
1266 man = repo.manifest.read(changes[0])
1266 man = repo.manifest.read(changes[0])
1267 aaa = aa[:]
1267 aaa = aa[:]
1268 matchfn = cmdutil.match(repo, pats, opts)
1268 matchfn = cmdutil.match(repo, pats, opts)
1269 # in short mode, we only diff the files included in the
1269 # in short mode, we only diff the files included in the
1270 # patch already plus specified files
1270 # patch already plus specified files
1271 if opts.get('short'):
1271 if opts.get('short'):
1272 # if amending a patch, we start with existing
1272 # if amending a patch, we start with existing
1273 # files plus specified files - unfiltered
1273 # files plus specified files - unfiltered
1274 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1274 match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
1275 # filter with include/exclude options
1275 # filter with include/exclude options
1276 matchfn = cmdutil.match(repo, opts=opts)
1276 matchfn = cmdutil.match(repo, opts=opts)
1277 else:
1277 else:
1278 match = cmdutil.matchall(repo)
1278 match = cmdutil.matchall(repo)
1279 m, a, r, d = repo.status(match=match)[:4]
1279 m, a, r, d = repo.status(match=match)[:4]
1280
1280
1281 # we might end up with files that were added between
1281 # we might end up with files that were added between
1282 # qtip and the dirstate parent, but then changed in the
1282 # qtip and the dirstate parent, but then changed in the
1283 # local dirstate. in this case, we want them to only
1283 # local dirstate. in this case, we want them to only
1284 # show up in the added section
1284 # show up in the added section
1285 for x in m:
1285 for x in m:
1286 if x not in aa:
1286 if x not in aa:
1287 mm.append(x)
1287 mm.append(x)
1288 # we might end up with files added by the local dirstate that
1288 # we might end up with files added by the local dirstate that
1289 # were deleted by the patch. In this case, they should only
1289 # were deleted by the patch. In this case, they should only
1290 # show up in the changed section.
1290 # show up in the changed section.
1291 for x in a:
1291 for x in a:
1292 if x in dd:
1292 if x in dd:
1293 del dd[dd.index(x)]
1293 del dd[dd.index(x)]
1294 mm.append(x)
1294 mm.append(x)
1295 else:
1295 else:
1296 aa.append(x)
1296 aa.append(x)
1297 # make sure any files deleted in the local dirstate
1297 # make sure any files deleted in the local dirstate
1298 # are not in the add or change column of the patch
1298 # are not in the add or change column of the patch
1299 forget = []
1299 forget = []
1300 for x in d + r:
1300 for x in d + r:
1301 if x in aa:
1301 if x in aa:
1302 del aa[aa.index(x)]
1302 del aa[aa.index(x)]
1303 forget.append(x)
1303 forget.append(x)
1304 continue
1304 continue
1305 elif x in mm:
1305 elif x in mm:
1306 del mm[mm.index(x)]
1306 del mm[mm.index(x)]
1307 dd.append(x)
1307 dd.append(x)
1308
1308
1309 m = list(set(mm))
1309 m = list(set(mm))
1310 r = list(set(dd))
1310 r = list(set(dd))
1311 a = list(set(aa))
1311 a = list(set(aa))
1312 c = [filter(matchfn, l) for l in (m, a, r)]
1312 c = [filter(matchfn, l) for l in (m, a, r)]
1313 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1313 match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
1314 chunks = patch.diff(repo, patchparent, match=match,
1314 chunks = patch.diff(repo, patchparent, match=match,
1315 changes=c, opts=diffopts)
1315 changes=c, opts=diffopts)
1316 for chunk in chunks:
1316 for chunk in chunks:
1317 patchf.write(chunk)
1317 patchf.write(chunk)
1318
1318
1319 try:
1319 try:
1320 if diffopts.git or diffopts.upgrade:
1320 if diffopts.git or diffopts.upgrade:
1321 copies = {}
1321 copies = {}
1322 for dst in a:
1322 for dst in a:
1323 src = repo.dirstate.copied(dst)
1323 src = repo.dirstate.copied(dst)
1324 # during qfold, the source file for copies may
1324 # during qfold, the source file for copies may
1325 # be removed. Treat this as a simple add.
1325 # be removed. Treat this as a simple add.
1326 if src is not None and src in repo.dirstate:
1326 if src is not None and src in repo.dirstate:
1327 copies.setdefault(src, []).append(dst)
1327 copies.setdefault(src, []).append(dst)
1328 repo.dirstate.add(dst)
1328 repo.dirstate.add(dst)
1329 # remember the copies between patchparent and qtip
1329 # remember the copies between patchparent and qtip
1330 for dst in aaa:
1330 for dst in aaa:
1331 f = repo.file(dst)
1331 f = repo.file(dst)
1332 src = f.renamed(man[dst])
1332 src = f.renamed(man[dst])
1333 if src:
1333 if src:
1334 copies.setdefault(src[0], []).extend(
1334 copies.setdefault(src[0], []).extend(
1335 copies.get(dst, []))
1335 copies.get(dst, []))
1336 if dst in a:
1336 if dst in a:
1337 copies[src[0]].append(dst)
1337 copies[src[0]].append(dst)
1338 # we can't copy a file created by the patch itself
1338 # we can't copy a file created by the patch itself
1339 if dst in copies:
1339 if dst in copies:
1340 del copies[dst]
1340 del copies[dst]
1341 for src, dsts in copies.iteritems():
1341 for src, dsts in copies.iteritems():
1342 for dst in dsts:
1342 for dst in dsts:
1343 repo.dirstate.copy(src, dst)
1343 repo.dirstate.copy(src, dst)
1344 else:
1344 else:
1345 for dst in a:
1345 for dst in a:
1346 repo.dirstate.add(dst)
1346 repo.dirstate.add(dst)
1347 # Drop useless copy information
1347 # Drop useless copy information
1348 for f in list(repo.dirstate.copies()):
1348 for f in list(repo.dirstate.copies()):
1349 repo.dirstate.copy(None, f)
1349 repo.dirstate.copy(None, f)
1350 for f in r:
1350 for f in r:
1351 repo.dirstate.remove(f)
1351 repo.dirstate.remove(f)
1352 # if the patch excludes a modified file, mark that
1352 # if the patch excludes a modified file, mark that
1353 # file with mtime=0 so status can see it.
1353 # file with mtime=0 so status can see it.
1354 mm = []
1354 mm = []
1355 for i in xrange(len(m)-1, -1, -1):
1355 for i in xrange(len(m)-1, -1, -1):
1356 if not matchfn(m[i]):
1356 if not matchfn(m[i]):
1357 mm.append(m[i])
1357 mm.append(m[i])
1358 del m[i]
1358 del m[i]
1359 for f in m:
1359 for f in m:
1360 repo.dirstate.normal(f)
1360 repo.dirstate.normal(f)
1361 for f in mm:
1361 for f in mm:
1362 repo.dirstate.normallookup(f)
1362 repo.dirstate.normallookup(f)
1363 for f in forget:
1363 for f in forget:
1364 repo.dirstate.forget(f)
1364 repo.dirstate.forget(f)
1365
1365
1366 if not msg:
1366 if not msg:
1367 if not ph.message:
1367 if not ph.message:
1368 message = "[mq]: %s\n" % patchfn
1368 message = "[mq]: %s\n" % patchfn
1369 else:
1369 else:
1370 message = "\n".join(ph.message)
1370 message = "\n".join(ph.message)
1371 else:
1371 else:
1372 message = msg
1372 message = msg
1373
1373
1374 user = ph.user or changes[1]
1374 user = ph.user or changes[1]
1375
1375
1376 # assumes strip can roll itself back if interrupted
1376 # assumes strip can roll itself back if interrupted
1377 repo.dirstate.setparents(*cparents)
1377 repo.dirstate.setparents(*cparents)
1378 self.applied.pop()
1378 self.applied.pop()
1379 self.applied_dirty = 1
1379 self.applied_dirty = 1
1380 self.strip(repo, top, update=False,
1380 self.strip(repo, top, update=False,
1381 backup='strip')
1381 backup='strip')
1382 except:
1382 except:
1383 repo.dirstate.invalidate()
1383 repo.dirstate.invalidate()
1384 raise
1384 raise
1385
1385
1386 try:
1386 try:
1387 # might be nice to attempt to roll back strip after this
1387 # might be nice to attempt to roll back strip after this
1388 patchf.rename()
1388 patchf.rename()
1389 n = repo.commit(message, user, ph.date, match=match,
1389 n = repo.commit(message, user, ph.date, match=match,
1390 force=True)
1390 force=True)
1391 self.applied.append(statusentry(n, patchfn))
1391 self.applied.append(statusentry(n, patchfn))
1392 except:
1392 except:
1393 ctx = repo[cparents[0]]
1393 ctx = repo[cparents[0]]
1394 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1394 repo.dirstate.rebuild(ctx.node(), ctx.manifest())
1395 self.save_dirty()
1395 self.save_dirty()
1396 self.ui.warn(_('refresh interrupted while patch was popped! '
1396 self.ui.warn(_('refresh interrupted while patch was popped! '
1397 '(revert --all, qpush to recover)\n'))
1397 '(revert --all, qpush to recover)\n'))
1398 raise
1398 raise
1399 finally:
1399 finally:
1400 wlock.release()
1400 wlock.release()
1401 self.removeundo(repo)
1401 self.removeundo(repo)
1402
1402
1403 def init(self, repo, create=False):
1403 def init(self, repo, create=False):
1404 if not create and os.path.isdir(self.path):
1404 if not create and os.path.isdir(self.path):
1405 raise util.Abort(_("patch queue directory already exists"))
1405 raise util.Abort(_("patch queue directory already exists"))
1406 try:
1406 try:
1407 os.mkdir(self.path)
1407 os.mkdir(self.path)
1408 except OSError, inst:
1408 except OSError, inst:
1409 if inst.errno != errno.EEXIST or not create:
1409 if inst.errno != errno.EEXIST or not create:
1410 raise
1410 raise
1411 if create:
1411 if create:
1412 return self.qrepo(create=True)
1412 return self.qrepo(create=True)
1413
1413
1414 def unapplied(self, repo, patch=None):
1414 def unapplied(self, repo, patch=None):
1415 if patch and patch not in self.series:
1415 if patch and patch not in self.series:
1416 raise util.Abort(_("patch %s is not in series file") % patch)
1416 raise util.Abort(_("patch %s is not in series file") % patch)
1417 if not patch:
1417 if not patch:
1418 start = self.series_end()
1418 start = self.series_end()
1419 else:
1419 else:
1420 start = self.series.index(patch) + 1
1420 start = self.series.index(patch) + 1
1421 unapplied = []
1421 unapplied = []
1422 for i in xrange(start, len(self.series)):
1422 for i in xrange(start, len(self.series)):
1423 pushable, reason = self.pushable(i)
1423 pushable, reason = self.pushable(i)
1424 if pushable:
1424 if pushable:
1425 unapplied.append((i, self.series[i]))
1425 unapplied.append((i, self.series[i]))
1426 self.explain_pushable(i)
1426 self.explain_pushable(i)
1427 return unapplied
1427 return unapplied
1428
1428
1429 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1429 def qseries(self, repo, missing=None, start=0, length=None, status=None,
1430 summary=False):
1430 summary=False):
1431 def displayname(pfx, patchname, state):
1431 def displayname(pfx, patchname, state):
1432 if pfx:
1432 if pfx:
1433 self.ui.write(pfx)
1433 self.ui.write(pfx)
1434 if summary:
1434 if summary:
1435 ph = patchheader(self.join(patchname), self.plainmode)
1435 ph = patchheader(self.join(patchname), self.plainmode)
1436 msg = ph.message and ph.message[0] or ''
1436 msg = ph.message and ph.message[0] or ''
1437 if self.ui.formatted():
1437 if self.ui.formatted():
1438 width = util.termwidth() - len(pfx) - len(patchname) - 2
1438 width = util.termwidth() - len(pfx) - len(patchname) - 2
1439 if width > 0:
1439 if width > 0:
1440 msg = util.ellipsis(msg, width)
1440 msg = util.ellipsis(msg, width)
1441 else:
1441 else:
1442 msg = ''
1442 msg = ''
1443 self.ui.write(patchname, label='qseries.' + state)
1443 self.ui.write(patchname, label='qseries.' + state)
1444 self.ui.write(': ')
1444 self.ui.write(': ')
1445 self.ui.write(msg, label='qseries.message.' + state)
1445 self.ui.write(msg, label='qseries.message.' + state)
1446 else:
1446 else:
1447 self.ui.write(patchname, label='qseries.' + state)
1447 self.ui.write(patchname, label='qseries.' + state)
1448 self.ui.write('\n')
1448 self.ui.write('\n')
1449
1449
1450 applied = set([p.name for p in self.applied])
1450 applied = set([p.name for p in self.applied])
1451 if length is None:
1451 if length is None:
1452 length = len(self.series) - start
1452 length = len(self.series) - start
1453 if not missing:
1453 if not missing:
1454 if self.ui.verbose:
1454 if self.ui.verbose:
1455 idxwidth = len(str(start + length - 1))
1455 idxwidth = len(str(start + length - 1))
1456 for i in xrange(start, start + length):
1456 for i in xrange(start, start + length):
1457 patch = self.series[i]
1457 patch = self.series[i]
1458 if patch in applied:
1458 if patch in applied:
1459 char, state = 'A', 'applied'
1459 char, state = 'A', 'applied'
1460 elif self.pushable(i)[0]:
1460 elif self.pushable(i)[0]:
1461 char, state = 'U', 'unapplied'
1461 char, state = 'U', 'unapplied'
1462 else:
1462 else:
1463 char, state = 'G', 'guarded'
1463 char, state = 'G', 'guarded'
1464 pfx = ''
1464 pfx = ''
1465 if self.ui.verbose:
1465 if self.ui.verbose:
1466 pfx = '%*d %s ' % (idxwidth, i, char)
1466 pfx = '%*d %s ' % (idxwidth, i, char)
1467 elif status and status != char:
1467 elif status and status != char:
1468 continue
1468 continue
1469 displayname(pfx, patch, state)
1469 displayname(pfx, patch, state)
1470 else:
1470 else:
1471 msng_list = []
1471 msng_list = []
1472 for root, dirs, files in os.walk(self.path):
1472 for root, dirs, files in os.walk(self.path):
1473 d = root[len(self.path) + 1:]
1473 d = root[len(self.path) + 1:]
1474 for f in files:
1474 for f in files:
1475 fl = os.path.join(d, f)
1475 fl = os.path.join(d, f)
1476 if (fl not in self.series and
1476 if (fl not in self.series and
1477 fl not in (self.status_path, self.series_path,
1477 fl not in (self.status_path, self.series_path,
1478 self.guards_path)
1478 self.guards_path)
1479 and not fl.startswith('.')):
1479 and not fl.startswith('.')):
1480 msng_list.append(fl)
1480 msng_list.append(fl)
1481 for x in sorted(msng_list):
1481 for x in sorted(msng_list):
1482 pfx = self.ui.verbose and ('D ') or ''
1482 pfx = self.ui.verbose and ('D ') or ''
1483 displayname(pfx, x, 'missing')
1483 displayname(pfx, x, 'missing')
1484
1484
1485 def issaveline(self, l):
1485 def issaveline(self, l):
1486 if l.name == '.hg.patches.save.line':
1486 if l.name == '.hg.patches.save.line':
1487 return True
1487 return True
1488
1488
1489 def qrepo(self, create=False):
1489 def qrepo(self, create=False):
1490 ui = self.ui.copy()
1490 ui = self.ui.copy()
1491 ui.setconfig('paths', 'default', '', overlay=False)
1491 ui.setconfig('paths', 'default', '', overlay=False)
1492 ui.setconfig('paths', 'default-push', '', overlay=False)
1492 ui.setconfig('paths', 'default-push', '', overlay=False)
1493 if create or os.path.isdir(self.join(".hg")):
1493 if create or os.path.isdir(self.join(".hg")):
1494 return hg.repository(ui, path=self.path, create=create)
1494 return hg.repository(ui, path=self.path, create=create)
1495
1495
1496 def restore(self, repo, rev, delete=None, qupdate=None):
1496 def restore(self, repo, rev, delete=None, qupdate=None):
1497 desc = repo[rev].description().strip()
1497 desc = repo[rev].description().strip()
1498 lines = desc.splitlines()
1498 lines = desc.splitlines()
1499 i = 0
1499 i = 0
1500 datastart = None
1500 datastart = None
1501 series = []
1501 series = []
1502 applied = []
1502 applied = []
1503 qpp = None
1503 qpp = None
1504 for i, line in enumerate(lines):
1504 for i, line in enumerate(lines):
1505 if line == 'Patch Data:':
1505 if line == 'Patch Data:':
1506 datastart = i + 1
1506 datastart = i + 1
1507 elif line.startswith('Dirstate:'):
1507 elif line.startswith('Dirstate:'):
1508 l = line.rstrip()
1508 l = line.rstrip()
1509 l = l[10:].split(' ')
1509 l = l[10:].split(' ')
1510 qpp = [bin(x) for x in l]
1510 qpp = [bin(x) for x in l]
1511 elif datastart != None:
1511 elif datastart != None:
                l = line.rstrip()
                n, name = l.split(':', 1)
                if n:
                    applied.append(statusentry(bin(n), name))
                else:
                    series.append(l)
        if datastart is None:
            self.ui.warn(_("No saved patch data found\n"))
            return 1
        self.ui.warn(_("restoring status: %s\n") % lines[0])
        self.full_series = series
        self.applied = applied
        self.parse_series()
        self.series_dirty = 1
        self.applied_dirty = 1
        heads = repo.changelog.heads()
        if delete:
            if rev not in heads:
                self.ui.warn(_("save entry has children, leaving it alone\n"))
            else:
                self.ui.warn(_("removing save entry %s\n") % short(rev))
                pp = repo.dirstate.parents()
                if rev in pp:
                    update = True
                else:
                    update = False
                self.strip(repo, rev, update=update, backup='strip')
        if qpp:
            self.ui.warn(_("saved queue repository parents: %s %s\n") %
                         (short(qpp[0]), short(qpp[1])))
            if qupdate:
                self.ui.status(_("queue directory updating\n"))
                r = self.qrepo()
                if not r:
                    self.ui.warn(_("Unable to load queue repository\n"))
                    return 1
                hg.clean(r, qpp[0])

    def save(self, repo, msg=None):
        if not self.applied:
            self.ui.warn(_("save: no patches applied, exiting\n"))
            return 1
        if self.issaveline(self.applied[-1]):
            self.ui.warn(_("status is already saved\n"))
            return 1

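        # Serialize the queue state into the commit message of a special
        # save changeset; restore() parses this format back out.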
        if not msg:
            msg = _("hg patches saved state")
        else:
            msg = "hg patches: " + msg.rstrip('\r\n')
        r = self.qrepo()
        if r:
            pp = r.dirstate.parents()
            msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
        msg += "\n\nPatch Data:\n"
        msg += ''.join('%s\n' % x for x in self.applied)
        msg += ''.join(':%s\n' % x for x in self.full_series)
        n = repo.commit(msg, force=True)
        if not n:
            self.ui.warn(_("repo commit failed\n"))
            return 1
        self.applied.append(statusentry(n, '.hg.patches.save.line'))
        self.applied_dirty = 1
        self.removeundo(repo)

    def full_series_end(self):
        if self.applied:
            p = self.applied[-1].name
            end = self.find_series(p)
            if end is None:
                return len(self.full_series)
            return end + 1
        return 0

    def series_end(self, all_patches=False):
        """If all_patches is False, return the index of the next pushable patch
        in the series, or the series length. If all_patches is True, return the
        index of the first patch past the last applied one.
        """
        end = 0
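        # next() finds the first pushable patch at or after 'start',
        # explaining (and skipping over) any guarded patches on the way.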
        def next(start):
            if all_patches or start >= len(self.series):
                return start
            for i in xrange(start, len(self.series)):
                p, reason = self.pushable(i)
                if p:
                    break
                self.explain_pushable(i)
            return i
        if self.applied:
            p = self.applied[-1].name
            try:
                end = self.series.index(p)
            except ValueError:
                return 0
            return next(end + 1)
        return next(end)

    def appliedname(self, index):
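        # In verbose mode, prefix the patch name with its series index.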
        pname = self.applied[index].name
        if not self.ui.verbose:
            p = pname
        else:
            p = str(self.series.index(pname)) + " " + pname
        return p

    def qimport(self, repo, files, patchname=None, rev=None, existing=None,
                force=None, git=False):
        def checkseries(patchname):
            if patchname in self.series:
                raise util.Abort(_('patch %s is already in the series file')
                                 % patchname)
        def checkfile(patchname):
            if not force and os.path.exists(self.join(patchname)):
                raise util.Abort(_('patch "%s" already exists')
                                 % patchname)

        if rev:
            if files:
                raise util.Abort(_('option "-r" not valid when importing '
                                   'files'))
            rev = cmdutil.revrange(repo, rev)
            rev.sort(reverse=True)
        if (len(files) > 1 or len(rev) > 1) and patchname:
            raise util.Abort(_('option "-n" not valid when importing multiple '
                               'patches'))
        if rev:
            # If mq patches are applied, we can only import revisions
            # that form a linear path to qbase.
            # Otherwise, they should form a linear path to a head.
            heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
            if len(heads) > 1:
                raise util.Abort(_('revision %d is the root of more than one '
                                   'branch') % rev[-1])
            if self.applied:
                base = repo.changelog.node(rev[0])
                if base in [n.node for n in self.applied]:
                    raise util.Abort(_('revision %d is already managed')
                                     % rev[0])
                if heads != [self.applied[-1].node]:
                    raise util.Abort(_('revision %d is not the parent of '
                                       'the queue') % rev[0])
                base = repo.changelog.rev(self.applied[0].node)
                lastparent = repo.changelog.parentrevs(base)[0]
            else:
                if heads != [repo.changelog.node(rev[0])]:
                    raise util.Abort(_('revision %d has unmanaged children')
                                     % rev[0])
                lastparent = None

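            # Revisions are imported newest-first: each one is exported to
            # a patch file and pushed onto the front of the applied stack.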
            diffopts = self.diffopts({'git': git})
            for r in rev:
                p1, p2 = repo.changelog.parentrevs(r)
                n = repo.changelog.node(r)
                if p2 != nullrev:
                    raise util.Abort(_('cannot import merge revision %d') % r)
                if lastparent and lastparent != r:
                    raise util.Abort(_('revision %d is not the parent of %d')
                                     % (r, lastparent))
                lastparent = p1

                if not patchname:
                    patchname = normname('%d.diff' % r)
                self.check_reserved_name(patchname)
                checkseries(patchname)
                checkfile(patchname)
                self.full_series.insert(0, patchname)

                patchf = self.opener(patchname, "w")
                cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
                patchf.close()

                se = statusentry(n, patchname)
                self.applied.insert(0, se)

                self.added.append(patchname)
                patchname = None
            self.parse_series()
            self.applied_dirty = 1
            self.series_dirty = True

        for i, filename in enumerate(files):
            if existing:
                if filename == '-':
                    raise util.Abort(_('-e is incompatible with import from -'))
                if not patchname:
                    patchname = normname(filename)
                self.check_reserved_name(patchname)
                if not os.path.isfile(self.join(patchname)):
                    raise util.Abort(_("patch %s does not exist") % patchname)
            else:
                try:
                    if filename == '-':
                        if not patchname:
                            raise util.Abort(
                                _('need --name to import a patch from -'))
                        text = sys.stdin.read()
                    else:
                        text = url.open(self.ui, filename).read()
                except (OSError, IOError):
                    raise util.Abort(_("unable to read file %s") % filename)
                if not patchname:
                    patchname = normname(os.path.basename(filename))
                self.check_reserved_name(patchname)
                checkfile(patchname)
                patchf = self.opener(patchname, "w")
                patchf.write(text)
            if not force:
                checkseries(patchname)
            if patchname not in self.series:
                index = self.full_series_end() + i
                self.full_series[index:index] = [patchname]
            self.parse_series()
            self.series_dirty = True
            self.ui.warn(_("adding %s to series file\n") % patchname)
            self.added.append(patchname)
            patchname = None

def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. With
    -k/--keep, the patch files are preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    q = repo.mq
    q.delete(repo, patches, opts)
    q.save_dirty()
    return 0

def applied(ui, repo, patch=None, **opts):
    """print the patches already applied"""

    q = repo.mq
    l = len(q.applied)

    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = q.series.index(patch) + 1
    else:
        end = q.series_end(True)

    if opts.get('last') and not end:
        ui.write(_("no patches applied\n"))
        return 1
    elif opts.get('last') and end == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    elif opts.get('last'):
        start = end - 2
        end = 1
    else:
        start = 0

    return q.qseries(repo, length=end, start=start, status='A',
                     summary=opts.get('summary'))

def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied"""

    q = repo.mq
    if patch:
        if patch not in q.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = q.series.index(patch) + 1
    else:
        start = q.series_end(True)

    if start == len(q.series) and opts.get('first'):
        ui.write(_("all patches applied\n"))
        return 1

    length = opts.get('first') and 1 or None
    return q.qseries(repo, start=start, length=length, status='U',
                     summary=opts.get('summary'))

def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With -g/--git, patches imported with --rev will use the git diff
    format. See the diffs help topic for information on why this is
    important for preserving rename/copy information and permission
    changes.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.
    """
    q = repo.mq
    try:
        q.qimport(repo, filename, patchname=opts['name'],
                  existing=opts['existing'], force=opts['force'], rev=opts['rev'],
                  git=opts['git'])
    finally:
        q.save_dirty()

    if opts.get('push') and not opts.get('rev'):
        return q.push(repo, None)
    return 0

def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state)."""
    q = repo.mq
    r = q.init(repo, create)
    q.save_dirty()
    if r:
        if not os.path.exists(r.wjoin('.hgignore')):
            fp = r.wopener('.hgignore', 'w')
            fp.write('^\\.hg\n')
            fp.write('^\\.mq\n')
            fp.write('syntax: glob\n')
            fp.write('status\n')
            fp.write('guards\n')
            fp.close()
        if not os.path.exists(r.wjoin('series')):
            r.wopener('series', 'w').close()
        r[None].add(['.hgignore', 'series'])
        commands.add(ui, r)
    return 0

def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    return qinit(ui, repo, create=opts['create_repo'])

def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command cannot check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone a remote repository, make
    sure it has no patches applied beforehand.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.
    '''
    def patchdir(repo):
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
    if opts['patches']:
        patchespath = ui.expandpath(opts['patches'])
    else:
        patchespath = patchdir(sr)
    try:
        hg.repository(ui, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
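    # Find the first applied patch (qbase) in the source. For a local
    # source and a non-local destination, restrict the main clone to
    # qbase's parent plus any heads that do not descend from the patches;
    # a local destination is stripped after cloning instead.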
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            qbase = sr.mq.applied[0].node
            if not hg.islocal(dest):
                heads = set(sr.heads())
                destrev = list(heads.difference(sr.heads(qbase)))
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass
    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts['pull'],
                      rev=destrev,
                      update=False,
                      stream=opts['uncompressed'])
    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts['patches'] or patchdir(sr), patchdir(dr),
             pull=opts['pull'], update=not opts['noupdate'],
             stream=opts['uncompressed'])
    if dr.local():
        if qbase:
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            dr.mq.strip(dr, qbase, update=False, backup=None)
        if not opts['noupdate']:
            ui.note(_('updating destination repository\n'))
            hg.update(dr, dr.changelog.tip())

def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    q = repo.mq
    r = q.qrepo()
    if not r:
        raise util.Abort('no queue repository')
    commands.commit(r.ui, r, *pats, **opts)

def series(ui, repo, **opts):
    """print the entire series file"""
    repo.mq.qseries(repo, missing=opts['missing'], summary=opts['summary'])
    return 0

def top(ui, repo, **opts):
    """print the name of the current patch"""
    q = repo.mq
    t = q.applied and q.series_end(True) or 0
    if t:
        return q.qseries(repo, start=t - 1, length=1, status='A',
                         summary=opts.get('summary'))
    else:
        ui.write(_("no patches applied\n"))
        return 1

def next(ui, repo, **opts):
    """print the name of the next patch"""
    q = repo.mq
    end = q.series_end()
    if end == len(q.series):
        ui.write(_("all patches applied\n"))
        return 1
    return q.qseries(repo, start=end, length=1, summary=opts.get('summary'))

def prev(ui, repo, **opts):
    """print the name of the previous patch"""
    q = repo.mq
    l = len(q.applied)
    if l == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    if not l:
        ui.write(_("no patches applied\n"))
        return 1
    return q.qseries(repo, start=l - 2, length=1, status='A',
                     summary=opts.get('summary'))

def setupheaderopts(ui, opts):
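    # Fill in the user and date from -U/--currentuser and -D/--currentdate
    # when no explicit value was given.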
    if not opts.get('user') and opts.get('currentuser'):
        opts['user'] = ui.username()
    if not opts.get('date') and opts.get('currentdate'):
        opts['date'] = "%d %d" % util.makedate()

def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.
    """
    msg = cmdutil.logmessage(opts)
    def getmsg():
        return ui.edit(msg, ui.username())
    q = repo.mq
    opts['msg'] = msg
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0

def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.
    """
    q = repo.mq
    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if not q.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        patch = q.applied[-1].name
        ph = patchheader(q.join(patch), q.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
    setupheaderopts(ui, opts)
    ret = q.refresh(repo, pats, msg=message, **opts)
    q.save_dirty()
    return ret

def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.
    """
    repo.mq.diff(repo, pats, opts)
    return 0

def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of '* * *'."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.check_localchanges(repo)

    message = cmdutil.logmessage(opts)
    if opts['edit']:
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)
        patch.updatedir(ui, repo, files)

    if not message:
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts['edit']:
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    q.refresh(repo, msg=message, git=diffopts.git)
    q.delete(repo, patches, opts)
    q.save_dirty()

def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack'''
    q = repo.mq
    patch = q.lookup(patch)
    if q.isapplied(patch):
        ret = q.pop(repo, patch, force=opts['force'])
    else:
        ret = q.push(repo, patch, force=opts['force'])
    q.save_dirty()
    return ret

def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.
    NOTE: Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable
    '''
    def status(idx):
        guards = q.series_guards[idx] or ['unguarded']
        ui.write('%s: ' % ui.label(q.series[idx], 'qguard.patch'))
        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    patch = None
    args = list(args)
    if opts['list']:
        if args or opts['none']:
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
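    # An argument starting with '+' or '-' is a guard rather than a patch
    # name, so in that case default to the topmost applied patch.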
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts['none']:
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))

def header(ui, repo, patch=None):
    """print the header of the topmost or specified patch"""
    q = repo.mq

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return 1
        patch = q.lookup('qtip')
    ph = patchheader(q.join(patch), q.plainmode)

    ui.write('\n'.join(ph.message) + '\n')

def lastsavename(path):
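    # Look for siblings named "<base>.<N>" and return the path and index
    # of the one with the highest N (the most recent save).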
    (directory, base) = os.path.split(path)
    names = os.listdir(directory)
    namere = re.compile("%s.([0-9]+)" % base)
    maxindex = None
    maxname = None
    for f in names:
        m = namere.match(f)
        if m:
            index = int(m.group(1))
            if maxindex is None or index > maxindex:
                maxindex = index
                maxname = f
    if maxname:
        return (os.path.join(directory, maxname), maxindex)
    return (None, None)

def savename(path):
    (last, index) = lastsavename(path)
    if last is None:
        index = 0
    newpath = path + ".%d" % (index + 1)
    return newpath

def push(ui, repo, patch=None, **opts):
    """push the next patch onto the stack

    When -f/--force is applied, all local changes in patched files
    will be lost.
    """
    q = repo.mq
    mergeq = None

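    # With -m/--merge, patches are pushed with the help of a previously
    # saved queue: the one named by -n, or else the most recently saved
    # one found on disk.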
    if opts['merge']:
        if opts['name']:
            newpath = repo.join(opts['name'])
        else:
            newpath, i = lastsavename(q.path)
        if not newpath:
            ui.warn(_("no saved queues found, please use -n\n"))
            return 1
        mergeq = queue(ui, repo.join(""), newpath)
        ui.warn(_("merging with queue at: %s\n") % mergeq.path)
    ret = q.push(repo, patch, force=opts['force'], list=opts['list'],
                 mergeq=mergeq, all=opts.get('all'), move=opts.get('move'))
    return ret

def pop(ui, repo, patch=None, **opts):
    """pop the current patch off the stack

    By default, pops off the top of the patch stack. If given a patch
    name, keeps popping off patches until the named patch is at the
    top of the stack.
    """
    localupdate = True
    if opts['name']:
        q = queue(ui, repo.join(""), repo.join(opts['name']))
        ui.warn(_('using patch queue: %s\n') % q.path)
        localupdate = False
    else:
        q = repo.mq
    ret = q.pop(repo, patch, force=opts['force'], update=localupdate,
                all=opts['all'])
    q.save_dirty()
    return ret

def rename(ui, repo, patch, name=None, **opts):
    """rename a patch

    With one argument, renames the current patch to PATCH1.
    With two arguments, renames PATCH1 to PATCH2."""

    q = repo.mq

    if not name:
        name = patch
        patch = None

    if patch:
        patch = q.lookup(patch)
    else:
        if not q.applied:
            ui.write(_('no patches applied\n'))
            return
        patch = q.lookup('qtip')
    absdest = q.join(name)
    if os.path.isdir(absdest):
        name = normname(os.path.join(name, os.path.basename(patch)))
        absdest = q.join(name)
    if os.path.exists(absdest):
        raise util.Abort(_('%s already exists') % absdest)

    if name in q.series:
        raise util.Abort(
            _('A patch named %s already exists in the series file') % name)

    ui.note(_('renaming %s to %s\n') % (patch, name))
    i = q.find_series(patch)
    guards = q.guard_re.findall(q.full_series[i])
    q.full_series[i] = name + ''.join([' #' + g for g in guards])
    q.parse_series()
    q.series_dirty = 1

    info = q.isapplied(patch)
    if info:
        q.applied[info[0]] = statusentry(info[1], name)
        q.applied_dirty = 1

    destdir = os.path.dirname(absdest)
    if not os.path.isdir(destdir):
        os.makedirs(destdir)
    util.rename(q.join(patch), absdest)
    r = q.qrepo()
    if r:
        wctx = r[None]
        wlock = r.wlock()
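        # Mirror the rename in the queue repository: a file that was only
        # added is re-added under the new name; otherwise record a copy
        # and remove the old entry.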
        try:
            if r.dirstate[patch] == 'a':
                r.dirstate.forget(patch)
                r.dirstate.add(name)
            else:
                if r.dirstate[name] == 'r':
                    wctx.undelete([name])
                wctx.copy(patch, name)
                wctx.remove([patch], False)
        finally:
            wlock.release()

    q.save_dirty()

def restore(ui, repo, rev, **opts):
    """restore the queue state saved by a revision (DEPRECATED)

    This command is deprecated, use rebase --mq instead."""
    rev = repo.lookup(rev)
    q = repo.mq
    q.restore(repo, rev, delete=opts['delete'],
              qupdate=opts['update'])
    q.save_dirty()
    return 0

def save(ui, repo, **opts):
    """save current queue state (DEPRECATED)

    This command is deprecated, use rebase --mq instead."""
    q = repo.mq
    message = cmdutil.logmessage(opts)
    ret = q.save(repo, msg=message)
    if ret:
        return ret
    q.save_dirty()
    if opts['copy']:
        path = q.path
        if opts['name']:
            newpath = os.path.join(q.basepath, opts['name'])
            if os.path.exists(newpath):
                if not os.path.isdir(newpath):
                    raise util.Abort(_('destination %s exists and is not '
                                       'a directory') % newpath)
                if not opts['force']:
                    raise util.Abort(_('destination %s exists, '
                                       'use -f to force') % newpath)
        else:
            newpath = savename(path)
        ui.warn(_("copy %s to %s\n") % (path, newpath))
        util.copyfiles(path, newpath)
    if opts['empty']:
        try:
            os.unlink(q.join(q.status_path))
        except:
            pass
    return 0

def strip(ui, repo, rev, **opts):
    """strip a changeset and all its descendants from the repository

    The strip command removes all changesets whose local revision
    number is greater than or equal to REV, and then restores any
    changesets that are not descendants of REV. If the working
    directory has uncommitted changes, the operation is aborted unless
    the --force flag is supplied.

    If a parent of the working directory is stripped, then the working
    directory will automatically be updated to the most recent
    available ancestor of the stripped parent after the operation
    completes.

    Any stripped changesets are stored in ``.hg/strip-backup`` as a
    bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
    be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
    where BUNDLE is the bundle file created by the strip. Note that
    the local revision numbers will in general be different after the
    restore.

    Use the --nobackup option to discard the backup bundle once the
    operation completes.
    """
    backup = 'all'
    if opts['backup']:
        backup = 'strip'
    elif opts['nobackup']:
        backup = 'none'

    rev = repo.lookup(rev)
2418 p = repo.dirstate.parents()
2418 p = repo.dirstate.parents()
2419 cl = repo.changelog
2419 cl = repo.changelog
2420 update = True
2420 update = True
2421 if p[0] == nullid:
2421 if p[0] == nullid:
2422 update = False
2422 update = False
2423 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2423 elif p[1] == nullid and rev != cl.ancestor(p[0], rev):
2424 update = False
2424 update = False
2425 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2425 elif rev not in (cl.ancestor(p[0], rev), cl.ancestor(p[1], rev)):
2426 update = False
2426 update = False
2427
2427
2428 q = repo.mq
2428 q = repo.mq
2429 if q.applied:
2429 if q.applied:
2430 if rev == cl.ancestor(repo.lookup('qtip'), rev):
2430 if rev == cl.ancestor(repo.lookup('qtip'), rev):
2431 q.applied_dirty = True
2431 q.applied_dirty = True
2432 start = 0
2432 start = 0
2433 end = len(q.applied)
2433 end = len(q.applied)
2434 applied_list = [i.node for i in q.applied]
2434 applied_list = [i.node for i in q.applied]
2435 if rev in applied_list:
2435 if rev in applied_list:
2436 start = applied_list.index(rev)
2436 start = applied_list.index(rev)
2437 del q.applied[start:end]
2437 del q.applied[start:end]
2438 q.save_dirty()
2438 q.save_dirty()
2439
2439
2440 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2440 repo.mq.strip(repo, rev, backup=backup, update=update, force=opts['force'])
2441 return 0
2441 return 0
2442
2442
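The strip behaviour documented above removes REV together with all of its descendants; changesets with higher revision numbers that are not descendants of REV are re-added from the backup bundle. The following standalone sketch (not part of mq, with a made-up parent map) computes which revisions would be permanently removed:

    # Illustrative sketch only: compute the revisions a strip of `rev` would
    # remove, given a mapping of revision -> list of parent revisions.
    def stripped_revs(parents, rev, tiprev):
        removed = set([rev])
        for r in range(rev + 1, tiprev + 1):
            if any(p in removed for p in parents[r]):
                removed.add(r)
        return removed

    # Hypothetical history: linear 0-1-2-3 with a branch 2-4-5.
    parents = {0: [], 1: [0], 2: [1], 3: [2], 4: [2], 5: [4]}
    print(sorted(stripped_revs(parents, 3, 5)))   # [3]; 4 and 5 are restored
    print(sorted(stripped_revs(parents, 2, 5)))   # [2, 3, 4, 5]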
2443 def select(ui, repo, *args, **opts):
2443 def select(ui, repo, *args, **opts):
2444 '''set or print guarded patches to push
2444 '''set or print guarded patches to push
2445
2445
2446 Use the :hg:`qguard` command to set or print guards on a patch, then use
2446 Use the :hg:`qguard` command to set or print guards on a patch, then use
2447 qselect to tell mq which guards to use. A patch will be pushed if
2447 qselect to tell mq which guards to use. A patch will be pushed if
2448 it has no guards or any positive guards match the currently
2448 it has no guards or any positive guards match the currently
2449 selected guard, but will not be pushed if any negative guards
2449 selected guard, but will not be pushed if any negative guards
2450 match the current guard. For example::
2450 match the current guard. For example::
2451
2451
2452 qguard foo.patch -stable (negative guard)
2452 qguard foo.patch -stable (negative guard)
2453 qguard bar.patch +stable (positive guard)
2453 qguard bar.patch +stable (positive guard)
2454 qselect stable
2454 qselect stable
2455
2455
2456 This activates the "stable" guard. mq will skip foo.patch (because
2456 This activates the "stable" guard. mq will skip foo.patch (because
2457 it has a negative match) but push bar.patch (because it has a
2457 it has a negative match) but push bar.patch (because it has a
2458 positive match).
2458 positive match).
2459
2459
2460 With no arguments, prints the currently active guards.
2460 With no arguments, prints the currently active guards.
2461 With one argument, sets the active guard.
2461 With one argument, sets the active guard.
2462
2462
2463 Use -n/--none to deactivate guards (no other arguments needed).
2463 Use -n/--none to deactivate guards (no other arguments needed).
2464 When no guards are active, patches with positive guards are
2464 When no guards are active, patches with positive guards are
2465 skipped and patches with negative guards are pushed.
2465 skipped and patches with negative guards are pushed.
2466
2466
2467 qselect can change the guards on applied patches. It does not pop
2467 qselect can change the guards on applied patches. It does not pop
2468 guarded patches by default. Use --pop to pop back to the last
2468 guarded patches by default. Use --pop to pop back to the last
2469 applied patch that is not guarded. Use --reapply (which implies
2469 applied patch that is not guarded. Use --reapply (which implies
2470 --pop) to push back to the current patch afterwards, but skip
2470 --pop) to push back to the current patch afterwards, but skip
2471 guarded patches.
2471 guarded patches.
2472
2472
2473 Use -s/--series to print a list of all guards in the series file
2473 Use -s/--series to print a list of all guards in the series file
2474 (no other arguments needed). Use -v for more information.'''
2474 (no other arguments needed). Use -v for more information.'''
2475
2475
2476 q = repo.mq
2476 q = repo.mq
2477 guards = q.active()
2477 guards = q.active()
2478 if args or opts['none']:
2478 if args or opts['none']:
2479 old_unapplied = q.unapplied(repo)
2479 old_unapplied = q.unapplied(repo)
2480 old_guarded = [i for i in xrange(len(q.applied)) if
2480 old_guarded = [i for i in xrange(len(q.applied)) if
2481 not q.pushable(i)[0]]
2481 not q.pushable(i)[0]]
2482 q.set_active(args)
2482 q.set_active(args)
2483 q.save_dirty()
2483 q.save_dirty()
2484 if not args:
2484 if not args:
2485 ui.status(_('guards deactivated\n'))
2485 ui.status(_('guards deactivated\n'))
2486 if not opts['pop'] and not opts['reapply']:
2486 if not opts['pop'] and not opts['reapply']:
2487 unapplied = q.unapplied(repo)
2487 unapplied = q.unapplied(repo)
2488 guarded = [i for i in xrange(len(q.applied))
2488 guarded = [i for i in xrange(len(q.applied))
2489 if not q.pushable(i)[0]]
2489 if not q.pushable(i)[0]]
2490 if len(unapplied) != len(old_unapplied):
2490 if len(unapplied) != len(old_unapplied):
2491 ui.status(_('number of unguarded, unapplied patches has '
2491 ui.status(_('number of unguarded, unapplied patches has '
2492 'changed from %d to %d\n') %
2492 'changed from %d to %d\n') %
2493 (len(old_unapplied), len(unapplied)))
2493 (len(old_unapplied), len(unapplied)))
2494 if len(guarded) != len(old_guarded):
2494 if len(guarded) != len(old_guarded):
2495 ui.status(_('number of guarded, applied patches has changed '
2495 ui.status(_('number of guarded, applied patches has changed '
2496 'from %d to %d\n') %
2496 'from %d to %d\n') %
2497 (len(old_guarded), len(guarded)))
2497 (len(old_guarded), len(guarded)))
2498 elif opts['series']:
2498 elif opts['series']:
2499 guards = {}
2499 guards = {}
2500 noguards = 0
2500 noguards = 0
2501 for gs in q.series_guards:
2501 for gs in q.series_guards:
2502 if not gs:
2502 if not gs:
2503 noguards += 1
2503 noguards += 1
2504 for g in gs:
2504 for g in gs:
2505 guards.setdefault(g, 0)
2505 guards.setdefault(g, 0)
2506 guards[g] += 1
2506 guards[g] += 1
2507 if ui.verbose:
2507 if ui.verbose:
2508 guards['NONE'] = noguards
2508 guards['NONE'] = noguards
2509 guards = guards.items()
2509 guards = guards.items()
2510 guards.sort(key=lambda x: x[0][1:])
2510 guards.sort(key=lambda x: x[0][1:])
2511 if guards:
2511 if guards:
2512 ui.note(_('guards in series file:\n'))
2512 ui.note(_('guards in series file:\n'))
2513 for guard, count in guards:
2513 for guard, count in guards:
2514 ui.note('%2d ' % count)
2514 ui.note('%2d ' % count)
2515 ui.write(guard, '\n')
2515 ui.write(guard, '\n')
2516 else:
2516 else:
2517 ui.note(_('no guards in series file\n'))
2517 ui.note(_('no guards in series file\n'))
2518 else:
2518 else:
2519 if guards:
2519 if guards:
2520 ui.note(_('active guards:\n'))
2520 ui.note(_('active guards:\n'))
2521 for g in guards:
2521 for g in guards:
2522 ui.write(g, '\n')
2522 ui.write(g, '\n')
2523 else:
2523 else:
2524 ui.write(_('no active guards\n'))
2524 ui.write(_('no active guards\n'))
2525 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2525 reapply = opts['reapply'] and q.applied and q.appliedname(-1)
2526 popped = False
2526 popped = False
2527 if opts['pop'] or opts['reapply']:
2527 if opts['pop'] or opts['reapply']:
2528 for i in xrange(len(q.applied)):
2528 for i in xrange(len(q.applied)):
2529 pushable, reason = q.pushable(i)
2529 pushable, reason = q.pushable(i)
2530 if not pushable:
2530 if not pushable:
2531 ui.status(_('popping guarded patches\n'))
2531 ui.status(_('popping guarded patches\n'))
2532 popped = True
2532 popped = True
2533 if i == 0:
2533 if i == 0:
2534 q.pop(repo, all=True)
2534 q.pop(repo, all=True)
2535 else:
2535 else:
2536 q.pop(repo, i - 1)
2536 q.pop(repo, i - 1)
2537 break
2537 break
2538 if popped:
2538 if popped:
2539 try:
2539 try:
2540 if reapply:
2540 if reapply:
2541 ui.status(_('reapplying unguarded patches\n'))
2541 ui.status(_('reapplying unguarded patches\n'))
2542 q.push(repo, reapply)
2542 q.push(repo, reapply)
2543 finally:
2543 finally:
2544 q.save_dirty()
2544 q.save_dirty()
2545
2545
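The positive/negative guard rules described in the qselect help text above can be condensed into a small standalone sketch. This is a simplification for illustration, not mq's actual pushable() implementation:

    # Guards are strings like '+stable' or '-stable'; `active` is the set of
    # currently selected guard names.
    def pushable(guards, active):
        if not guards:
            return True
        if active:
            if any(g[1:] in active for g in guards if g.startswith('-')):
                return False          # a matching negative guard skips the patch
            pos = [g for g in guards if g.startswith('+')]
            return not pos or any(g[1:] in active for g in pos)
        # no active guards: only positively guarded patches are skipped
        return not any(g.startswith('+') for g in guards)

    print(pushable(['-stable'], set(['stable'])))   # False
    print(pushable(['+stable'], set(['stable'])))   # True
    print(pushable(['+stable'], set()))              # False
    print(pushable(['-stable'], set()))              # True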
2546 def finish(ui, repo, *revrange, **opts):
2546 def finish(ui, repo, *revrange, **opts):
2547 """move applied patches into repository history
2547 """move applied patches into repository history
2548
2548
2549 Finishes the specified revisions (corresponding to applied
2549 Finishes the specified revisions (corresponding to applied
2550 patches) by moving them out of mq control into regular repository
2550 patches) by moving them out of mq control into regular repository
2551 history.
2551 history.
2552
2552
2553 Accepts a revision range or the -a/--applied option. If --applied
2553 Accepts a revision range or the -a/--applied option. If --applied
2554 is specified, all applied mq revisions are removed from mq
2554 is specified, all applied mq revisions are removed from mq
2555 control. Otherwise, the given revisions must be at the base of the
2555 control. Otherwise, the given revisions must be at the base of the
2556 stack of applied patches.
2556 stack of applied patches.
2557
2557
2558 This can be especially useful if your changes have been applied to
2558 This can be especially useful if your changes have been applied to
2559 an upstream repository, or if you are about to push your changes
2559 an upstream repository, or if you are about to push your changes
2560 to upstream.
2560 to upstream.
2561 """
2561 """
2562 if not opts['applied'] and not revrange:
2562 if not opts['applied'] and not revrange:
2563 raise util.Abort(_('no revisions specified'))
2563 raise util.Abort(_('no revisions specified'))
2564 elif opts['applied']:
2564 elif opts['applied']:
2565 revrange = ('qbase:qtip',) + revrange
2565 revrange = ('qbase:qtip',) + revrange
2566
2566
2567 q = repo.mq
2567 q = repo.mq
2568 if not q.applied:
2568 if not q.applied:
2569 ui.status(_('no patches applied\n'))
2569 ui.status(_('no patches applied\n'))
2570 return 0
2570 return 0
2571
2571
2572 revs = cmdutil.revrange(repo, revrange)
2572 revs = cmdutil.revrange(repo, revrange)
2573 q.finish(repo, revs)
2573 q.finish(repo, revs)
2574 q.save_dirty()
2574 q.save_dirty()
2575 return 0
2575 return 0
2576
2576
2577 def qqueue(ui, repo, name=None, **opts):
2577 def qqueue(ui, repo, name=None, **opts):
2578 '''manage multiple patch queues
2578 '''manage multiple patch queues
2579
2579
2580 Supports switching between different patch queues, as well as creating
2580 Supports switching between different patch queues, as well as creating
2581 new patch queues and deleting existing ones.
2581 new patch queues and deleting existing ones.
2582
2582
2583 Omitting a queue name or specifying -l/--list will show you the registered
2583 Omitting a queue name or specifying -l/--list will show you the registered
2584 queues - by default the "normal" patches queue is registered. The currently
2584 queues - by default the "normal" patches queue is registered. The currently
2585 active queue will be marked with "(active)".
2585 active queue will be marked with "(active)".
2586
2586
2587 To create a new queue, use -c/--create. The queue is automatically made
2587 To create a new queue, use -c/--create. The queue is automatically made
2588 active, unless patches from the currently active queue are applied in the
2588 active, unless patches from the currently active queue are applied in the
2589 repository; in that case the new queue is only created, and switching to
2589 repository; in that case the new queue is only created, and switching to
2590 it fails.
2590 it fails.
2591
2591
2592 To delete an existing queue, use --delete. You cannot delete the currently
2592 To delete an existing queue, use --delete. You cannot delete the currently
2593 active queue.
2593 active queue.
2594 '''
2594 '''
2595
2595
2596 q = repo.mq
2596 q = repo.mq
2597
2597
2598 _defaultqueue = 'patches'
2598 _defaultqueue = 'patches'
2599 _allqueues = 'patches.queues'
2599 _allqueues = 'patches.queues'
2600 _activequeue = 'patches.queue'
2600 _activequeue = 'patches.queue'
2601
2601
2602 def _getcurrent():
2602 def _getcurrent():
2603 cur = os.path.basename(q.path)
2603 cur = os.path.basename(q.path)
2604 if cur.startswith('patches-'):
2604 if cur.startswith('patches-'):
2605 cur = cur[8:]
2605 cur = cur[8:]
2606 return cur
2606 return cur
2607
2607
2608 def _noqueues():
2608 def _noqueues():
2609 try:
2609 try:
2610 fh = repo.opener(_allqueues, 'r')
2610 fh = repo.opener(_allqueues, 'r')
2611 fh.close()
2611 fh.close()
2612 except IOError:
2612 except IOError:
2613 return True
2613 return True
2614
2614
2615 return False
2615 return False
2616
2616
2617 def _getqueues():
2617 def _getqueues():
2618 current = _getcurrent()
2618 current = _getcurrent()
2619
2619
2620 try:
2620 try:
2621 fh = repo.opener(_allqueues, 'r')
2621 fh = repo.opener(_allqueues, 'r')
2622 queues = [queue.strip() for queue in fh if queue.strip()]
2622 queues = [queue.strip() for queue in fh if queue.strip()]
2623 if current not in queues:
2623 if current not in queues:
2624 queues.append(current)
2624 queues.append(current)
2625 except IOError:
2625 except IOError:
2626 queues = [_defaultqueue]
2626 queues = [_defaultqueue]
2627
2627
2628 return sorted(queues)
2628 return sorted(queues)
2629
2629
2630 def _setactive(name):
2630 def _setactive(name):
2631 if q.applied:
2631 if q.applied:
2632 raise util.Abort(_('patches applied - cannot set new queue active'))
2632 raise util.Abort(_('patches applied - cannot set new queue active'))
2633
2633
2634 fh = repo.opener(_activequeue, 'w')
2634 fh = repo.opener(_activequeue, 'w')
2635 if name != 'patches':
2635 if name != 'patches':
2636 fh.write(name)
2636 fh.write(name)
2637 fh.close()
2637 fh.close()
2638
2638
2639 def _addqueue(name):
2639 def _addqueue(name):
2640 fh = repo.opener(_allqueues, 'a')
2640 fh = repo.opener(_allqueues, 'a')
2641 fh.write('%s\n' % (name,))
2641 fh.write('%s\n' % (name,))
2642 fh.close()
2642 fh.close()
2643
2643
2644 def _validname(name):
2644 def _validname(name):
2645 for n in name:
2645 for n in name:
2646 if n in ':\\/.':
2646 if n in ':\\/.':
2647 return False
2647 return False
2648 return True
2648 return True
2649
2649
2650 if not name or opts.get('list'):
2650 if not name or opts.get('list'):
2651 current = _getcurrent()
2651 current = _getcurrent()
2652 for queue in _getqueues():
2652 for queue in _getqueues():
2653 ui.write('%s' % (queue,))
2653 ui.write('%s' % (queue,))
2654 if queue == current:
2654 if queue == current:
2655 ui.write(_(' (active)\n'))
2655 ui.write(_(' (active)\n'))
2656 else:
2656 else:
2657 ui.write('\n')
2657 ui.write('\n')
2658 return
2658 return
2659
2659
2660 if not _validname(name):
2660 if not _validname(name):
2661 raise util.Abort(
2661 raise util.Abort(
2662 _('invalid queue name, may not contain the characters ":\\/."'))
2662 _('invalid queue name, may not contain the characters ":\\/."'))
2663
2663
2664 existing = _getqueues()
2664 existing = _getqueues()
2665
2665
2666 if opts.get('create'):
2666 if opts.get('create'):
2667 if name in existing:
2667 if name in existing:
2668 raise util.Abort(_('queue "%s" already exists') % name)
2668 raise util.Abort(_('queue "%s" already exists') % name)
2669 if _noqueues():
2669 if _noqueues():
2670 _addqueue(_defaultqueue)
2670 _addqueue(_defaultqueue)
2671 _addqueue(name)
2671 _addqueue(name)
2672 _setactive(name)
2672 _setactive(name)
2673 elif opts.get('delete'):
2673 elif opts.get('delete'):
2674 if name not in existing:
2674 if name not in existing:
2675 raise util.Abort(_('cannot delete queue that does not exist'))
2675 raise util.Abort(_('cannot delete queue that does not exist'))
2676
2676
2677 current = _getcurrent()
2677 current = _getcurrent()
2678
2678
2679 if name == current:
2679 if name == current:
2680 raise util.Abort(_('cannot delete currently active queue'))
2680 raise util.Abort(_('cannot delete currently active queue'))
2681
2681
2682 fh = repo.opener('patches.queues.new', 'w')
2682 fh = repo.opener('patches.queues.new', 'w')
2683 for queue in existing:
2683 for queue in existing:
2684 if queue == name:
2684 if queue == name:
2685 continue
2685 continue
2686 fh.write('%s\n' % (queue,))
2686 fh.write('%s\n' % (queue,))
2687 fh.close()
2687 fh.close()
2688 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2688 util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
2689 else:
2689 else:
2690 if name not in existing:
2690 if name not in existing:
2691 raise util.Abort(_('use --create to create a new queue'))
2691 raise util.Abort(_('use --create to create a new queue'))
2692 _setactive(name)
2692 _setactive(name)
2693
2693
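For orientation, the qqueue bookkeeping above reduces to two small text files under .hg: patches.queues holds one queue name per line, and patches.queue names the active queue (it is left empty for the default 'patches' queue, whose patch directory is .hg/patches; other queues live in .hg/patches-NAME). A minimal reader for that layout, using a hypothetical directory path, might look like this sketch; it is not the extension's own code:

    import os

    def read_queues(hgdir):                      # hgdir: a hypothetical .hg path
        try:
            fh = open(os.path.join(hgdir, 'patches.queues'))
            queues = [l.strip() for l in fh if l.strip()]
            fh.close()
        except IOError:
            queues = ['patches']                 # no registry yet: default queue
        try:
            fh = open(os.path.join(hgdir, 'patches.queue'))
            active = fh.read().strip() or 'patches'
            fh.close()
        except IOError:
            active = 'patches'
        return sorted(queues), active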
2694 def reposetup(ui, repo):
2694 def reposetup(ui, repo):
2695 class mqrepo(repo.__class__):
2695 class mqrepo(repo.__class__):
2696 @util.propertycache
2696 @util.propertycache
2697 def mq(self):
2697 def mq(self):
2698 return queue(self.ui, self.join(""))
2698 return queue(self.ui, self.join(""))
2699
2699
2700 def abort_if_wdir_patched(self, errmsg, force=False):
2700 def abort_if_wdir_patched(self, errmsg, force=False):
2701 if self.mq.applied and not force:
2701 if self.mq.applied and not force:
2702 parent = self.dirstate.parents()[0]
2702 parent = self.dirstate.parents()[0]
2703 if parent in [s.node for s in self.mq.applied]:
2703 if parent in [s.node for s in self.mq.applied]:
2704 raise util.Abort(errmsg)
2704 raise util.Abort(errmsg)
2705
2705
2706 def commit(self, text="", user=None, date=None, match=None,
2706 def commit(self, text="", user=None, date=None, match=None,
2707 force=False, editor=False, extra={}):
2707 force=False, editor=False, extra={}):
2708 self.abort_if_wdir_patched(
2708 self.abort_if_wdir_patched(
2709 _('cannot commit over an applied mq patch'),
2709 _('cannot commit over an applied mq patch'),
2710 force)
2710 force)
2711
2711
2712 return super(mqrepo, self).commit(text, user, date, match, force,
2712 return super(mqrepo, self).commit(text, user, date, match, force,
2713 editor, extra)
2713 editor, extra)
2714
2714
2715 def push(self, remote, force=False, revs=None, newbranch=False):
2715 def push(self, remote, force=False, revs=None, newbranch=False):
2716 if self.mq.applied and not force and not revs:
2716 if self.mq.applied and not force and not revs:
2717 raise util.Abort(_('source has mq patches applied'))
2717 raise util.Abort(_('source has mq patches applied'))
2718 return super(mqrepo, self).push(remote, force, revs, newbranch)
2718 return super(mqrepo, self).push(remote, force, revs, newbranch)
2719
2719
2720 def _findtags(self):
2720 def _findtags(self):
2721 '''augment tags from base class with patch tags'''
2721 '''augment tags from base class with patch tags'''
2722 result = super(mqrepo, self)._findtags()
2722 result = super(mqrepo, self)._findtags()
2723
2723
2724 q = self.mq
2724 q = self.mq
2725 if not q.applied:
2725 if not q.applied:
2726 return result
2726 return result
2727
2727
2728 mqtags = [(patch.node, patch.name) for patch in q.applied]
2728 mqtags = [(patch.node, patch.name) for patch in q.applied]
2729
2729
2730 if mqtags[-1][0] not in self.changelog.nodemap:
2730 if mqtags[-1][0] not in self.changelog.nodemap:
2731 self.ui.warn(_('mq status file refers to unknown node %s\n')
2731 self.ui.warn(_('mq status file refers to unknown node %s\n')
2732 % short(mqtags[-1][0]))
2732 % short(mqtags[-1][0]))
2733 return result
2733 return result
2734
2734
2735 mqtags.append((mqtags[-1][0], 'qtip'))
2735 mqtags.append((mqtags[-1][0], 'qtip'))
2736 mqtags.append((mqtags[0][0], 'qbase'))
2736 mqtags.append((mqtags[0][0], 'qbase'))
2737 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2737 mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
2738 tags = result[0]
2738 tags = result[0]
2739 for patch in mqtags:
2739 for patch in mqtags:
2740 if patch[1] in tags:
2740 if patch[1] in tags:
2741 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2741 self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
2742 % patch[1])
2742 % patch[1])
2743 else:
2743 else:
2744 tags[patch[1]] = patch[0]
2744 tags[patch[1]] = patch[0]
2745
2745
2746 return result
2746 return result
2747
2747
2748 def _branchtags(self, partial, lrev):
2748 def _branchtags(self, partial, lrev):
2749 q = self.mq
2749 q = self.mq
2750 if not q.applied:
2750 if not q.applied:
2751 return super(mqrepo, self)._branchtags(partial, lrev)
2751 return super(mqrepo, self)._branchtags(partial, lrev)
2752
2752
2753 cl = self.changelog
2753 cl = self.changelog
2754 qbasenode = q.applied[0].node
2754 qbasenode = q.applied[0].node
2755 if qbasenode not in cl.nodemap:
2755 if qbasenode not in cl.nodemap:
2756 self.ui.warn(_('mq status file refers to unknown node %s\n')
2756 self.ui.warn(_('mq status file refers to unknown node %s\n')
2757 % short(qbasenode))
2757 % short(qbasenode))
2758 return super(mqrepo, self)._branchtags(partial, lrev)
2758 return super(mqrepo, self)._branchtags(partial, lrev)
2759
2759
2760 qbase = cl.rev(qbasenode)
2760 qbase = cl.rev(qbasenode)
2761 start = lrev + 1
2761 start = lrev + 1
2762 if start < qbase:
2762 if start < qbase:
2763 # update the cache (excluding the patches) and save it
2763 # update the cache (excluding the patches) and save it
2764 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
2764 ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
2765 self._updatebranchcache(partial, ctxgen)
2765 self._updatebranchcache(partial, ctxgen)
2766 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
2766 self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
2767 start = qbase
2767 start = qbase
2768 # if start = qbase, the cache is as updated as it should be.
2768 # if start = qbase, the cache is as updated as it should be.
2769 # if start > qbase, the cache includes (part of) the patches.
2769 # if start > qbase, the cache includes (part of) the patches.
2770 # we might as well use it, but we won't save it.
2770 # we might as well use it, but we won't save it.
2771
2771
2772 # update the cache up to the tip
2772 # update the cache up to the tip
2773 ctxgen = (self[r] for r in xrange(start, len(cl)))
2773 ctxgen = (self[r] for r in xrange(start, len(cl)))
2774 self._updatebranchcache(partial, ctxgen)
2774 self._updatebranchcache(partial, ctxgen)
2775
2775
2776 return partial
2776 return partial
2777
2777
2778 if repo.local():
2778 if repo.local():
2779 repo.__class__ = mqrepo
2779 repo.__class__ = mqrepo
2780
2780
2781 def mqimport(orig, ui, repo, *args, **kwargs):
2781 def mqimport(orig, ui, repo, *args, **kwargs):
2782 if (hasattr(repo, 'abort_if_wdir_patched')
2782 if (hasattr(repo, 'abort_if_wdir_patched')
2783 and not kwargs.get('no_commit', False)):
2783 and not kwargs.get('no_commit', False)):
2784 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2784 repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
2785 kwargs.get('force'))
2785 kwargs.get('force'))
2786 return orig(ui, repo, *args, **kwargs)
2786 return orig(ui, repo, *args, **kwargs)
2787
2787
2788 def mqinit(orig, ui, *args, **kwargs):
2788 def mqinit(orig, ui, *args, **kwargs):
2789 mq = kwargs.pop('mq', None)
2789 mq = kwargs.pop('mq', None)
2790
2790
2791 if not mq:
2791 if not mq:
2792 return orig(ui, *args, **kwargs)
2792 return orig(ui, *args, **kwargs)
2793
2793
2794 if args:
2794 if args:
2795 repopath = args[0]
2795 repopath = args[0]
2796 if not hg.islocal(repopath):
2796 if not hg.islocal(repopath):
2797 raise util.Abort(_('only a local queue repository '
2797 raise util.Abort(_('only a local queue repository '
2798 'may be initialized'))
2798 'may be initialized'))
2799 else:
2799 else:
2800 repopath = cmdutil.findrepo(os.getcwd())
2800 repopath = cmdutil.findrepo(os.getcwd())
2801 if not repopath:
2801 if not repopath:
2802 raise util.Abort(_('there is no Mercurial repository here '
2802 raise util.Abort(_('there is no Mercurial repository here '
2803 '(.hg not found)'))
2803 '(.hg not found)'))
2804 repo = hg.repository(ui, repopath)
2804 repo = hg.repository(ui, repopath)
2805 return qinit(ui, repo, True)
2805 return qinit(ui, repo, True)
2806
2806
2807 def mqcommand(orig, ui, repo, *args, **kwargs):
2807 def mqcommand(orig, ui, repo, *args, **kwargs):
2808 """Add --mq option to operate on patch repository instead of main"""
2808 """Add --mq option to operate on patch repository instead of main"""
2809
2809
2810 # some commands do not like getting unknown options
2810 # some commands do not like getting unknown options
2811 mq = kwargs.pop('mq', None)
2811 mq = kwargs.pop('mq', None)
2812
2812
2813 if not mq:
2813 if not mq:
2814 return orig(ui, repo, *args, **kwargs)
2814 return orig(ui, repo, *args, **kwargs)
2815
2815
2816 q = repo.mq
2816 q = repo.mq
2817 r = q.qrepo()
2817 r = q.qrepo()
2818 if not r:
2818 if not r:
2819 raise util.Abort(_('no queue repository'))
2819 raise util.Abort(_('no queue repository'))
2820 return orig(r.ui, r, *args, **kwargs)
2820 return orig(r.ui, r, *args, **kwargs)
2821
2821
2822 def summary(orig, ui, repo, *args, **kwargs):
2822 def summary(orig, ui, repo, *args, **kwargs):
2823 r = orig(ui, repo, *args, **kwargs)
2823 r = orig(ui, repo, *args, **kwargs)
2824 q = repo.mq
2824 q = repo.mq
2825 m = []
2825 m = []
2826 a, u = len(q.applied), len(q.unapplied(repo))
2826 a, u = len(q.applied), len(q.unapplied(repo))
2827 if a:
2827 if a:
2828 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
2828 m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
2829 if u:
2829 if u:
2830 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
2830 m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
2831 if m:
2831 if m:
2832 ui.write("mq: %s\n" % ', '.join(m))
2832 ui.write("mq: %s\n" % ', '.join(m))
2833 else:
2833 else:
2834 ui.note(_("mq: (empty queue)\n"))
2834 ui.note(_("mq: (empty queue)\n"))
2835 return r
2835 return r
2836
2836
2837 def uisetup(ui):
2837 def uisetup(ui):
2838 mqopt = [('', 'mq', None, _("operate on patch repository"))]
2838 mqopt = [('', 'mq', None, _("operate on patch repository"))]
2839
2839
2840 extensions.wrapcommand(commands.table, 'import', mqimport)
2840 extensions.wrapcommand(commands.table, 'import', mqimport)
2841 extensions.wrapcommand(commands.table, 'summary', summary)
2841 extensions.wrapcommand(commands.table, 'summary', summary)
2842
2842
2843 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
2843 entry = extensions.wrapcommand(commands.table, 'init', mqinit)
2844 entry[1].extend(mqopt)
2844 entry[1].extend(mqopt)
2845
2845
2846 norepo = commands.norepo.split(" ")
2846 norepo = commands.norepo.split(" ")
2847 for cmd in commands.table.keys():
2847 for cmd in commands.table.keys():
2848 cmd = cmdutil.parsealiases(cmd)[0]
2848 cmd = cmdutil.parsealiases(cmd)[0]
2849 if cmd in norepo:
2849 if cmd in norepo:
2850 continue
2850 continue
2851 entry = extensions.wrapcommand(commands.table, cmd, mqcommand)
2851 entry = extensions.wrapcommand(commands.table, cmd, mqcommand)
2852 entry[1].extend(mqopt)
2852 entry[1].extend(mqopt)
2853
2853
2854 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2854 seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
2855
2855
2856 cmdtable = {
2856 cmdtable = {
2857 "qapplied":
2857 "qapplied":
2858 (applied,
2858 (applied,
2859 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
2859 [('1', 'last', None, _('show only the last patch'))] + seriesopts,
2860 _('hg qapplied [-1] [-s] [PATCH]')),
2860 _('hg qapplied [-1] [-s] [PATCH]')),
2861 "qclone":
2861 "qclone":
2862 (clone,
2862 (clone,
2863 [('', 'pull', None, _('use pull protocol to copy metadata')),
2863 [('', 'pull', None, _('use pull protocol to copy metadata')),
2864 ('U', 'noupdate', None, _('do not update the new working directories')),
2864 ('U', 'noupdate', None, _('do not update the new working directories')),
2865 ('', 'uncompressed', None,
2865 ('', 'uncompressed', None,
2866 _('use uncompressed transfer (fast over LAN)')),
2866 _('use uncompressed transfer (fast over LAN)')),
2867 ('p', 'patches', '',
2867 ('p', 'patches', '',
2868 _('location of source patch repository'), _('REPO')),
2868 _('location of source patch repository'), _('REPO')),
2869 ] + commands.remoteopts,
2869 ] + commands.remoteopts,
2870 _('hg qclone [OPTION]... SOURCE [DEST]')),
2870 _('hg qclone [OPTION]... SOURCE [DEST]')),
2871 "qcommit|qci":
2871 "qcommit|qci":
2872 (commit,
2872 (commit,
2873 commands.table["^commit|ci"][1],
2873 commands.table["^commit|ci"][1],
2874 _('hg qcommit [OPTION]... [FILE]...')),
2874 _('hg qcommit [OPTION]... [FILE]...')),
2875 "^qdiff":
2875 "^qdiff":
2876 (diff,
2876 (diff,
2877 commands.diffopts + commands.diffopts2 + commands.walkopts,
2877 commands.diffopts + commands.diffopts2 + commands.walkopts,
2878 _('hg qdiff [OPTION]... [FILE]...')),
2878 _('hg qdiff [OPTION]... [FILE]...')),
2879 "qdelete|qremove|qrm":
2879 "qdelete|qremove|qrm":
2880 (delete,
2880 (delete,
2881 [('k', 'keep', None, _('keep patch file')),
2881 [('k', 'keep', None, _('keep patch file')),
2882 ('r', 'rev', [],
2882 ('r', 'rev', [],
2883 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2883 _('stop managing a revision (DEPRECATED)'), _('REV'))],
2884 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2884 _('hg qdelete [-k] [-r REV]... [PATCH]...')),
2885 'qfold':
2885 'qfold':
2886 (fold,
2886 (fold,
2887 [('e', 'edit', None, _('edit patch header')),
2887 [('e', 'edit', None, _('edit patch header')),
2888 ('k', 'keep', None, _('keep folded patch files')),
2888 ('k', 'keep', None, _('keep folded patch files')),
2889 ] + commands.commitopts,
2889 ] + commands.commitopts,
2890 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2890 _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
2891 'qgoto':
2891 'qgoto':
2892 (goto,
2892 (goto,
2893 [('f', 'force', None, _('overwrite any local changes'))],
2893 [('f', 'force', None, _('overwrite any local changes'))],
2894 _('hg qgoto [OPTION]... PATCH')),
2894 _('hg qgoto [OPTION]... PATCH')),
2895 'qguard':
2895 'qguard':
2896 (guard,
2896 (guard,
2897 [('l', 'list', None, _('list all patches and guards')),
2897 [('l', 'list', None, _('list all patches and guards')),
2898 ('n', 'none', None, _('drop all guards'))],
2898 ('n', 'none', None, _('drop all guards'))],
2899 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
2899 _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
2900 'qheader': (header, [], _('hg qheader [PATCH]')),
2900 'qheader': (header, [], _('hg qheader [PATCH]')),
2901 "qimport":
2901 "qimport":
2902 (qimport,
2902 (qimport,
2903 [('e', 'existing', None, _('import file in patch directory')),
2903 [('e', 'existing', None, _('import file in patch directory')),
2904 ('n', 'name', '',
2904 ('n', 'name', '',
2905 _('name of patch file'), _('NAME')),
2905 _('name of patch file'), _('NAME')),
2906 ('f', 'force', None, _('overwrite existing files')),
2906 ('f', 'force', None, _('overwrite existing files')),
2907 ('r', 'rev', [],
2907 ('r', 'rev', [],
2908 _('place existing revisions under mq control'), _('REV')),
2908 _('place existing revisions under mq control'), _('REV')),
2909 ('g', 'git', None, _('use git extended diff format')),
2909 ('g', 'git', None, _('use git extended diff format')),
2910 ('P', 'push', None, _('qpush after importing'))],
2910 ('P', 'push', None, _('qpush after importing'))],
2911 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2911 _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
2912 "^qinit":
2912 "^qinit":
2913 (init,
2913 (init,
2914 [('c', 'create-repo', None, _('create queue repository'))],
2914 [('c', 'create-repo', None, _('create queue repository'))],
2915 _('hg qinit [-c]')),
2915 _('hg qinit [-c]')),
2916 "^qnew":
2916 "^qnew":
2917 (new,
2917 (new,
2918 [('e', 'edit', None, _('edit commit message')),
2918 [('e', 'edit', None, _('edit commit message')),
2919 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2919 ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
2920 ('g', 'git', None, _('use git extended diff format')),
2920 ('g', 'git', None, _('use git extended diff format')),
2921 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2921 ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
2922 ('u', 'user', '',
2922 ('u', 'user', '',
2923 _('add "From: <USER>" to patch'), _('USER')),
2923 _('add "From: <USER>" to patch'), _('USER')),
2924 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2924 ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
2925 ('d', 'date', '',
2925 ('d', 'date', '',
2926 _('add "Date: <DATE>" to patch'), _('DATE'))
2926 _('add "Date: <DATE>" to patch'), _('DATE'))
2927 ] + commands.walkopts + commands.commitopts,
2927 ] + commands.walkopts + commands.commitopts,
2928 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
2928 _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
2929 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2929 "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
2930 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2930 "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
2931 "^qpop":
2931 "^qpop":
2932 (pop,
2932 (pop,
2933 [('a', 'all', None, _('pop all patches')),
2933 [('a', 'all', None, _('pop all patches')),
2934 ('n', 'name', '',
2934 ('n', 'name', '',
2935 _('queue name to pop (DEPRECATED)'), _('NAME')),
2935 _('queue name to pop (DEPRECATED)'), _('NAME')),
2936 ('f', 'force', None, _('forget any local changes to patched files'))],
2936 ('f', 'force', None, _('forget any local changes to patched files'))],
2937 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2937 _('hg qpop [-a] [-n NAME] [-f] [PATCH | INDEX]')),
2938 "^qpush":
2938 "^qpush":
2939 (push,
2939 (push,
2940 [('f', 'force', None, _('apply if the patch has rejects')),
2940 [('f', 'force', None, _('apply if the patch has rejects')),
2941 ('l', 'list', None, _('list patch name in commit text')),
2941 ('l', 'list', None, _('list patch name in commit text')),
2942 ('a', 'all', None, _('apply all patches')),
2942 ('a', 'all', None, _('apply all patches')),
2943 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2943 ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
2944 ('n', 'name', '',
2944 ('n', 'name', '',
2945 _('merge queue name (DEPRECATED)'), _('NAME')),
2945 _('merge queue name (DEPRECATED)'), _('NAME')),
2946 ('', 'move', None, _('reorder patch series and apply only the patch'))],
2946 ('', 'move', None, _('reorder patch series and apply only the patch'))],
2947 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [--move] [PATCH | INDEX]')),
2947 _('hg qpush [-f] [-l] [-a] [-m] [-n NAME] [--move] [PATCH | INDEX]')),
2948 "^qrefresh":
2948 "^qrefresh":
2949 (refresh,
2949 (refresh,
2950 [('e', 'edit', None, _('edit commit message')),
2950 [('e', 'edit', None, _('edit commit message')),
2951 ('g', 'git', None, _('use git extended diff format')),
2951 ('g', 'git', None, _('use git extended diff format')),
2952 ('s', 'short', None,
2952 ('s', 'short', None,
2953 _('refresh only files already in the patch and specified files')),
2953 _('refresh only files already in the patch and specified files')),
2954 ('U', 'currentuser', None,
2954 ('U', 'currentuser', None,
2955 _('add/update author field in patch with current user')),
2955 _('add/update author field in patch with current user')),
2956 ('u', 'user', '',
2956 ('u', 'user', '',
2957 _('add/update author field in patch with given user'), _('USER')),
2957 _('add/update author field in patch with given user'), _('USER')),
2958 ('D', 'currentdate', None,
2958 ('D', 'currentdate', None,
2959 _('add/update date field in patch with current date')),
2959 _('add/update date field in patch with current date')),
2960 ('d', 'date', '',
2960 ('d', 'date', '',
2961 _('add/update date field in patch with given date'), _('DATE'))
2961 _('add/update date field in patch with given date'), _('DATE'))
2962 ] + commands.walkopts + commands.commitopts,
2962 ] + commands.walkopts + commands.commitopts,
2963 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2963 _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
2964 'qrename|qmv':
2964 'qrename|qmv':
2965 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2965 (rename, [], _('hg qrename PATCH1 [PATCH2]')),
2966 "qrestore":
2966 "qrestore":
2967 (restore,
2967 (restore,
2968 [('d', 'delete', None, _('delete save entry')),
2968 [('d', 'delete', None, _('delete save entry')),
2969 ('u', 'update', None, _('update queue working directory'))],
2969 ('u', 'update', None, _('update queue working directory'))],
2970 _('hg qrestore [-d] [-u] REV')),
2970 _('hg qrestore [-d] [-u] REV')),
2971 "qsave":
2971 "qsave":
2972 (save,
2972 (save,
2973 [('c', 'copy', None, _('copy patch directory')),
2973 [('c', 'copy', None, _('copy patch directory')),
2974 ('n', 'name', '',
2974 ('n', 'name', '',
2975 _('copy directory name'), _('NAME')),
2975 _('copy directory name'), _('NAME')),
2976 ('e', 'empty', None, _('clear queue status file')),
2976 ('e', 'empty', None, _('clear queue status file')),
2977 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2977 ('f', 'force', None, _('force copy'))] + commands.commitopts,
2978 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2978 _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
2979 "qselect":
2979 "qselect":
2980 (select,
2980 (select,
2981 [('n', 'none', None, _('disable all guards')),
2981 [('n', 'none', None, _('disable all guards')),
2982 ('s', 'series', None, _('list all guards in series file')),
2982 ('s', 'series', None, _('list all guards in series file')),
2983 ('', 'pop', None, _('pop to before first guarded applied patch')),
2983 ('', 'pop', None, _('pop to before first guarded applied patch')),
2984 ('', 'reapply', None, _('pop, then reapply patches'))],
2984 ('', 'reapply', None, _('pop, then reapply patches'))],
2985 _('hg qselect [OPTION]... [GUARD]...')),
2985 _('hg qselect [OPTION]... [GUARD]...')),
2986 "qseries":
2986 "qseries":
2987 (series,
2987 (series,
2988 [('m', 'missing', None, _('print patches not in series')),
2988 [('m', 'missing', None, _('print patches not in series')),
2989 ] + seriesopts,
2989 ] + seriesopts,
2990 _('hg qseries [-ms]')),
2990 _('hg qseries [-ms]')),
2991 "strip":
2991 "strip":
2992 (strip,
2992 (strip,
2993 [('f', 'force', None, _('force removal of changesets even if the '
2993 [('f', 'force', None, _('force removal of changesets even if the '
2994 'working directory has uncommitted changes')),
2994 'working directory has uncommitted changes')),
2995 ('b', 'backup', None, _('bundle only changesets with local revision'
2995 ('b', 'backup', None, _('bundle only changesets with local revision'
2996 ' number greater than REV which are not'
2996 ' number greater than REV which are not'
2997 ' descendants of REV (DEPRECATED)')),
2997 ' descendants of REV (DEPRECATED)')),
2998 ('n', 'nobackup', None, _('no backups'))],
2998 ('n', 'nobackup', None, _('no backups'))],
2999 _('hg strip [-f] [-n] REV')),
2999 _('hg strip [-f] [-n] REV')),
3000 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3000 "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
3001 "qunapplied":
3001 "qunapplied":
3002 (unapplied,
3002 (unapplied,
3003 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3003 [('1', 'first', None, _('show only the first patch'))] + seriesopts,
3004 _('hg qunapplied [-1] [-s] [PATCH]')),
3004 _('hg qunapplied [-1] [-s] [PATCH]')),
3005 "qfinish":
3005 "qfinish":
3006 (finish,
3006 (finish,
3007 [('a', 'applied', None, _('finish all applied changesets'))],
3007 [('a', 'applied', None, _('finish all applied changesets'))],
3008 _('hg qfinish [-a] [REV]...')),
3008 _('hg qfinish [-a] [REV]...')),
3009 'qqueue':
3009 'qqueue':
3010 (qqueue,
3010 (qqueue,
3011 [
3011 [
3012 ('l', 'list', False, _('list all available queues')),
3012 ('l', 'list', False, _('list all available queues')),
3013 ('c', 'create', False, _('create new queue')),
3013 ('c', 'create', False, _('create new queue')),
3014 ('', 'delete', False, _('delete reference to queue')),
3014 ('', 'delete', False, _('delete reference to queue')),
3015 ],
3015 ],
3016 _('[OPTION] [QUEUE]')),
3016 _('[OPTION] [QUEUE]')),
3017 }
3017 }
3018
3018
3019 colortable = {'qguard.negative': 'red',
3019 colortable = {'qguard.negative': 'red',
3020 'qguard.positive': 'yellow',
3020 'qguard.positive': 'yellow',
3021 'qguard.unguarded': 'green',
3021 'qguard.unguarded': 'green',
3022 'qseries.applied': 'blue bold underline',
3022 'qseries.applied': 'blue bold underline',
3023 'qseries.guarded': 'black bold',
3023 'qseries.guarded': 'black bold',
3024 'qseries.missing': 'red bold',
3024 'qseries.missing': 'red bold',
3025 'qseries.unapplied': 'black bold'}
3025 'qseries.unapplied': 'black bold'}
@@ -1,1086 +1,1086 b''
1 # context.py - changeset and file context objects for mercurial
1 # context.py - changeset and file context objects for mercurial
2 #
2 #
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid, nullrev, short, hex
8 from node import nullid, nullrev, short, hex
9 from i18n import _
9 from i18n import _
10 import ancestor, bdiff, error, util, subrepo, patch
10 import ancestor, bdiff, error, util, subrepo, patch
11 import os, errno, stat
11 import os, errno, stat
12
12
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
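context.py builds heavily on util.propertycache for lazily computed attributes. As a rough mental model (an approximation, not the real util implementation), such a descriptor computes the value once and stores it in the instance's __dict__, so later lookups bypass the descriptor entirely; that is also why the code below checks '_manifest' in self.__dict__ to find out whether a value has already been computed:

    # Approximate stand-in for util.propertycache (illustration only).
    class cachedproperty(object):
        def __init__(self, func):
            self.func = func
            self.name = func.__name__
        def __get__(self, obj, type=None):
            value = self.func(obj)
            obj.__dict__[self.name] = value   # later reads hit __dict__ directly
            return value

    class demo(object):
        @cachedproperty
        def answer(self):
            print('computing')
            return 42

    d = demo()
    d.answer        # prints 'computing' and caches 42
    d.answer        # cached: no recomputation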
15 class changectx(object):
15 class changectx(object):
16 """A changecontext object makes access to data related to a particular
16 """A changecontext object makes access to data related to a particular
17 changeset convenient."""
17 changeset convenient."""
18 def __init__(self, repo, changeid=''):
18 def __init__(self, repo, changeid=''):
19 """changeid is a revision number, node, or tag"""
19 """changeid is a revision number, node, or tag"""
20 if changeid == '':
20 if changeid == '':
21 changeid = '.'
21 changeid = '.'
22 self._repo = repo
22 self._repo = repo
23 if isinstance(changeid, (long, int)):
23 if isinstance(changeid, (long, int)):
24 self._rev = changeid
24 self._rev = changeid
25 self._node = self._repo.changelog.node(changeid)
25 self._node = self._repo.changelog.node(changeid)
26 else:
26 else:
27 self._node = self._repo.lookup(changeid)
27 self._node = self._repo.lookup(changeid)
28 self._rev = self._repo.changelog.rev(self._node)
28 self._rev = self._repo.changelog.rev(self._node)
29
29
30 def __str__(self):
30 def __str__(self):
31 return short(self.node())
31 return short(self.node())
32
32
33 def __int__(self):
33 def __int__(self):
34 return self.rev()
34 return self.rev()
35
35
36 def __repr__(self):
36 def __repr__(self):
37 return "<changectx %s>" % str(self)
37 return "<changectx %s>" % str(self)
38
38
39 def __hash__(self):
39 def __hash__(self):
40 try:
40 try:
41 return hash(self._rev)
41 return hash(self._rev)
42 except AttributeError:
42 except AttributeError:
43 return id(self)
43 return id(self)
44
44
45 def __eq__(self, other):
45 def __eq__(self, other):
46 try:
46 try:
47 return self._rev == other._rev
47 return self._rev == other._rev
48 except AttributeError:
48 except AttributeError:
49 return False
49 return False
50
50
51 def __ne__(self, other):
51 def __ne__(self, other):
52 return not (self == other)
52 return not (self == other)
53
53
54 def __nonzero__(self):
54 def __nonzero__(self):
55 return self._rev != nullrev
55 return self._rev != nullrev
56
56
57 @propertycache
57 @propertycache
58 def _changeset(self):
58 def _changeset(self):
59 return self._repo.changelog.read(self.node())
59 return self._repo.changelog.read(self.node())
60
60
61 @propertycache
61 @propertycache
62 def _manifest(self):
62 def _manifest(self):
63 return self._repo.manifest.read(self._changeset[0])
63 return self._repo.manifest.read(self._changeset[0])
64
64
65 @propertycache
65 @propertycache
66 def _manifestdelta(self):
66 def _manifestdelta(self):
67 return self._repo.manifest.readdelta(self._changeset[0])
67 return self._repo.manifest.readdelta(self._changeset[0])
68
68
69 @propertycache
69 @propertycache
70 def _parents(self):
70 def _parents(self):
71 p = self._repo.changelog.parentrevs(self._rev)
71 p = self._repo.changelog.parentrevs(self._rev)
72 if p[1] == nullrev:
72 if p[1] == nullrev:
73 p = p[:-1]
73 p = p[:-1]
74 return [changectx(self._repo, x) for x in p]
74 return [changectx(self._repo, x) for x in p]
75
75
76 @propertycache
76 @propertycache
77 def substate(self):
77 def substate(self):
78 return subrepo.state(self)
78 return subrepo.state(self)
79
79
80 def __contains__(self, key):
80 def __contains__(self, key):
81 return key in self._manifest
81 return key in self._manifest
82
82
83 def __getitem__(self, key):
83 def __getitem__(self, key):
84 return self.filectx(key)
84 return self.filectx(key)
85
85
86 def __iter__(self):
86 def __iter__(self):
87 for f in sorted(self._manifest):
87 for f in sorted(self._manifest):
88 yield f
88 yield f
89
89
90 def changeset(self):
90 def changeset(self):
91 return self._changeset
91 return self._changeset
92 def manifest(self):
92 def manifest(self):
93 return self._manifest
93 return self._manifest
94 def manifestnode(self):
94 def manifestnode(self):
95 return self._changeset[0]
95 return self._changeset[0]
96
96
97 def rev(self):
97 def rev(self):
98 return self._rev
98 return self._rev
99 def node(self):
99 def node(self):
100 return self._node
100 return self._node
101 def hex(self):
101 def hex(self):
102 return hex(self._node)
102 return hex(self._node)
103 def user(self):
103 def user(self):
104 return self._changeset[1]
104 return self._changeset[1]
105 def date(self):
105 def date(self):
106 return self._changeset[2]
106 return self._changeset[2]
107 def files(self):
107 def files(self):
108 return self._changeset[3]
108 return self._changeset[3]
109 def description(self):
109 def description(self):
110 return self._changeset[4]
110 return self._changeset[4]
111 def branch(self):
111 def branch(self):
112 return self._changeset[5].get("branch")
112 return self._changeset[5].get("branch")
113 def extra(self):
113 def extra(self):
114 return self._changeset[5]
114 return self._changeset[5]
115 def tags(self):
115 def tags(self):
116 return self._repo.nodetags(self._node)
116 return self._repo.nodetags(self._node)
117
117
118 def parents(self):
118 def parents(self):
119 """return contexts for each parent changeset"""
119 """return contexts for each parent changeset"""
120 return self._parents
120 return self._parents
121
121
122 def p1(self):
122 def p1(self):
123 return self._parents[0]
123 return self._parents[0]
124
124
125 def p2(self):
125 def p2(self):
126 if len(self._parents) == 2:
126 if len(self._parents) == 2:
127 return self._parents[1]
127 return self._parents[1]
128 return changectx(self._repo, -1)
128 return changectx(self._repo, -1)
129
129
130 def children(self):
130 def children(self):
131 """return contexts for each child changeset"""
131 """return contexts for each child changeset"""
132 c = self._repo.changelog.children(self._node)
132 c = self._repo.changelog.children(self._node)
133 return [changectx(self._repo, x) for x in c]
133 return [changectx(self._repo, x) for x in c]
134
134
135 def ancestors(self):
135 def ancestors(self):
136 for a in self._repo.changelog.ancestors(self._rev):
136 for a in self._repo.changelog.ancestors(self._rev):
137 yield changectx(self._repo, a)
137 yield changectx(self._repo, a)
138
138
139 def descendants(self):
139 def descendants(self):
140 for d in self._repo.changelog.descendants(self._rev):
140 for d in self._repo.changelog.descendants(self._rev):
141 yield changectx(self._repo, d)
141 yield changectx(self._repo, d)
142
142
143 def _fileinfo(self, path):
143 def _fileinfo(self, path):
144 if '_manifest' in self.__dict__:
144 if '_manifest' in self.__dict__:
145 try:
145 try:
146 return self._manifest[path], self._manifest.flags(path)
146 return self._manifest[path], self._manifest.flags(path)
147 except KeyError:
147 except KeyError:
148 raise error.LookupError(self._node, path,
148 raise error.LookupError(self._node, path,
149 _('not found in manifest'))
149 _('not found in manifest'))
150 if '_manifestdelta' in self.__dict__ or path in self.files():
150 if '_manifestdelta' in self.__dict__ or path in self.files():
151 if path in self._manifestdelta:
151 if path in self._manifestdelta:
152 return self._manifestdelta[path], self._manifestdelta.flags(path)
152 return self._manifestdelta[path], self._manifestdelta.flags(path)
153 node, flag = self._repo.manifest.find(self._changeset[0], path)
153 node, flag = self._repo.manifest.find(self._changeset[0], path)
154 if not node:
154 if not node:
155 raise error.LookupError(self._node, path,
155 raise error.LookupError(self._node, path,
156 _('not found in manifest'))
156 _('not found in manifest'))
157
157
158 return node, flag
158 return node, flag
159
159
160 def filenode(self, path):
160 def filenode(self, path):
161 return self._fileinfo(path)[0]
161 return self._fileinfo(path)[0]
162
162
163 def flags(self, path):
163 def flags(self, path):
164 try:
164 try:
165 return self._fileinfo(path)[1]
165 return self._fileinfo(path)[1]
166 except error.LookupError:
166 except error.LookupError:
167 return ''
167 return ''
168
168
169 def filectx(self, path, fileid=None, filelog=None):
169 def filectx(self, path, fileid=None, filelog=None):
170 """get a file context from this changeset"""
170 """get a file context from this changeset"""
171 if fileid is None:
171 if fileid is None:
172 fileid = self.filenode(path)
172 fileid = self.filenode(path)
173 return filectx(self._repo, path, fileid=fileid,
173 return filectx(self._repo, path, fileid=fileid,
174 changectx=self, filelog=filelog)
174 changectx=self, filelog=filelog)
175
175
176 def ancestor(self, c2):
176 def ancestor(self, c2):
177 """
177 """
178 return the ancestor context of self and c2
178 return the ancestor context of self and c2
179 """
179 """
180 # deal with workingctxs
180 # deal with workingctxs
181 n2 = c2._node
181 n2 = c2._node
182 if n2 is None:
182 if n2 is None:
183 n2 = c2._parents[0]._node
183 n2 = c2._parents[0]._node
184 n = self._repo.changelog.ancestor(self._node, n2)
184 n = self._repo.changelog.ancestor(self._node, n2)
185 return changectx(self._repo, n)
185 return changectx(self._repo, n)
186
186
187 def walk(self, match):
187 def walk(self, match):
188 fset = set(match.files())
188 fset = set(match.files())
189 # for dirstate.walk, files=['.'] means "walk the whole tree".
189 # for dirstate.walk, files=['.'] means "walk the whole tree".
190 # follow that here, too
190 # follow that here, too
191 fset.discard('.')
191 fset.discard('.')
192 for fn in self:
192 for fn in self:
193 for ffn in fset:
193 for ffn in fset:
194 # match if the file is the exact name or a directory
194 # match if the file is the exact name or a directory
195 if ffn == fn or fn.startswith("%s/" % ffn):
195 if ffn == fn or fn.startswith("%s/" % ffn):
196 fset.remove(ffn)
196 fset.remove(ffn)
197 break
197 break
198 if match(fn):
198 if match(fn):
199 yield fn
199 yield fn
200 for fn in sorted(fset):
200 for fn in sorted(fset):
201 if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
201 if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
202 yield fn
202 yield fn
203
203
204 def sub(self, path):
204 def sub(self, path):
205 return subrepo.subrepo(self, path)
205 return subrepo.subrepo(self, path)
206
206
207 def diff(self, ctx2=None, match=None, **opts):
207 def diff(self, ctx2=None, match=None, **opts):
208 """Returns a diff generator for the given contexts and matcher"""
208 """Returns a diff generator for the given contexts and matcher"""
209 if ctx2 is None:
209 if ctx2 is None:
210 ctx2 = self.p1()
210 ctx2 = self.p1()
211 if ctx2 is not None and not isinstance(ctx2, changectx):
211 if ctx2 is not None and not isinstance(ctx2, changectx):
212 ctx2 = self._repo[ctx2]
212 ctx2 = self._repo[ctx2]
213 diffopts = patch.diffopts(self._repo.ui, opts)
213 diffopts = patch.diffopts(self._repo.ui, opts)
214 return patch.diff(self._repo, ctx2.node(), self.node(),
214 return patch.diff(self._repo, ctx2.node(), self.node(),
215 match=match, opts=diffopts)
215 match=match, opts=diffopts)
216
216
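A brief, hypothetical usage sketch of the changectx interface defined above (the repository path is invented, and this assumes a local repository opened through mercurial.hg.repository):

    from mercurial import ui as uimod, hg

    repo = hg.repository(uimod.ui(), '/path/to/repo')
    ctx = repo['tip']                          # changectx for the tip changeset
    print('%d %s %s' % (ctx.rev(), ctx, ctx.branch()))
    for f in ctx.files():                      # files touched by this changeset
        print('%s %s' % (f, ctx.flags(f)))
    if ctx.files():
        fctx = ctx.filectx(ctx.files()[0])     # filectx for one changed file
        print(fctx)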
217 class filectx(object):
217 class filectx(object):
218 """A filecontext object makes access to data related to a particular
218 """A filecontext object makes access to data related to a particular
219 filerevision convenient."""
219 filerevision convenient."""
220 def __init__(self, repo, path, changeid=None, fileid=None,
220 def __init__(self, repo, path, changeid=None, fileid=None,
221 filelog=None, changectx=None):
221 filelog=None, changectx=None):
222 """changeid can be a changeset revision, node, or tag.
222 """changeid can be a changeset revision, node, or tag.
223 fileid can be a file revision or node."""
223 fileid can be a file revision or node."""
224 self._repo = repo
224 self._repo = repo
225 self._path = path
225 self._path = path
226
226
227 assert (changeid is not None
227 assert (changeid is not None
228 or fileid is not None
228 or fileid is not None
229 or changectx is not None), \
229 or changectx is not None), \
230 ("bad args: changeid=%r, fileid=%r, changectx=%r"
230 ("bad args: changeid=%r, fileid=%r, changectx=%r"
231 % (changeid, fileid, changectx))
231 % (changeid, fileid, changectx))
232
232
233 if filelog:
233 if filelog:
234 self._filelog = filelog
234 self._filelog = filelog
235
235
236 if changeid is not None:
236 if changeid is not None:
237 self._changeid = changeid
237 self._changeid = changeid
238 if changectx is not None:
238 if changectx is not None:
239 self._changectx = changectx
239 self._changectx = changectx
240 if fileid is not None:
240 if fileid is not None:
241 self._fileid = fileid
241 self._fileid = fileid
242
242
243 @propertycache
243 @propertycache
244 def _changectx(self):
244 def _changectx(self):
245 return changectx(self._repo, self._changeid)
245 return changectx(self._repo, self._changeid)
246
246
247 @propertycache
247 @propertycache
248 def _filelog(self):
248 def _filelog(self):
249 return self._repo.file(self._path)
249 return self._repo.file(self._path)
250
250
251 @propertycache
251 @propertycache
252 def _changeid(self):
252 def _changeid(self):
253 if '_changectx' in self.__dict__:
253 if '_changectx' in self.__dict__:
254 return self._changectx.rev()
254 return self._changectx.rev()
255 else:
255 else:
256 return self._filelog.linkrev(self._filerev)
256 return self._filelog.linkrev(self._filerev)
257
257
258 @propertycache
258 @propertycache
259 def _filenode(self):
259 def _filenode(self):
260 if '_fileid' in self.__dict__:
260 if '_fileid' in self.__dict__:
261 return self._filelog.lookup(self._fileid)
261 return self._filelog.lookup(self._fileid)
262 else:
262 else:
263 return self._changectx.filenode(self._path)
263 return self._changectx.filenode(self._path)
264
264
265 @propertycache
265 @propertycache
266 def _filerev(self):
266 def _filerev(self):
267 return self._filelog.rev(self._filenode)
267 return self._filelog.rev(self._filenode)
268
268
269 @propertycache
269 @propertycache
270 def _repopath(self):
270 def _repopath(self):
271 return self._path
271 return self._path
272
272
273 def __nonzero__(self):
273 def __nonzero__(self):
274 try:
274 try:
275 self._filenode
275 self._filenode
276 return True
276 return True
277 except error.LookupError:
277 except error.LookupError:
278 # file is missing
278 # file is missing
279 return False
279 return False
280
280
281 def __str__(self):
281 def __str__(self):
282 return "%s@%s" % (self.path(), short(self.node()))
282 return "%s@%s" % (self.path(), short(self.node()))
283
283
284 def __repr__(self):
284 def __repr__(self):
285 return "<filectx %s>" % str(self)
285 return "<filectx %s>" % str(self)
286
286
287 def __hash__(self):
287 def __hash__(self):
288 try:
288 try:
289 return hash((self._path, self._filenode))
289 return hash((self._path, self._filenode))
290 except AttributeError:
290 except AttributeError:
291 return id(self)
291 return id(self)
292
292
293 def __eq__(self, other):
293 def __eq__(self, other):
294 try:
294 try:
295 return (self._path == other._path
295 return (self._path == other._path
296 and self._filenode == other._filenode)
296 and self._filenode == other._filenode)
297 except AttributeError:
297 except AttributeError:
298 return False
298 return False
299
299
300 def __ne__(self, other):
300 def __ne__(self, other):
301 return not (self == other)
301 return not (self == other)
302
302
303 def filectx(self, fileid):
303 def filectx(self, fileid):
304 '''opens an arbitrary revision of the file without
304 '''opens an arbitrary revision of the file without
305 opening a new filelog'''
305 opening a new filelog'''
306 return filectx(self._repo, self._path, fileid=fileid,
306 return filectx(self._repo, self._path, fileid=fileid,
307 filelog=self._filelog)
307 filelog=self._filelog)
308
308
309 def filerev(self):
309 def filerev(self):
310 return self._filerev
310 return self._filerev
311 def filenode(self):
311 def filenode(self):
312 return self._filenode
312 return self._filenode
313 def flags(self):
313 def flags(self):
314 return self._changectx.flags(self._path)
314 return self._changectx.flags(self._path)
315 def filelog(self):
315 def filelog(self):
316 return self._filelog
316 return self._filelog
317
317
318 def rev(self):
318 def rev(self):
319 if '_changectx' in self.__dict__:
319 if '_changectx' in self.__dict__:
320 return self._changectx.rev()
320 return self._changectx.rev()
321 if '_changeid' in self.__dict__:
321 if '_changeid' in self.__dict__:
322 return self._changectx.rev()
322 return self._changectx.rev()
323 return self._filelog.linkrev(self._filerev)
323 return self._filelog.linkrev(self._filerev)
324
324
325 def linkrev(self):
325 def linkrev(self):
326 return self._filelog.linkrev(self._filerev)
326 return self._filelog.linkrev(self._filerev)
327 def node(self):
327 def node(self):
328 return self._changectx.node()
328 return self._changectx.node()
329 def hex(self):
329 def hex(self):
330 return hex(self.node())
330 return hex(self.node())
331 def user(self):
331 def user(self):
332 return self._changectx.user()
332 return self._changectx.user()
333 def date(self):
333 def date(self):
334 return self._changectx.date()
334 return self._changectx.date()
335 def files(self):
335 def files(self):
336 return self._changectx.files()
336 return self._changectx.files()
337 def description(self):
337 def description(self):
338 return self._changectx.description()
338 return self._changectx.description()
339 def branch(self):
339 def branch(self):
340 return self._changectx.branch()
340 return self._changectx.branch()
341 def extra(self):
341 def extra(self):
342 return self._changectx.extra()
342 return self._changectx.extra()
343 def manifest(self):
343 def manifest(self):
344 return self._changectx.manifest()
344 return self._changectx.manifest()
345 def changectx(self):
345 def changectx(self):
346 return self._changectx
346 return self._changectx
347
347
348 def data(self):
348 def data(self):
349 return self._filelog.read(self._filenode)
349 return self._filelog.read(self._filenode)
350 def path(self):
350 def path(self):
351 return self._path
351 return self._path
352 def size(self):
352 def size(self):
353 return self._filelog.size(self._filerev)
353 return self._filelog.size(self._filerev)
354
354
355 def cmp(self, text):
355 def cmp(self, text):
356 """compare text with stored file revision
356 """compare text with stored file revision
357
357
358 returns True if text is different than what is stored.
358 returns True if text is different than what is stored.
359 """
359 """
360 return self._filelog.cmp(self._filenode, text)
360 return self._filelog.cmp(self._filenode, text)
361
361
362 def renamed(self):
362 def renamed(self):
363 """check if file was actually renamed in this changeset revision
363 """check if file was actually renamed in this changeset revision
364
364
365 If a rename is logged in the file revision, we report the copy for the
365 If a rename is logged in the file revision, we report the copy for the
366 changeset only if the file revision's linkrev points back to the changeset
366 changeset only if the file revision's linkrev points back to the changeset
367 in question, or if both changeset parents contain different file revisions.
367 in question, or if both changeset parents contain different file revisions.
368 """
368 """
369
369
370 renamed = self._filelog.renamed(self._filenode)
370 renamed = self._filelog.renamed(self._filenode)
371 if not renamed:
371 if not renamed:
372 return renamed
372 return renamed
373
373
374 if self.rev() == self.linkrev():
374 if self.rev() == self.linkrev():
375 return renamed
375 return renamed
376
376
377 name = self.path()
377 name = self.path()
378 fnode = self._filenode
378 fnode = self._filenode
379 for p in self._changectx.parents():
379 for p in self._changectx.parents():
380 try:
380 try:
381 if fnode == p.filenode(name):
381 if fnode == p.filenode(name):
382 return None
382 return None
383 except error.LookupError:
383 except error.LookupError:
384 pass
384 pass
385 return renamed
385 return renamed
386
386
387 def parents(self):
387 def parents(self):
388 p = self._path
388 p = self._path
389 fl = self._filelog
389 fl = self._filelog
390 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
390 pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
391
391
392 r = self._filelog.renamed(self._filenode)
392 r = self._filelog.renamed(self._filenode)
393 if r:
393 if r:
394 pl[0] = (r[0], r[1], None)
394 pl[0] = (r[0], r[1], None)
395
395
396 return [filectx(self._repo, p, fileid=n, filelog=l)
396 return [filectx(self._repo, p, fileid=n, filelog=l)
397 for p, n, l in pl if n != nullid]
397 for p, n, l in pl if n != nullid]
398
398
399 def children(self):
399 def children(self):
400 # hard for renames
400 # hard for renames
401 c = self._filelog.children(self._filenode)
401 c = self._filelog.children(self._filenode)
402 return [filectx(self._repo, self._path, fileid=x,
402 return [filectx(self._repo, self._path, fileid=x,
403 filelog=self._filelog) for x in c]
403 filelog=self._filelog) for x in c]
404
404
405 def annotate(self, follow=False, linenumber=None):
405 def annotate(self, follow=False, linenumber=None):
406 '''returns a list of tuples of (ctx, line) for each line
406 '''returns a list of tuples of (ctx, line) for each line
407 in the file, where ctx is the filectx of the node where
407 in the file, where ctx is the filectx of the node where
408 that line was last changed.
408 that line was last changed.
409 This returns tuples of ((ctx, linenumber), line) for each line,
409 This returns tuples of ((ctx, linenumber), line) for each line,
410 if the "linenumber" parameter is not None.
410 if the "linenumber" parameter is not None.
411 In such tuples, linenumber is the line's position at its first
411 In such tuples, linenumber is the line's position at its first
412 appearance in the managed file.
412 appearance in the managed file.
413 To reduce annotation cost,
413 To reduce annotation cost,
414 a fixed value (False) is used as the linenumber
414 a fixed value (False) is used as the linenumber
415 if the "linenumber" parameter is False.'''
415 if the "linenumber" parameter is False.'''
416
416
417 def decorate_compat(text, rev):
417 def decorate_compat(text, rev):
418 return ([rev] * len(text.splitlines()), text)
418 return ([rev] * len(text.splitlines()), text)
419
419
420 def without_linenumber(text, rev):
420 def without_linenumber(text, rev):
421 return ([(rev, False)] * len(text.splitlines()), text)
421 return ([(rev, False)] * len(text.splitlines()), text)
422
422
423 def with_linenumber(text, rev):
423 def with_linenumber(text, rev):
424 size = len(text.splitlines())
424 size = len(text.splitlines())
425 return ([(rev, i) for i in xrange(1, size + 1)], text)
425 return ([(rev, i) for i in xrange(1, size + 1)], text)
426
426
427 decorate = (((linenumber is None) and decorate_compat) or
427 decorate = (((linenumber is None) and decorate_compat) or
428 (linenumber and with_linenumber) or
428 (linenumber and with_linenumber) or
429 without_linenumber)
429 without_linenumber)
430
430
431 def pair(parent, child):
431 def pair(parent, child):
432 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
432 for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
433 child[0][b1:b2] = parent[0][a1:a2]
433 child[0][b1:b2] = parent[0][a1:a2]
434 return child
434 return child
435
435
436 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
436 getlog = util.lrucachefunc(lambda x: self._repo.file(x))
437 def getctx(path, fileid):
437 def getctx(path, fileid):
438 log = path == self._path and self._filelog or getlog(path)
438 log = path == self._path and self._filelog or getlog(path)
439 return filectx(self._repo, path, fileid=fileid, filelog=log)
439 return filectx(self._repo, path, fileid=fileid, filelog=log)
440 getctx = util.lrucachefunc(getctx)
440 getctx = util.lrucachefunc(getctx)
441
441
442 def parents(f):
442 def parents(f):
443 # we want to reuse filectx objects as much as possible
443 # we want to reuse filectx objects as much as possible
444 p = f._path
444 p = f._path
445 if f._filerev is None: # working dir
445 if f._filerev is None: # working dir
446 pl = [(n.path(), n.filerev()) for n in f.parents()]
446 pl = [(n.path(), n.filerev()) for n in f.parents()]
447 else:
447 else:
448 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
448 pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
449
449
450 if follow:
450 if follow:
451 r = f.renamed()
451 r = f.renamed()
452 if r:
452 if r:
453 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
453 pl[0] = (r[0], getlog(r[0]).rev(r[1]))
454
454
455 return [getctx(p, n) for p, n in pl if n != nullrev]
455 return [getctx(p, n) for p, n in pl if n != nullrev]
456
456
457 # use linkrev to find the first changeset where self appeared
457 # use linkrev to find the first changeset where self appeared
458 if self.rev() != self.linkrev():
458 if self.rev() != self.linkrev():
459 base = self.filectx(self.filerev())
459 base = self.filectx(self.filerev())
460 else:
460 else:
461 base = self
461 base = self
462
462
463 # find all ancestors
463 # find all ancestors
464 needed = {base: 1}
464 needed = {base: 1}
465 visit = [base]
465 visit = [base]
466 files = [base._path]
466 files = [base._path]
467 while visit:
467 while visit:
468 f = visit.pop(0)
468 f = visit.pop(0)
469 for p in parents(f):
469 for p in parents(f):
470 if p not in needed:
470 if p not in needed:
471 needed[p] = 1
471 needed[p] = 1
472 visit.append(p)
472 visit.append(p)
473 if p._path not in files:
473 if p._path not in files:
474 files.append(p._path)
474 files.append(p._path)
475 else:
475 else:
476 # count how many times we'll use this
476 # count how many times we'll use this
477 needed[p] += 1
477 needed[p] += 1
478
478
479 # sort by revision (per file) which is a topological order
479 # sort by revision (per file) which is a topological order
480 visit = []
480 visit = []
481 for f in files:
481 for f in files:
482 visit.extend(n for n in needed if n._path == f)
482 visit.extend(n for n in needed if n._path == f)
483
483
484 hist = {}
484 hist = {}
485 for f in sorted(visit, key=lambda x: x.rev()):
485 for f in sorted(visit, key=lambda x: x.rev()):
486 curr = decorate(f.data(), f)
486 curr = decorate(f.data(), f)
487 for p in parents(f):
487 for p in parents(f):
488 curr = pair(hist[p], curr)
488 curr = pair(hist[p], curr)
489 # trim the history of unneeded revs
489 # trim the history of unneeded revs
490 needed[p] -= 1
490 needed[p] -= 1
491 if not needed[p]:
491 if not needed[p]:
492 del hist[p]
492 del hist[p]
493 hist[f] = curr
493 hist[f] = curr
494
494
495 return zip(hist[f][0], hist[f][1].splitlines(True))
495 return zip(hist[f][0], hist[f][1].splitlines(True))
496
496
497 def ancestor(self, fc2, actx=None):
497 def ancestor(self, fc2, actx=None):
498 """
498 """
499 find the common ancestor file context, if any, of self, and fc2
499 find the common ancestor file context, if any, of self, and fc2
500
500
501 If actx is given, it must be the changectx of the common ancestor
501 If actx is given, it must be the changectx of the common ancestor
502 of self's and fc2's respective changesets.
502 of self's and fc2's respective changesets.
503 """
503 """
504
504
505 if actx is None:
505 if actx is None:
506 actx = self.changectx().ancestor(fc2.changectx())
506 actx = self.changectx().ancestor(fc2.changectx())
507
507
508 # the trivial case: changesets are unrelated, files must be too
508 # the trivial case: changesets are unrelated, files must be too
509 if not actx:
509 if not actx:
510 return None
510 return None
511
511
512 # the easy case: no (relevant) renames
512 # the easy case: no (relevant) renames
513 if fc2.path() == self.path() and self.path() in actx:
513 if fc2.path() == self.path() and self.path() in actx:
514 return actx[self.path()]
514 return actx[self.path()]
515 acache = {}
515 acache = {}
516
516
517 # prime the ancestor cache for the working directory
517 # prime the ancestor cache for the working directory
518 for c in (self, fc2):
518 for c in (self, fc2):
519 if c._filerev is None:
519 if c._filerev is None:
520 pl = [(n.path(), n.filenode()) for n in c.parents()]
520 pl = [(n.path(), n.filenode()) for n in c.parents()]
521 acache[(c._path, None)] = pl
521 acache[(c._path, None)] = pl
522
522
523 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
523 flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
524 def parents(vertex):
524 def parents(vertex):
525 if vertex in acache:
525 if vertex in acache:
526 return acache[vertex]
526 return acache[vertex]
527 f, n = vertex
527 f, n = vertex
528 if f not in flcache:
528 if f not in flcache:
529 flcache[f] = self._repo.file(f)
529 flcache[f] = self._repo.file(f)
530 fl = flcache[f]
530 fl = flcache[f]
531 pl = [(f, p) for p in fl.parents(n) if p != nullid]
531 pl = [(f, p) for p in fl.parents(n) if p != nullid]
532 re = fl.renamed(n)
532 re = fl.renamed(n)
533 if re:
533 if re:
534 pl.append(re)
534 pl.append(re)
535 acache[vertex] = pl
535 acache[vertex] = pl
536 return pl
536 return pl
537
537
538 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
538 a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
539 v = ancestor.ancestor(a, b, parents)
539 v = ancestor.ancestor(a, b, parents)
540 if v:
540 if v:
541 f, n = v
541 f, n = v
542 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
542 return filectx(self._repo, f, fileid=n, filelog=flcache[f])
543
543
544 return None
544 return None
545
545
546 def ancestors(self):
546 def ancestors(self):
547 seen = set([str(self)])
547 seen = set([str(self)])
548 visit = [self]
548 visit = [self]
549 while visit:
549 while visit:
550 for parent in visit.pop(0).parents():
550 for parent in visit.pop(0).parents():
551 s = str(parent)
551 s = str(parent)
552 if s not in seen:
552 if s not in seen:
553 visit.append(parent)
553 visit.append(parent)
554 seen.add(s)
554 seen.add(s)
555 yield parent
555 yield parent
556
556
557 class workingctx(changectx):
557 class workingctx(changectx):
558 """A workingctx object makes access to data related to
558 """A workingctx object makes access to data related to
559 the current working directory convenient.
559 the current working directory convenient.
560 date - any valid date string or (unixtime, offset), or None.
560 date - any valid date string or (unixtime, offset), or None.
561 user - username string, or None.
561 user - username string, or None.
562 extra - a dictionary of extra values, or None.
562 extra - a dictionary of extra values, or None.
563 changes - a list of file lists as returned by localrepo.status()
563 changes - a list of file lists as returned by localrepo.status()
564 or None to use the repository status.
564 or None to use the repository status.
565 """
565 """
566 def __init__(self, repo, text="", user=None, date=None, extra=None,
566 def __init__(self, repo, text="", user=None, date=None, extra=None,
567 changes=None):
567 changes=None):
568 self._repo = repo
568 self._repo = repo
569 self._rev = None
569 self._rev = None
570 self._node = None
570 self._node = None
571 self._text = text
571 self._text = text
572 if date:
572 if date:
573 self._date = util.parsedate(date)
573 self._date = util.parsedate(date)
574 if user:
574 if user:
575 self._user = user
575 self._user = user
576 if changes:
576 if changes:
577 self._status = list(changes[:4])
577 self._status = list(changes[:4])
578 self._unknown = changes[4]
578 self._unknown = changes[4]
579 self._ignored = changes[5]
579 self._ignored = changes[5]
580 self._clean = changes[6]
580 self._clean = changes[6]
581 else:
581 else:
582 self._unknown = None
582 self._unknown = None
583 self._ignored = None
583 self._ignored = None
584 self._clean = None
584 self._clean = None
585
585
586 self._extra = {}
586 self._extra = {}
587 if extra:
587 if extra:
588 self._extra = extra.copy()
588 self._extra = extra.copy()
589 if 'branch' not in self._extra:
589 if 'branch' not in self._extra:
590 branch = self._repo.dirstate.branch()
590 branch = self._repo.dirstate.branch()
591 try:
591 try:
592 branch = branch.decode('UTF-8').encode('UTF-8')
592 branch = branch.decode('UTF-8').encode('UTF-8')
593 except UnicodeDecodeError:
593 except UnicodeDecodeError:
594 raise util.Abort(_('branch name not in UTF-8!'))
594 raise util.Abort(_('branch name not in UTF-8!'))
595 self._extra['branch'] = branch
595 self._extra['branch'] = branch
596 if self._extra['branch'] == '':
596 if self._extra['branch'] == '':
597 self._extra['branch'] = 'default'
597 self._extra['branch'] = 'default'
598
598
599 def __str__(self):
599 def __str__(self):
600 return str(self._parents[0]) + "+"
600 return str(self._parents[0]) + "+"
601
601
602 def __nonzero__(self):
602 def __nonzero__(self):
603 return True
603 return True
604
604
605 def __contains__(self, key):
605 def __contains__(self, key):
606 return self._repo.dirstate[key] not in "?r"
606 return self._repo.dirstate[key] not in "?r"
607
607
608 @propertycache
608 @propertycache
609 def _manifest(self):
609 def _manifest(self):
610 """generate a manifest corresponding to the working directory"""
610 """generate a manifest corresponding to the working directory"""
611
611
612 if self._unknown is None:
612 if self._unknown is None:
613 self.status(unknown=True)
613 self.status(unknown=True)
614
614
615 man = self._parents[0].manifest().copy()
615 man = self._parents[0].manifest().copy()
616 copied = self._repo.dirstate.copies()
616 copied = self._repo.dirstate.copies()
617 if len(self._parents) > 1:
617 if len(self._parents) > 1:
618 man2 = self.p2().manifest()
618 man2 = self.p2().manifest()
619 def getman(f):
619 def getman(f):
620 if f in man:
620 if f in man:
621 return man
621 return man
622 return man2
622 return man2
623 else:
623 else:
624 getman = lambda f: man
624 getman = lambda f: man
625 def cf(f):
625 def cf(f):
626 f = copied.get(f, f)
626 f = copied.get(f, f)
627 return getman(f).flags(f)
627 return getman(f).flags(f)
628 ff = self._repo.dirstate.flagfunc(cf)
628 ff = self._repo.dirstate.flagfunc(cf)
629 modified, added, removed, deleted = self._status
629 modified, added, removed, deleted = self._status
630 unknown = self._unknown
630 unknown = self._unknown
631 for i, l in (("a", added), ("m", modified), ("u", unknown)):
631 for i, l in (("a", added), ("m", modified), ("u", unknown)):
632 for f in l:
632 for f in l:
633 orig = copied.get(f, f)
633 orig = copied.get(f, f)
634 man[f] = getman(orig).get(orig, nullid) + i
634 man[f] = getman(orig).get(orig, nullid) + i
635 try:
635 try:
636 man.set(f, ff(f))
636 man.set(f, ff(f))
637 except OSError:
637 except OSError:
638 pass
638 pass
639
639
640 for f in deleted + removed:
640 for f in deleted + removed:
641 if f in man:
641 if f in man:
642 del man[f]
642 del man[f]
643
643
644 return man
644 return man
645
645
646 @propertycache
646 @propertycache
647 def _status(self):
647 def _status(self):
648 return self._repo.status()[:4]
648 return self._repo.status()[:4]
649
649
650 @propertycache
650 @propertycache
651 def _user(self):
651 def _user(self):
652 return self._repo.ui.username()
652 return self._repo.ui.username()
653
653
654 @propertycache
654 @propertycache
655 def _date(self):
655 def _date(self):
656 return util.makedate()
656 return util.makedate()
657
657
658 @propertycache
658 @propertycache
659 def _parents(self):
659 def _parents(self):
660 p = self._repo.dirstate.parents()
660 p = self._repo.dirstate.parents()
661 if p[1] == nullid:
661 if p[1] == nullid:
662 p = p[:-1]
662 p = p[:-1]
663 self._parents = [changectx(self._repo, x) for x in p]
663 self._parents = [changectx(self._repo, x) for x in p]
664 return self._parents
664 return self._parents
665
665
666 def status(self, ignored=False, clean=False, unknown=False):
666 def status(self, ignored=False, clean=False, unknown=False):
667 """Explicit status query
667 """Explicit status query
668 Unless this method is used to query the working copy status, the
668 Unless this method is used to query the working copy status, the
669 _status property will implicitly read the status using its default
669 _status property will implicitly read the status using its default
670 arguments."""
670 arguments."""
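# Editorial note: explicit status query sketch (assumes "repo" is a
# localrepo; not part of the original changeset):
#
#     wctx = repo[None]
#     wctx.status(unknown=True)    # also populates the _unknown cache
#     untracked = wctx.unknown()   # safe only after status(unknown=True)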
671 stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
671 stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown)
672 self._unknown = self._ignored = self._clean = None
672 self._unknown = self._ignored = self._clean = None
673 if unknown:
673 if unknown:
674 self._unknown = stat[4]
674 self._unknown = stat[4]
675 if ignored:
675 if ignored:
676 self._ignored = stat[5]
676 self._ignored = stat[5]
677 if clean:
677 if clean:
678 self._clean = stat[6]
678 self._clean = stat[6]
679 self._status = stat[:4]
679 self._status = stat[:4]
680 return stat
680 return stat
681
681
682 def manifest(self):
682 def manifest(self):
683 return self._manifest
683 return self._manifest
684 def user(self):
684 def user(self):
685 return self._user or self._repo.ui.username()
685 return self._user or self._repo.ui.username()
686 def date(self):
686 def date(self):
687 return self._date
687 return self._date
688 def description(self):
688 def description(self):
689 return self._text
689 return self._text
690 def files(self):
690 def files(self):
691 return sorted(self._status[0] + self._status[1] + self._status[2])
691 return sorted(self._status[0] + self._status[1] + self._status[2])
692
692
693 def modified(self):
693 def modified(self):
694 return self._status[0]
694 return self._status[0]
695 def added(self):
695 def added(self):
696 return self._status[1]
696 return self._status[1]
697 def removed(self):
697 def removed(self):
698 return self._status[2]
698 return self._status[2]
699 def deleted(self):
699 def deleted(self):
700 return self._status[3]
700 return self._status[3]
701 def unknown(self):
701 def unknown(self):
702 assert self._unknown is not None # must call status first
702 assert self._unknown is not None # must call status first
703 return self._unknown
703 return self._unknown
704 def ignored(self):
704 def ignored(self):
705 assert self._ignored is not None # must call status first
705 assert self._ignored is not None # must call status first
706 return self._ignored
706 return self._ignored
707 def clean(self):
707 def clean(self):
708 assert self._clean is not None # must call status first
708 assert self._clean is not None # must call status first
709 return self._clean
709 return self._clean
710 def branch(self):
710 def branch(self):
711 return self._extra['branch']
711 return self._extra['branch']
712 def extra(self):
712 def extra(self):
713 return self._extra
713 return self._extra
714
714
715 def tags(self):
715 def tags(self):
716 t = []
716 t = []
717 [t.extend(p.tags()) for p in self.parents()]
717 [t.extend(p.tags()) for p in self.parents()]
718 return t
718 return t
719
719
720 def children(self):
720 def children(self):
721 return []
721 return []
722
722
723 def flags(self, path):
723 def flags(self, path):
724 if '_manifest' in self.__dict__:
724 if '_manifest' in self.__dict__:
725 try:
725 try:
726 return self._manifest.flags(path)
726 return self._manifest.flags(path)
727 except KeyError:
727 except KeyError:
728 return ''
728 return ''
729
729
730 orig = self._repo.dirstate.copies().get(path, path)
730 orig = self._repo.dirstate.copies().get(path, path)
731
731
732 def findflag(ctx):
732 def findflag(ctx):
733 mnode = ctx.changeset()[0]
733 mnode = ctx.changeset()[0]
734 node, flag = self._repo.manifest.find(mnode, orig)
734 node, flag = self._repo.manifest.find(mnode, orig)
735 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
735 ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
736 try:
736 try:
737 return ff(path)
737 return ff(path)
738 except OSError:
738 except OSError:
739 pass
739 pass
740
740
741 flag = findflag(self._parents[0])
741 flag = findflag(self._parents[0])
742 if flag is None and len(self.parents()) > 1:
742 if flag is None and len(self.parents()) > 1:
743 flag = findflag(self._parents[1])
743 flag = findflag(self._parents[1])
744 if flag is None or self._repo.dirstate[path] == 'r':
744 if flag is None or self._repo.dirstate[path] == 'r':
745 return ''
745 return ''
746 return flag
746 return flag
747
747
748 def filectx(self, path, filelog=None):
748 def filectx(self, path, filelog=None):
749 """get a file context from the working directory"""
749 """get a file context from the working directory"""
750 return workingfilectx(self._repo, path, workingctx=self,
750 return workingfilectx(self._repo, path, workingctx=self,
751 filelog=filelog)
751 filelog=filelog)
752
752
753 def ancestor(self, c2):
753 def ancestor(self, c2):
754 """return the ancestor context of self and c2"""
754 """return the ancestor context of self and c2"""
755 return self._parents[0].ancestor(c2) # punt on two parents for now
755 return self._parents[0].ancestor(c2) # punt on two parents for now
756
756
757 def walk(self, match):
757 def walk(self, match):
758 return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
758 return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
759 True, False))
759 True, False))
760
760
761 def dirty(self, missing=False):
761 def dirty(self, missing=False):
762 "check whether a working directory is modified"
762 "check whether a working directory is modified"
763 # check subrepos first
763 # check subrepos first
764 for s in self.substate:
764 for s in self.substate:
765 if self.sub(s).dirty():
765 if self.sub(s).dirty():
766 return True
766 return True
767 # check current working dir
767 # check current working dir
768 return (self.p2() or self.branch() != self.p1().branch() or
768 return (self.p2() or self.branch() != self.p1().branch() or
769 self.modified() or self.added() or self.removed() or
769 self.modified() or self.added() or self.removed() or
770 (missing and self.deleted()))
770 (missing and self.deleted()))
771
771
772 def add(self, list):
772 def add(self, list):
773 wlock = self._repo.wlock()
773 wlock = self._repo.wlock()
774 ui, ds = self._repo.ui, self._repo.dirstate
774 ui, ds = self._repo.ui, self._repo.dirstate
775 try:
775 try:
776 rejected = []
776 rejected = []
777 for f in list:
777 for f in list:
778 p = self._repo.wjoin(f)
778 p = self._repo.wjoin(f)
779 try:
779 try:
780 st = os.lstat(p)
780 st = os.lstat(p)
781 except OSError:
781 except OSError:
782 ui.warn(_("%s does not exist!\n") % f)
782 ui.warn(_("%s does not exist!\n") % f)
783 rejected.append(f)
783 rejected.append(f)
784 continue
784 continue
785 if st.st_size > 10000000:
785 if st.st_size > 10000000:
786 ui.warn(_("%s: up to %d MB of RAM may be required "
786 ui.warn(_("%s: up to %d MB of RAM may be required "
787 "to manage this file\n"
787 "to manage this file\n"
788 "(use 'hg revert %s' to cancel the "
788 "(use 'hg revert %s' to cancel the "
789 "pending addition)\n")
789 "pending addition)\n")
790 % (f, 3 * st.st_size // 1000000, f))
790 % (f, 3 * st.st_size // 1000000, f))
791 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
791 if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)):
792 ui.warn(_("%s not added: only files and symlinks "
792 ui.warn(_("%s not added: only files and symlinks "
793 "supported currently\n") % f)
793 "supported currently\n") % f)
794 rejected.append(f)
794 rejected.append(f)
795 elif ds[f] in 'amn':
795 elif ds[f] in 'amn':
796 ui.warn(_("%s already tracked!\n") % f)
796 ui.warn(_("%s already tracked!\n") % f)
797 elif ds[f] == 'r':
797 elif ds[f] == 'r':
798 ds.normallookup(f)
798 ds.normallookup(f)
799 else:
799 else:
800 ds.add(f)
800 ds.add(f)
801 return rejected
801 return rejected
802 finally:
802 finally:
803 wlock.release()
803 wlock.release()
804
804
805 def forget(self, list):
805 def forget(self, list):
806 wlock = self._repo.wlock()
806 wlock = self._repo.wlock()
807 try:
807 try:
808 for f in list:
808 for f in list:
809 if self._repo.dirstate[f] != 'a':
809 if self._repo.dirstate[f] != 'a':
810 self._repo.ui.warn(_("%s not added!\n") % f)
810 self._repo.ui.warn(_("%s not added!\n") % f)
811 else:
811 else:
812 self._repo.dirstate.forget(f)
812 self._repo.dirstate.forget(f)
813 finally:
813 finally:
814 wlock.release()
814 wlock.release()
815
815
816 def remove(self, list, unlink=False):
816 def remove(self, list, unlink=False):
817 if unlink:
817 if unlink:
818 for f in list:
818 for f in list:
819 try:
819 try:
820 util.unlink(self._repo.wjoin(f))
820 util.unlink(self._repo.wjoin(f))
821 except OSError, inst:
821 except OSError, inst:
822 if inst.errno != errno.ENOENT:
822 if inst.errno != errno.ENOENT:
823 raise
823 raise
824 wlock = self._repo.wlock()
824 wlock = self._repo.wlock()
825 try:
825 try:
826 for f in list:
826 for f in list:
827 if unlink and os.path.exists(self._repo.wjoin(f)):
827 if unlink and os.path.lexists(self._repo.wjoin(f)):
828 self._repo.ui.warn(_("%s still exists!\n") % f)
828 self._repo.ui.warn(_("%s still exists!\n") % f)
829 elif self._repo.dirstate[f] == 'a':
829 elif self._repo.dirstate[f] == 'a':
830 self._repo.dirstate.forget(f)
830 self._repo.dirstate.forget(f)
831 elif f not in self._repo.dirstate:
831 elif f not in self._repo.dirstate:
832 self._repo.ui.warn(_("%s not tracked!\n") % f)
832 self._repo.ui.warn(_("%s not tracked!\n") % f)
833 else:
833 else:
834 self._repo.dirstate.remove(f)
834 self._repo.dirstate.remove(f)
835 finally:
835 finally:
836 wlock.release()
836 wlock.release()
837
837
838 def undelete(self, list):
838 def undelete(self, list):
839 pctxs = self.parents()
839 pctxs = self.parents()
840 wlock = self._repo.wlock()
840 wlock = self._repo.wlock()
841 try:
841 try:
842 for f in list:
842 for f in list:
843 if self._repo.dirstate[f] != 'r':
843 if self._repo.dirstate[f] != 'r':
844 self._repo.ui.warn(_("%s not removed!\n") % f)
844 self._repo.ui.warn(_("%s not removed!\n") % f)
845 else:
845 else:
846 fctx = f in pctxs[0] and pctxs[0] or pctxs[1]
846 fctx = f in pctxs[0] and pctxs[0] or pctxs[1]
847 t = fctx.data()
847 t = fctx.data()
848 self._repo.wwrite(f, t, fctx.flags())
848 self._repo.wwrite(f, t, fctx.flags())
849 self._repo.dirstate.normal(f)
849 self._repo.dirstate.normal(f)
850 finally:
850 finally:
851 wlock.release()
851 wlock.release()
852
852
853 def copy(self, source, dest):
853 def copy(self, source, dest):
854 p = self._repo.wjoin(dest)
854 p = self._repo.wjoin(dest)
855 if not (os.path.exists(p) or os.path.islink(p)):
855 if not os.path.lexists(p):
856 self._repo.ui.warn(_("%s does not exist!\n") % dest)
856 self._repo.ui.warn(_("%s does not exist!\n") % dest)
857 elif not (os.path.isfile(p) or os.path.islink(p)):
857 elif not (os.path.isfile(p) or os.path.islink(p)):
858 self._repo.ui.warn(_("copy failed: %s is not a file or a "
858 self._repo.ui.warn(_("copy failed: %s is not a file or a "
859 "symbolic link\n") % dest)
859 "symbolic link\n") % dest)
860 else:
860 else:
861 wlock = self._repo.wlock()
861 wlock = self._repo.wlock()
862 try:
862 try:
863 if self._repo.dirstate[dest] in '?r':
863 if self._repo.dirstate[dest] in '?r':
864 self._repo.dirstate.add(dest)
864 self._repo.dirstate.add(dest)
865 self._repo.dirstate.copy(source, dest)
865 self._repo.dirstate.copy(source, dest)
866 finally:
866 finally:
867 wlock.release()
867 wlock.release()
868
868
869 class workingfilectx(filectx):
869 class workingfilectx(filectx):
870 """A workingfilectx object makes access to data related to a particular
870 """A workingfilectx object makes access to data related to a particular
871 file in the working directory convenient."""
871 file in the working directory convenient."""
872 def __init__(self, repo, path, filelog=None, workingctx=None):
872 def __init__(self, repo, path, filelog=None, workingctx=None):
873 """changeid can be a changeset revision, node, or tag.
873 """changeid can be a changeset revision, node, or tag.
874 fileid can be a file revision or node."""
874 fileid can be a file revision or node."""
875 self._repo = repo
875 self._repo = repo
876 self._path = path
876 self._path = path
877 self._changeid = None
877 self._changeid = None
878 self._filerev = self._filenode = None
878 self._filerev = self._filenode = None
879
879
880 if filelog:
880 if filelog:
881 self._filelog = filelog
881 self._filelog = filelog
882 if workingctx:
882 if workingctx:
883 self._changectx = workingctx
883 self._changectx = workingctx
884
884
885 @propertycache
885 @propertycache
886 def _changectx(self):
886 def _changectx(self):
887 return workingctx(self._repo)
887 return workingctx(self._repo)
888
888
889 def __nonzero__(self):
889 def __nonzero__(self):
890 return True
890 return True
891
891
892 def __str__(self):
892 def __str__(self):
893 return "%s@%s" % (self.path(), self._changectx)
893 return "%s@%s" % (self.path(), self._changectx)
894
894
895 def data(self):
895 def data(self):
896 return self._repo.wread(self._path)
896 return self._repo.wread(self._path)
897 def renamed(self):
897 def renamed(self):
898 rp = self._repo.dirstate.copied(self._path)
898 rp = self._repo.dirstate.copied(self._path)
899 if not rp:
899 if not rp:
900 return None
900 return None
901 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
901 return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
902
902
903 def parents(self):
903 def parents(self):
904 '''return parent filectxs, following copies if necessary'''
904 '''return parent filectxs, following copies if necessary'''
905 def filenode(ctx, path):
905 def filenode(ctx, path):
906 return ctx._manifest.get(path, nullid)
906 return ctx._manifest.get(path, nullid)
907
907
908 path = self._path
908 path = self._path
909 fl = self._filelog
909 fl = self._filelog
910 pcl = self._changectx._parents
910 pcl = self._changectx._parents
911 renamed = self.renamed()
911 renamed = self.renamed()
912
912
913 if renamed:
913 if renamed:
914 pl = [renamed + (None,)]
914 pl = [renamed + (None,)]
915 else:
915 else:
916 pl = [(path, filenode(pcl[0], path), fl)]
916 pl = [(path, filenode(pcl[0], path), fl)]
917
917
918 for pc in pcl[1:]:
918 for pc in pcl[1:]:
919 pl.append((path, filenode(pc, path), fl))
919 pl.append((path, filenode(pc, path), fl))
920
920
921 return [filectx(self._repo, p, fileid=n, filelog=l)
921 return [filectx(self._repo, p, fileid=n, filelog=l)
922 for p, n, l in pl if n != nullid]
922 for p, n, l in pl if n != nullid]
923
923
924 def children(self):
924 def children(self):
925 return []
925 return []
926
926
927 def size(self):
927 def size(self):
928 return os.lstat(self._repo.wjoin(self._path)).st_size
928 return os.lstat(self._repo.wjoin(self._path)).st_size
929 def date(self):
929 def date(self):
930 t, tz = self._changectx.date()
930 t, tz = self._changectx.date()
931 try:
931 try:
932 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
932 return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
933 except OSError, err:
933 except OSError, err:
934 if err.errno != errno.ENOENT:
934 if err.errno != errno.ENOENT:
935 raise
935 raise
936 return (t, tz)
936 return (t, tz)
937
937
938 def cmp(self, text):
938 def cmp(self, text):
939 """compare text with disk content
939 """compare text with disk content
940
940
941 returns True if text is different than what is on disk.
941 returns True if text is different than what is on disk.
942 """
942 """
943 return self._repo.wread(self._path) != text
943 return self._repo.wread(self._path) != text
944
944
945 class memctx(object):
945 class memctx(object):
946 """Use memctx to perform in-memory commits via localrepo.commitctx().
946 """Use memctx to perform in-memory commits via localrepo.commitctx().
947
947
948 Revision information is supplied at initialization time, while
948 Revision information is supplied at initialization time, while
949 related file data is made available through a callback
949 related file data is made available through a callback
950 mechanism. 'repo' is the current localrepo, 'parents' is a
950 mechanism. 'repo' is the current localrepo, 'parents' is a
951 sequence of two parent revisions identifiers (pass None for every
951 sequence of two parent revisions identifiers (pass None for every
952 missing parent), 'text' is the commit message and 'files' lists
952 missing parent), 'text' is the commit message and 'files' lists
953 names of files touched by the revision (normalized and relative to
953 names of files touched by the revision (normalized and relative to
954 repository root).
954 repository root).
955
955
956 filectxfn(repo, memctx, path) is a callable receiving the
956 filectxfn(repo, memctx, path) is a callable receiving the
957 repository, the current memctx object and the normalized path of
957 repository, the current memctx object and the normalized path of
958 requested file, relative to repository root. It is fired by the
958 requested file, relative to repository root. It is fired by the
959 commit function for every file in 'files', but calls order is
959 commit function for every file in 'files', but calls order is
960 undefined. If the file is available in the revision being
960 undefined. If the file is available in the revision being
961 committed (updated or added), filectxfn returns a memfilectx
961 committed (updated or added), filectxfn returns a memfilectx
962 object. If the file was removed, filectxfn raises an
962 object. If the file was removed, filectxfn raises an
963 IOError. Moved files are represented by marking the source file
963 IOError. Moved files are represented by marking the source file
964 removed and the new file added with copy information (see
964 removed and the new file added with copy information (see
965 memfilectx).
965 memfilectx).
966
966
967 user receives the committer name and defaults to current
967 user receives the committer name and defaults to current
968 repository username, date is the commit date in any format
968 repository username, date is the commit date in any format
969 supported by util.parsedate() and defaults to current date, extra
969 supported by util.parsedate() and defaults to current date, extra
970 is a dictionary of metadata or is left empty.
970 is a dictionary of metadata or is left empty.
971 """
971 """
972 def __init__(self, repo, parents, text, files, filectxfn, user=None,
972 def __init__(self, repo, parents, text, files, filectxfn, user=None,
973 date=None, extra=None):
973 date=None, extra=None):
974 self._repo = repo
974 self._repo = repo
975 self._rev = None
975 self._rev = None
976 self._node = None
976 self._node = None
977 self._text = text
977 self._text = text
978 self._date = date and util.parsedate(date) or util.makedate()
978 self._date = date and util.parsedate(date) or util.makedate()
979 self._user = user
979 self._user = user
980 parents = [(p or nullid) for p in parents]
980 parents = [(p or nullid) for p in parents]
981 p1, p2 = parents
981 p1, p2 = parents
982 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
982 self._parents = [changectx(self._repo, p) for p in (p1, p2)]
983 files = sorted(set(files))
983 files = sorted(set(files))
984 self._status = [files, [], [], [], [], [], []]
984 self._status = [files, [], [], [], [], [], []]
985 self._filectxfn = filectxfn
985 self._filectxfn = filectxfn
986
986
987 self._extra = extra and extra.copy() or {}
987 self._extra = extra and extra.copy() or {}
988 if 'branch' not in self._extra:
988 if 'branch' not in self._extra:
989 self._extra['branch'] = 'default'
989 self._extra['branch'] = 'default'
990 elif self._extra.get('branch') == '':
990 elif self._extra.get('branch') == '':
991 self._extra['branch'] = 'default'
991 self._extra['branch'] = 'default'
992
992
993 def __str__(self):
993 def __str__(self):
994 return str(self._parents[0]) + "+"
994 return str(self._parents[0]) + "+"
995
995
996 def __int__(self):
996 def __int__(self):
997 return self._rev
997 return self._rev
998
998
999 def __nonzero__(self):
999 def __nonzero__(self):
1000 return True
1000 return True
1001
1001
1002 def __getitem__(self, key):
1002 def __getitem__(self, key):
1003 return self.filectx(key)
1003 return self.filectx(key)
1004
1004
1005 def p1(self):
1005 def p1(self):
1006 return self._parents[0]
1006 return self._parents[0]
1007 def p2(self):
1007 def p2(self):
1008 return self._parents[1]
1008 return self._parents[1]
1009
1009
1010 def user(self):
1010 def user(self):
1011 return self._user or self._repo.ui.username()
1011 return self._user or self._repo.ui.username()
1012 def date(self):
1012 def date(self):
1013 return self._date
1013 return self._date
1014 def description(self):
1014 def description(self):
1015 return self._text
1015 return self._text
1016 def files(self):
1016 def files(self):
1017 return self.modified()
1017 return self.modified()
1018 def modified(self):
1018 def modified(self):
1019 return self._status[0]
1019 return self._status[0]
1020 def added(self):
1020 def added(self):
1021 return self._status[1]
1021 return self._status[1]
1022 def removed(self):
1022 def removed(self):
1023 return self._status[2]
1023 return self._status[2]
1024 def deleted(self):
1024 def deleted(self):
1025 return self._status[3]
1025 return self._status[3]
1026 def unknown(self):
1026 def unknown(self):
1027 return self._status[4]
1027 return self._status[4]
1028 def ignored(self):
1028 def ignored(self):
1029 return self._status[5]
1029 return self._status[5]
1030 def clean(self):
1030 def clean(self):
1031 return self._status[6]
1031 return self._status[6]
1032 def branch(self):
1032 def branch(self):
1033 return self._extra['branch']
1033 return self._extra['branch']
1034 def extra(self):
1034 def extra(self):
1035 return self._extra
1035 return self._extra
1036 def flags(self, f):
1036 def flags(self, f):
1037 return self[f].flags()
1037 return self[f].flags()
1038
1038
1039 def parents(self):
1039 def parents(self):
1040 """return contexts for each parent changeset"""
1040 """return contexts for each parent changeset"""
1041 return self._parents
1041 return self._parents
1042
1042
1043 def filectx(self, path, filelog=None):
1043 def filectx(self, path, filelog=None):
1044 """get a file context from the working directory"""
1044 """get a file context from the working directory"""
1045 return self._filectxfn(self._repo, self, path)
1045 return self._filectxfn(self._repo, self, path)
1046
1046
1047 def commit(self):
1047 def commit(self):
1048 """commit context to the repo"""
1048 """commit context to the repo"""
1049 return self._repo.commitctx(self)
1049 return self._repo.commitctx(self)
1050
1050
1051 class memfilectx(object):
1051 class memfilectx(object):
1052 """memfilectx represents an in-memory file to commit.
1052 """memfilectx represents an in-memory file to commit.
1053
1053
1054 See memctx for more details.
1054 See memctx for more details.
1055 """
1055 """
1056 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1056 def __init__(self, path, data, islink=False, isexec=False, copied=None):
1057 """
1057 """
1058 path is the normalized file path relative to repository root.
1058 path is the normalized file path relative to repository root.
1059 data is the file content as a string.
1059 data is the file content as a string.
1060 islink is True if the file is a symbolic link.
1060 islink is True if the file is a symbolic link.
1061 isexec is True if the file is executable.
1061 isexec is True if the file is executable.
1062 copied is the source file path if current file was copied in the
1062 copied is the source file path if current file was copied in the
1063 revision being committed, or None."""
1063 revision being committed, or None."""
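# Editorial note: a copied file would be described like this (illustrative
# names; not part of the original changeset):
#
#     memfilectx('new/name.txt', data, islink=False, isexec=False,
#                copied='old/name.txt')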
1064 self._path = path
1064 self._path = path
1065 self._data = data
1065 self._data = data
1066 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1066 self._flags = (islink and 'l' or '') + (isexec and 'x' or '')
1067 self._copied = None
1067 self._copied = None
1068 if copied:
1068 if copied:
1069 self._copied = (copied, nullid)
1069 self._copied = (copied, nullid)
1070
1070
1071 def __nonzero__(self):
1071 def __nonzero__(self):
1072 return True
1072 return True
1073 def __str__(self):
1073 def __str__(self):
1074 return "%s@%s" % (self.path(), self._changectx)
1074 return "%s@%s" % (self.path(), self._changectx)
1075 def path(self):
1075 def path(self):
1076 return self._path
1076 return self._path
1077 def data(self):
1077 def data(self):
1078 return self._data
1078 return self._data
1079 def flags(self):
1079 def flags(self):
1080 return self._flags
1080 return self._flags
1081 def isexec(self):
1081 def isexec(self):
1082 return 'x' in self._flags
1082 return 'x' in self._flags
1083 def islink(self):
1083 def islink(self):
1084 return 'l' in self._flags
1084 return 'l' in self._flags
1085 def renamed(self):
1085 def renamed(self):
1086 return self._copied
1086 return self._copied
@@ -1,669 +1,669 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import util, ignore, osutil, parsers
10 import util, ignore, osutil, parsers
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _format = ">cllll"
14 _format = ">cllll"
15 propertycache = util.propertycache
15 propertycache = util.propertycache
16
16
17 def _finddirs(path):
17 def _finddirs(path):
18 pos = path.rfind('/')
18 pos = path.rfind('/')
19 while pos != -1:
19 while pos != -1:
20 yield path[:pos]
20 yield path[:pos]
21 pos = path.rfind('/', 0, pos)
21 pos = path.rfind('/', 0, pos)
22
22
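# Editorial note: _finddirs yields every ancestor directory of a path, from
# the deepest up to the top level (sketch, not part of the original
# changeset):
#
#     list(_finddirs('a/b/c'))  ->  ['a/b', 'a']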
23 def _incdirs(dirs, path):
23 def _incdirs(dirs, path):
24 for base in _finddirs(path):
24 for base in _finddirs(path):
25 if base in dirs:
25 if base in dirs:
26 dirs[base] += 1
26 dirs[base] += 1
27 return
27 return
28 dirs[base] = 1
28 dirs[base] = 1
29
29
30 def _decdirs(dirs, path):
30 def _decdirs(dirs, path):
31 for base in _finddirs(path):
31 for base in _finddirs(path):
32 if dirs[base] > 1:
32 if dirs[base] > 1:
33 dirs[base] -= 1
33 dirs[base] -= 1
34 return
34 return
35 del dirs[base]
35 del dirs[base]
36
36
37 class dirstate(object):
37 class dirstate(object):
38
38
39 def __init__(self, opener, ui, root):
39 def __init__(self, opener, ui, root):
40 '''Create a new dirstate object.
40 '''Create a new dirstate object.
41
41
42 opener is an open()-like callable that can be used to open the
42 opener is an open()-like callable that can be used to open the
43 dirstate file; root is the root of the directory tracked by
43 dirstate file; root is the root of the directory tracked by
44 the dirstate.
44 the dirstate.
45 '''
45 '''
46 self._opener = opener
46 self._opener = opener
47 self._root = root
47 self._root = root
48 self._rootdir = os.path.join(root, '')
48 self._rootdir = os.path.join(root, '')
49 self._dirty = False
49 self._dirty = False
50 self._dirtypl = False
50 self._dirtypl = False
51 self._ui = ui
51 self._ui = ui
52
52
53 @propertycache
53 @propertycache
54 def _map(self):
54 def _map(self):
55 '''Return the dirstate contents as a map from filename to
55 '''Return the dirstate contents as a map from filename to
56 (state, mode, size, time).'''
56 (state, mode, size, time).'''
57 self._read()
57 self._read()
58 return self._map
58 return self._map
59
59
60 @propertycache
60 @propertycache
61 def _copymap(self):
61 def _copymap(self):
62 self._read()
62 self._read()
63 return self._copymap
63 return self._copymap
64
64
65 @propertycache
65 @propertycache
66 def _foldmap(self):
66 def _foldmap(self):
67 f = {}
67 f = {}
68 for name in self._map:
68 for name in self._map:
69 f[os.path.normcase(name)] = name
69 f[os.path.normcase(name)] = name
70 return f
70 return f
71
71
72 @propertycache
72 @propertycache
73 def _branch(self):
73 def _branch(self):
74 try:
74 try:
75 return self._opener("branch").read().strip() or "default"
75 return self._opener("branch").read().strip() or "default"
76 except IOError:
76 except IOError:
77 return "default"
77 return "default"
78
78
79 @propertycache
79 @propertycache
80 def _pl(self):
80 def _pl(self):
81 try:
81 try:
82 st = self._opener("dirstate").read(40)
82 st = self._opener("dirstate").read(40)
83 l = len(st)
83 l = len(st)
84 if l == 40:
84 if l == 40:
85 return st[:20], st[20:40]
85 return st[:20], st[20:40]
86 elif l > 0 and l < 40:
86 elif l > 0 and l < 40:
87 raise util.Abort(_('working directory state appears damaged!'))
87 raise util.Abort(_('working directory state appears damaged!'))
88 except IOError, err:
88 except IOError, err:
89 if err.errno != errno.ENOENT:
89 if err.errno != errno.ENOENT:
90 raise
90 raise
91 return [nullid, nullid]
91 return [nullid, nullid]
92
92
93 @propertycache
93 @propertycache
94 def _dirs(self):
94 def _dirs(self):
95 dirs = {}
95 dirs = {}
96 for f, s in self._map.iteritems():
96 for f, s in self._map.iteritems():
97 if s[0] != 'r':
97 if s[0] != 'r':
98 _incdirs(dirs, f)
98 _incdirs(dirs, f)
99 return dirs
99 return dirs
100
100
101 @propertycache
101 @propertycache
102 def _ignore(self):
102 def _ignore(self):
103 files = [self._join('.hgignore')]
103 files = [self._join('.hgignore')]
104 for name, path in self._ui.configitems("ui"):
104 for name, path in self._ui.configitems("ui"):
105 if name == 'ignore' or name.startswith('ignore.'):
105 if name == 'ignore' or name.startswith('ignore.'):
106 files.append(util.expandpath(path))
106 files.append(util.expandpath(path))
107 return ignore.ignore(self._root, files, self._ui.warn)
107 return ignore.ignore(self._root, files, self._ui.warn)
108
108
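# Editorial note: additional ignore files are picked up from ui.ignore*
# settings, e.g. in an hgrc (illustrative path; not part of the original
# changeset):
#
#     [ui]
#     ignore.global = ~/.hgignore-global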
109 @propertycache
109 @propertycache
110 def _slash(self):
110 def _slash(self):
111 return self._ui.configbool('ui', 'slash') and os.sep != '/'
111 return self._ui.configbool('ui', 'slash') and os.sep != '/'
112
112
113 @propertycache
113 @propertycache
114 def _checklink(self):
114 def _checklink(self):
115 return util.checklink(self._root)
115 return util.checklink(self._root)
116
116
117 @propertycache
117 @propertycache
118 def _checkexec(self):
118 def _checkexec(self):
119 return util.checkexec(self._root)
119 return util.checkexec(self._root)
120
120
121 @propertycache
121 @propertycache
122 def _checkcase(self):
122 def _checkcase(self):
123 return not util.checkcase(self._join('.hg'))
123 return not util.checkcase(self._join('.hg'))
124
124
125 def _join(self, f):
125 def _join(self, f):
126 # much faster than os.path.join()
126 # much faster than os.path.join()
127 # it's safe because f is always a relative path
127 # it's safe because f is always a relative path
128 return self._rootdir + f
128 return self._rootdir + f
129
129
130 def flagfunc(self, fallback):
130 def flagfunc(self, fallback):
131 if self._checklink:
131 if self._checklink:
132 if self._checkexec:
132 if self._checkexec:
133 def f(x):
133 def f(x):
134 p = self._join(x)
134 p = self._join(x)
135 if os.path.islink(p):
135 if os.path.islink(p):
136 return 'l'
136 return 'l'
137 if util.is_exec(p):
137 if util.is_exec(p):
138 return 'x'
138 return 'x'
139 return ''
139 return ''
140 return f
140 return f
141 def f(x):
141 def f(x):
142 if os.path.islink(self._join(x)):
142 if os.path.islink(self._join(x)):
143 return 'l'
143 return 'l'
144 if 'x' in fallback(x):
144 if 'x' in fallback(x):
145 return 'x'
145 return 'x'
146 return ''
146 return ''
147 return f
147 return f
148 if self._checkexec:
148 if self._checkexec:
149 def f(x):
149 def f(x):
150 if 'l' in fallback(x):
150 if 'l' in fallback(x):
151 return 'l'
151 return 'l'
152 if util.is_exec(self._join(x)):
152 if util.is_exec(self._join(x)):
153 return 'x'
153 return 'x'
154 return ''
154 return ''
155 return f
155 return f
156 return fallback
156 return fallback
157
157
158 def getcwd(self):
158 def getcwd(self):
159 cwd = os.getcwd()
159 cwd = os.getcwd()
160 if cwd == self._root:
160 if cwd == self._root:
161 return ''
161 return ''
162 # self._root ends with a path separator if self._root is '/' or 'C:\'
162 # self._root ends with a path separator if self._root is '/' or 'C:\'
163 rootsep = self._root
163 rootsep = self._root
164 if not util.endswithsep(rootsep):
164 if not util.endswithsep(rootsep):
165 rootsep += os.sep
165 rootsep += os.sep
166 if cwd.startswith(rootsep):
166 if cwd.startswith(rootsep):
167 return cwd[len(rootsep):]
167 return cwd[len(rootsep):]
168 else:
168 else:
169 # we're outside the repo. return an absolute path.
169 # we're outside the repo. return an absolute path.
170 return cwd
170 return cwd
171
171
172 def pathto(self, f, cwd=None):
172 def pathto(self, f, cwd=None):
173 if cwd is None:
173 if cwd is None:
174 cwd = self.getcwd()
174 cwd = self.getcwd()
175 path = util.pathto(self._root, cwd, f)
175 path = util.pathto(self._root, cwd, f)
176 if self._slash:
176 if self._slash:
177 return util.normpath(path)
177 return util.normpath(path)
178 return path
178 return path
179
179
180 def __getitem__(self, key):
180 def __getitem__(self, key):
181 '''Return the current state of key (a filename) in the dirstate.
181 '''Return the current state of key (a filename) in the dirstate.
182
182
183 States are:
183 States are:
184 n normal
184 n normal
185 m needs merging
185 m needs merging
186 r marked for removal
186 r marked for removal
187 a marked for addition
187 a marked for addition
188 ? not tracked
188 ? not tracked
189 '''
189 '''
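# Editorial note: typical use of the state lookup (sketch, not part of the
# original changeset):
#
#     if repo.dirstate['some/file.txt'] == 'a':
#         print 'marked for addition'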
190 return self._map.get(key, ("?",))[0]
190 return self._map.get(key, ("?",))[0]
191
191
192 def __contains__(self, key):
192 def __contains__(self, key):
193 return key in self._map
193 return key in self._map
194
194
195 def __iter__(self):
195 def __iter__(self):
196 for x in sorted(self._map):
196 for x in sorted(self._map):
197 yield x
197 yield x
198
198
199 def parents(self):
199 def parents(self):
200 return self._pl
200 return self._pl
201
201
202 def branch(self):
202 def branch(self):
203 return self._branch
203 return self._branch
204
204
205 def setparents(self, p1, p2=nullid):
205 def setparents(self, p1, p2=nullid):
206 self._dirty = self._dirtypl = True
206 self._dirty = self._dirtypl = True
207 self._pl = p1, p2
207 self._pl = p1, p2
208
208
209 def setbranch(self, branch):
209 def setbranch(self, branch):
210 if branch in ['tip', '.', 'null']:
210 if branch in ['tip', '.', 'null']:
211 raise util.Abort(_('the name \'%s\' is reserved') % branch)
211 raise util.Abort(_('the name \'%s\' is reserved') % branch)
212 self._branch = branch
212 self._branch = branch
213 self._opener("branch", "w").write(branch + '\n')
213 self._opener("branch", "w").write(branch + '\n')
214
214
215 def _read(self):
215 def _read(self):
216 self._map = {}
216 self._map = {}
217 self._copymap = {}
217 self._copymap = {}
218 try:
218 try:
219 st = self._opener("dirstate").read()
219 st = self._opener("dirstate").read()
220 except IOError, err:
220 except IOError, err:
221 if err.errno != errno.ENOENT:
221 if err.errno != errno.ENOENT:
222 raise
222 raise
223 return
223 return
224 if not st:
224 if not st:
225 return
225 return
226
226
227 p = parsers.parse_dirstate(self._map, self._copymap, st)
227 p = parsers.parse_dirstate(self._map, self._copymap, st)
228 if not self._dirtypl:
228 if not self._dirtypl:
229 self._pl = p
229 self._pl = p
230
230
231 def invalidate(self):
231 def invalidate(self):
232 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
232 for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split():
233 if a in self.__dict__:
233 if a in self.__dict__:
234 delattr(self, a)
234 delattr(self, a)
235 self._dirty = False
235 self._dirty = False
236
236
237 def copy(self, source, dest):
237 def copy(self, source, dest):
238 """Mark dest as a copy of source. Unmark dest if source is None."""
238 """Mark dest as a copy of source. Unmark dest if source is None."""
239 if source == dest:
239 if source == dest:
240 return
240 return
241 self._dirty = True
241 self._dirty = True
242 if source is not None:
242 if source is not None:
243 self._copymap[dest] = source
243 self._copymap[dest] = source
244 elif dest in self._copymap:
244 elif dest in self._copymap:
245 del self._copymap[dest]
245 del self._copymap[dest]
246
246
247 def copied(self, file):
247 def copied(self, file):
248 return self._copymap.get(file, None)
248 return self._copymap.get(file, None)
249
249
250 def copies(self):
250 def copies(self):
251 return self._copymap
251 return self._copymap
252
252
253 def _droppath(self, f):
253 def _droppath(self, f):
254 if self[f] not in "?r" and "_dirs" in self.__dict__:
254 if self[f] not in "?r" and "_dirs" in self.__dict__:
255 _decdirs(self._dirs, f)
255 _decdirs(self._dirs, f)
256
256
257 def _addpath(self, f, check=False):
257 def _addpath(self, f, check=False):
258 oldstate = self[f]
258 oldstate = self[f]
259 if check or oldstate == "r":
259 if check or oldstate == "r":
260 if '\r' in f or '\n' in f:
260 if '\r' in f or '\n' in f:
261 raise util.Abort(
261 raise util.Abort(
262 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
262 _("'\\n' and '\\r' disallowed in filenames: %r") % f)
263 if f in self._dirs:
263 if f in self._dirs:
264 raise util.Abort(_('directory %r already in dirstate') % f)
264 raise util.Abort(_('directory %r already in dirstate') % f)
265 # shadows
265 # shadows
266 for d in _finddirs(f):
266 for d in _finddirs(f):
267 if d in self._dirs:
267 if d in self._dirs:
268 break
268 break
269 if d in self._map and self[d] != 'r':
269 if d in self._map and self[d] != 'r':
270 raise util.Abort(
270 raise util.Abort(
271 _('file %r in dirstate clashes with %r') % (d, f))
271 _('file %r in dirstate clashes with %r') % (d, f))
272 if oldstate in "?r" and "_dirs" in self.__dict__:
272 if oldstate in "?r" and "_dirs" in self.__dict__:
273 _incdirs(self._dirs, f)
273 _incdirs(self._dirs, f)
274
274
275 def normal(self, f):
275 def normal(self, f):
276 '''Mark a file normal and clean.'''
276 '''Mark a file normal and clean.'''
277 self._dirty = True
277 self._dirty = True
278 self._addpath(f)
278 self._addpath(f)
279 s = os.lstat(self._join(f))
279 s = os.lstat(self._join(f))
280 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
280 self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime))
281 if f in self._copymap:
281 if f in self._copymap:
282 del self._copymap[f]
282 del self._copymap[f]
283
283
284 def normallookup(self, f):
284 def normallookup(self, f):
285 '''Mark a file normal, but possibly dirty.'''
285 '''Mark a file normal, but possibly dirty.'''
286 if self._pl[1] != nullid and f in self._map:
286 if self._pl[1] != nullid and f in self._map:
287 # if there is a merge going on and the file was either
287 # if there is a merge going on and the file was either
288 # in state 'm' (-1) or coming from other parent (-2) before
288 # in state 'm' (-1) or coming from other parent (-2) before
289 # being removed, restore that state.
289 # being removed, restore that state.
290 entry = self._map[f]
290 entry = self._map[f]
291 if entry[0] == 'r' and entry[2] in (-1, -2):
291 if entry[0] == 'r' and entry[2] in (-1, -2):
292 source = self._copymap.get(f)
292 source = self._copymap.get(f)
293 if entry[2] == -1:
293 if entry[2] == -1:
294 self.merge(f)
294 self.merge(f)
295 elif entry[2] == -2:
295 elif entry[2] == -2:
296 self.otherparent(f)
296 self.otherparent(f)
297 if source:
297 if source:
298 self.copy(source, f)
298 self.copy(source, f)
299 return
299 return
300 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
300 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
301 return
301 return
302 self._dirty = True
302 self._dirty = True
303 self._addpath(f)
303 self._addpath(f)
304 self._map[f] = ('n', 0, -1, -1)
304 self._map[f] = ('n', 0, -1, -1)
305 if f in self._copymap:
305 if f in self._copymap:
306 del self._copymap[f]
306 del self._copymap[f]
307
307
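To make the -1/-2 size sentinels used by normallookup() above concrete, here is a small standalone sketch (toy entry tuples only, no repository) of how a removed entry remembers whether it had been merged or came from the other parent:

    def restored_state(entry):
        # entry is (state, mode, size, mtime), as in the dirstate map above
        state, mode, size, mtime = entry
        if state == 'r' and size == -1:
            return 'm'                    # was merged before being removed
        if state == 'r' and size == -2:
            return 'n (other parent)'     # came from the second parent
        return state

    print restored_state(('r', 0, -1, 0))   # -> m
    print restored_state(('r', 0, -2, 0))   # -> n (other parent)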
308 def otherparent(self, f):
308 def otherparent(self, f):
309 '''Mark as coming from the other parent, always dirty.'''
309 '''Mark as coming from the other parent, always dirty.'''
310 if self._pl[1] == nullid:
310 if self._pl[1] == nullid:
311 raise util.Abort(_("setting %r to other parent "
311 raise util.Abort(_("setting %r to other parent "
312 "only allowed in merges") % f)
312 "only allowed in merges") % f)
313 self._dirty = True
313 self._dirty = True
314 self._addpath(f)
314 self._addpath(f)
315 self._map[f] = ('n', 0, -2, -1)
315 self._map[f] = ('n', 0, -2, -1)
316 if f in self._copymap:
316 if f in self._copymap:
317 del self._copymap[f]
317 del self._copymap[f]
318
318
319 def add(self, f):
319 def add(self, f):
320 '''Mark a file added.'''
320 '''Mark a file added.'''
321 self._dirty = True
321 self._dirty = True
322 self._addpath(f, True)
322 self._addpath(f, True)
323 self._map[f] = ('a', 0, -1, -1)
323 self._map[f] = ('a', 0, -1, -1)
324 if f in self._copymap:
324 if f in self._copymap:
325 del self._copymap[f]
325 del self._copymap[f]
326
326
327 def remove(self, f):
327 def remove(self, f):
328 '''Mark a file removed.'''
328 '''Mark a file removed.'''
329 self._dirty = True
329 self._dirty = True
330 self._droppath(f)
330 self._droppath(f)
331 size = 0
331 size = 0
332 if self._pl[1] != nullid and f in self._map:
332 if self._pl[1] != nullid and f in self._map:
333 # backup the previous state
333 # backup the previous state
334 entry = self._map[f]
334 entry = self._map[f]
335 if entry[0] == 'm': # merge
335 if entry[0] == 'm': # merge
336 size = -1
336 size = -1
337 elif entry[0] == 'n' and entry[2] == -2: # other parent
337 elif entry[0] == 'n' and entry[2] == -2: # other parent
338 size = -2
338 size = -2
339 self._map[f] = ('r', 0, size, 0)
339 self._map[f] = ('r', 0, size, 0)
340 if size == 0 and f in self._copymap:
340 if size == 0 and f in self._copymap:
341 del self._copymap[f]
341 del self._copymap[f]
342
342
343 def merge(self, f):
343 def merge(self, f):
344 '''Mark a file merged.'''
344 '''Mark a file merged.'''
345 self._dirty = True
345 self._dirty = True
346 s = os.lstat(self._join(f))
346 s = os.lstat(self._join(f))
347 self._addpath(f)
347 self._addpath(f)
348 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
348 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
349 if f in self._copymap:
349 if f in self._copymap:
350 del self._copymap[f]
350 del self._copymap[f]
351
351
352 def forget(self, f):
352 def forget(self, f):
353 '''Forget a file.'''
353 '''Forget a file.'''
354 self._dirty = True
354 self._dirty = True
355 try:
355 try:
356 self._droppath(f)
356 self._droppath(f)
357 del self._map[f]
357 del self._map[f]
358 except KeyError:
358 except KeyError:
359 self._ui.warn(_("not in dirstate: %s\n") % f)
359 self._ui.warn(_("not in dirstate: %s\n") % f)
360
360
361 def _normalize(self, path, knownpath):
361 def _normalize(self, path, knownpath):
362 norm_path = os.path.normcase(path)
362 norm_path = os.path.normcase(path)
363 fold_path = self._foldmap.get(norm_path, None)
363 fold_path = self._foldmap.get(norm_path, None)
364 if fold_path is None:
364 if fold_path is None:
365 if knownpath or not os.path.exists(os.path.join(self._root, path)):
365 if knownpath or not os.path.lexists(os.path.join(self._root, path)):
366 fold_path = path
366 fold_path = path
367 else:
367 else:
368 fold_path = self._foldmap.setdefault(norm_path,
368 fold_path = self._foldmap.setdefault(norm_path,
369 util.fspath(path, self._root))
369 util.fspath(path, self._root))
370 return fold_path
370 return fold_path
371
371
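A rough standalone illustration of the case-folding cache in _normalize() above; os.path.normcase stands in here for util.fspath, which in the real code asks the filesystem for the stored spelling:

    import os.path

    foldmap = {}

    def normalize(path):
        norm = os.path.normcase(path)
        # remember the first spelling seen for a given folded key
        return foldmap.setdefault(norm, path)

    print normalize('ReadMe.TXT')   # -> ReadMe.TXT
    print normalize('readme.txt')   # -> ReadMe.TXT where normcase folds case
                                    #    (e.g. Windows); unchanged on Linux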
372 def clear(self):
372 def clear(self):
373 self._map = {}
373 self._map = {}
374 if "_dirs" in self.__dict__:
374 if "_dirs" in self.__dict__:
375 delattr(self, "_dirs")
375 delattr(self, "_dirs")
376 self._copymap = {}
376 self._copymap = {}
377 self._pl = [nullid, nullid]
377 self._pl = [nullid, nullid]
378 self._dirty = True
378 self._dirty = True
379
379
380 def rebuild(self, parent, files):
380 def rebuild(self, parent, files):
381 self.clear()
381 self.clear()
382 for f in files:
382 for f in files:
383 if 'x' in files.flags(f):
383 if 'x' in files.flags(f):
384 self._map[f] = ('n', 0777, -1, 0)
384 self._map[f] = ('n', 0777, -1, 0)
385 else:
385 else:
386 self._map[f] = ('n', 0666, -1, 0)
386 self._map[f] = ('n', 0666, -1, 0)
387 self._pl = (parent, nullid)
387 self._pl = (parent, nullid)
388 self._dirty = True
388 self._dirty = True
389
389
390 def write(self):
390 def write(self):
391 if not self._dirty:
391 if not self._dirty:
392 return
392 return
393 st = self._opener("dirstate", "w", atomictemp=True)
393 st = self._opener("dirstate", "w", atomictemp=True)
394
394
395 # use the modification time of the newly created temporary file as the
395 # use the modification time of the newly created temporary file as the
396 # filesystem's notion of 'now'
396 # filesystem's notion of 'now'
397 now = int(util.fstat(st).st_mtime)
397 now = int(util.fstat(st).st_mtime)
398
398
399 cs = cStringIO.StringIO()
399 cs = cStringIO.StringIO()
400 copymap = self._copymap
400 copymap = self._copymap
401 pack = struct.pack
401 pack = struct.pack
402 write = cs.write
402 write = cs.write
403 write("".join(self._pl))
403 write("".join(self._pl))
404 for f, e in self._map.iteritems():
404 for f, e in self._map.iteritems():
405 if e[0] == 'n' and e[3] == now:
405 if e[0] == 'n' and e[3] == now:
406 # The file was last modified "simultaneously" with the current
406 # The file was last modified "simultaneously" with the current
407 # write to dirstate (i.e. within the same second for file-
407 # write to dirstate (i.e. within the same second for file-
408 # systems with a granularity of 1 sec). This commonly happens
408 # systems with a granularity of 1 sec). This commonly happens
409 # for at least a couple of files on 'update'.
409 # for at least a couple of files on 'update'.
410 # The user could change the file without changing its size
410 # The user could change the file without changing its size
411 # within the same second. Invalidate the file's stat data in
411 # within the same second. Invalidate the file's stat data in
412 # dirstate, forcing future 'status' calls to compare the
412 # dirstate, forcing future 'status' calls to compare the
413 # contents of the file. This prevents mistakenly treating such
413 # contents of the file. This prevents mistakenly treating such
414 # files as clean.
414 # files as clean.
415 e = (e[0], 0, -1, -1) # mark entry as 'unset'
415 e = (e[0], 0, -1, -1) # mark entry as 'unset'
416 self._map[f] = e
416 self._map[f] = e
417
417
418 if f in copymap:
418 if f in copymap:
419 f = "%s\0%s" % (f, copymap[f])
419 f = "%s\0%s" % (f, copymap[f])
420 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
420 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
421 write(e)
421 write(e)
422 write(f)
422 write(f)
423 st.write(cs.getvalue())
423 st.write(cs.getvalue())
424 st.rename()
424 st.rename()
425 self._dirty = self._dirtypl = False
425 self._dirty = self._dirtypl = False
426
426
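The "e[3] == now" branch in write() above guards against same-second modifications; a toy version of that invalidation, using plain tuples and an invented file name:

    def invalidate_same_second(dmap, now):
        # entries written in the same second as the dirstate itself cannot be
        # trusted by a later size/mtime comparison, so mark them 'unset'
        for f, e in dmap.items():
            if e[0] == 'n' and e[3] == now:
                dmap[f] = (e[0], 0, -1, -1)
        return dmap

    print invalidate_same_second({'a.txt': ('n', 0644, 5, 1280000000)},
                                 1280000000)
    # -> {'a.txt': ('n', 0, -1, -1)}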
427 def _dirignore(self, f):
427 def _dirignore(self, f):
428 if f == '.':
428 if f == '.':
429 return False
429 return False
430 if self._ignore(f):
430 if self._ignore(f):
431 return True
431 return True
432 for p in _finddirs(f):
432 for p in _finddirs(f):
433 if self._ignore(p):
433 if self._ignore(p):
434 return True
434 return True
435 return False
435 return False
436
436
437 def walk(self, match, subrepos, unknown, ignored):
437 def walk(self, match, subrepos, unknown, ignored):
438 '''
438 '''
439 Walk recursively through the directory tree, finding all files
439 Walk recursively through the directory tree, finding all files
440 matched by match.
440 matched by match.
441
441
442 Return a dict mapping filename to stat-like object (either
442 Return a dict mapping filename to stat-like object (either
443 mercurial.osutil.stat instance or return value of os.stat()).
443 mercurial.osutil.stat instance or return value of os.stat()).
444 '''
444 '''
445
445
446 def fwarn(f, msg):
446 def fwarn(f, msg):
447 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
447 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
448 return False
448 return False
449
449
450 def badtype(mode):
450 def badtype(mode):
451 kind = _('unknown')
451 kind = _('unknown')
452 if stat.S_ISCHR(mode):
452 if stat.S_ISCHR(mode):
453 kind = _('character device')
453 kind = _('character device')
454 elif stat.S_ISBLK(mode):
454 elif stat.S_ISBLK(mode):
455 kind = _('block device')
455 kind = _('block device')
456 elif stat.S_ISFIFO(mode):
456 elif stat.S_ISFIFO(mode):
457 kind = _('fifo')
457 kind = _('fifo')
458 elif stat.S_ISSOCK(mode):
458 elif stat.S_ISSOCK(mode):
459 kind = _('socket')
459 kind = _('socket')
460 elif stat.S_ISDIR(mode):
460 elif stat.S_ISDIR(mode):
461 kind = _('directory')
461 kind = _('directory')
462 return _('unsupported file type (type is %s)') % kind
462 return _('unsupported file type (type is %s)') % kind
463
463
464 ignore = self._ignore
464 ignore = self._ignore
465 dirignore = self._dirignore
465 dirignore = self._dirignore
466 if ignored:
466 if ignored:
467 ignore = util.never
467 ignore = util.never
468 dirignore = util.never
468 dirignore = util.never
469 elif not unknown:
469 elif not unknown:
470 # if unknown and ignored are False, skip step 2
470 # if unknown and ignored are False, skip step 2
471 ignore = util.always
471 ignore = util.always
472 dirignore = util.always
472 dirignore = util.always
473
473
474 matchfn = match.matchfn
474 matchfn = match.matchfn
475 badfn = match.bad
475 badfn = match.bad
476 dmap = self._map
476 dmap = self._map
477 normpath = util.normpath
477 normpath = util.normpath
478 listdir = osutil.listdir
478 listdir = osutil.listdir
479 lstat = os.lstat
479 lstat = os.lstat
480 getkind = stat.S_IFMT
480 getkind = stat.S_IFMT
481 dirkind = stat.S_IFDIR
481 dirkind = stat.S_IFDIR
482 regkind = stat.S_IFREG
482 regkind = stat.S_IFREG
483 lnkkind = stat.S_IFLNK
483 lnkkind = stat.S_IFLNK
484 join = self._join
484 join = self._join
485 work = []
485 work = []
486 wadd = work.append
486 wadd = work.append
487
487
488 if self._checkcase:
488 if self._checkcase:
489 normalize = self._normalize
489 normalize = self._normalize
490 else:
490 else:
491 normalize = lambda x, y: x
491 normalize = lambda x, y: x
492
492
493 exact = skipstep3 = False
493 exact = skipstep3 = False
494 if matchfn == match.exact: # match.exact
494 if matchfn == match.exact: # match.exact
495 exact = True
495 exact = True
496 dirignore = util.always # skip step 2
496 dirignore = util.always # skip step 2
497 elif match.files() and not match.anypats(): # match.match, no patterns
497 elif match.files() and not match.anypats(): # match.match, no patterns
498 skipstep3 = True
498 skipstep3 = True
499
499
500 files = set(match.files())
500 files = set(match.files())
501 if not files or '.' in files:
501 if not files or '.' in files:
502 files = ['']
502 files = ['']
503 results = dict.fromkeys(subrepos)
503 results = dict.fromkeys(subrepos)
504 results['.hg'] = None
504 results['.hg'] = None
505
505
506 # step 1: find all explicit files
506 # step 1: find all explicit files
507 for ff in sorted(files):
507 for ff in sorted(files):
508 nf = normalize(normpath(ff), False)
508 nf = normalize(normpath(ff), False)
509 if nf in results:
509 if nf in results:
510 continue
510 continue
511
511
512 try:
512 try:
513 st = lstat(join(nf))
513 st = lstat(join(nf))
514 kind = getkind(st.st_mode)
514 kind = getkind(st.st_mode)
515 if kind == dirkind:
515 if kind == dirkind:
516 skipstep3 = False
516 skipstep3 = False
517 if nf in dmap:
517 if nf in dmap:
518 #file deleted on disk but still in dirstate
518 #file deleted on disk but still in dirstate
519 results[nf] = None
519 results[nf] = None
520 match.dir(nf)
520 match.dir(nf)
521 if not dirignore(nf):
521 if not dirignore(nf):
522 wadd(nf)
522 wadd(nf)
523 elif kind == regkind or kind == lnkkind:
523 elif kind == regkind or kind == lnkkind:
524 results[nf] = st
524 results[nf] = st
525 else:
525 else:
526 badfn(ff, badtype(kind))
526 badfn(ff, badtype(kind))
527 if nf in dmap:
527 if nf in dmap:
528 results[nf] = None
528 results[nf] = None
529 except OSError, inst:
529 except OSError, inst:
530 if nf in dmap: # does it exactly match a file?
530 if nf in dmap: # does it exactly match a file?
531 results[nf] = None
531 results[nf] = None
532 else: # does it match a directory?
532 else: # does it match a directory?
533 prefix = nf + "/"
533 prefix = nf + "/"
534 for fn in dmap:
534 for fn in dmap:
535 if fn.startswith(prefix):
535 if fn.startswith(prefix):
536 match.dir(nf)
536 match.dir(nf)
537 skipstep3 = False
537 skipstep3 = False
538 break
538 break
539 else:
539 else:
540 badfn(ff, inst.strerror)
540 badfn(ff, inst.strerror)
541
541
542 # step 2: visit subdirectories
542 # step 2: visit subdirectories
543 while work:
543 while work:
544 nd = work.pop()
544 nd = work.pop()
545 skip = None
545 skip = None
546 if nd == '.':
546 if nd == '.':
547 nd = ''
547 nd = ''
548 else:
548 else:
549 skip = '.hg'
549 skip = '.hg'
550 try:
550 try:
551 entries = listdir(join(nd), stat=True, skip=skip)
551 entries = listdir(join(nd), stat=True, skip=skip)
552 except OSError, inst:
552 except OSError, inst:
553 if inst.errno == errno.EACCES:
553 if inst.errno == errno.EACCES:
554 fwarn(nd, inst.strerror)
554 fwarn(nd, inst.strerror)
555 continue
555 continue
556 raise
556 raise
557 for f, kind, st in entries:
557 for f, kind, st in entries:
558 nf = normalize(nd and (nd + "/" + f) or f, True)
558 nf = normalize(nd and (nd + "/" + f) or f, True)
559 if nf not in results:
559 if nf not in results:
560 if kind == dirkind:
560 if kind == dirkind:
561 if not ignore(nf):
561 if not ignore(nf):
562 match.dir(nf)
562 match.dir(nf)
563 wadd(nf)
563 wadd(nf)
564 if nf in dmap and matchfn(nf):
564 if nf in dmap and matchfn(nf):
565 results[nf] = None
565 results[nf] = None
566 elif kind == regkind or kind == lnkkind:
566 elif kind == regkind or kind == lnkkind:
567 if nf in dmap:
567 if nf in dmap:
568 if matchfn(nf):
568 if matchfn(nf):
569 results[nf] = st
569 results[nf] = st
570 elif matchfn(nf) and not ignore(nf):
570 elif matchfn(nf) and not ignore(nf):
571 results[nf] = st
571 results[nf] = st
572 elif nf in dmap and matchfn(nf):
572 elif nf in dmap and matchfn(nf):
573 results[nf] = None
573 results[nf] = None
574
574
575 # step 3: report unseen items in the dmap hash
575 # step 3: report unseen items in the dmap hash
576 if not skipstep3 and not exact:
576 if not skipstep3 and not exact:
577 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
577 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
578 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
578 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
579 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
579 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
580 st = None
580 st = None
581 results[nf] = st
581 results[nf] = st
582 for s in subrepos:
582 for s in subrepos:
583 del results[s]
583 del results[s]
584 del results['.hg']
584 del results['.hg']
585 return results
585 return results
586
586
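The real walk() above is driven by a matcher and the dirstate map; the much smaller sketch below (plain os.walk, with a hypothetical 'tracked' list standing in for dmap) only shows the shape of the result: a file found on disk maps to its lstat result, while a tracked-but-missing file maps to None, which status() later reports as deleted.

    import os

    def tinywalk(root, tracked):
        results = {}
        for dirpath, dirs, files in os.walk(root):
            if '.hg' in dirs:
                dirs.remove('.hg')           # the real walk skips .hg as well
            for f in files:
                full = os.path.join(dirpath, f)
                results[os.path.relpath(full, root)] = os.lstat(full)
        for f in tracked:
            results.setdefault(f, None)      # step 3: unseen tracked files
        return results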
587 def status(self, match, subrepos, ignored, clean, unknown):
587 def status(self, match, subrepos, ignored, clean, unknown):
588 '''Determine the status of the working copy relative to the
588 '''Determine the status of the working copy relative to the
589 dirstate and return a tuple of lists (unsure, modified, added,
589 dirstate and return a tuple of lists (unsure, modified, added,
590 removed, deleted, unknown, ignored, clean), where:
590 removed, deleted, unknown, ignored, clean), where:
591
591
592 unsure:
592 unsure:
593 files that might have been modified since the dirstate was
593 files that might have been modified since the dirstate was
594 written, but need to be read to be sure (size is the same
594 written, but need to be read to be sure (size is the same
595 but mtime differs)
595 but mtime differs)
596 modified:
596 modified:
597 files that have definitely been modified since the dirstate
597 files that have definitely been modified since the dirstate
598 was written (different size or mode)
598 was written (different size or mode)
599 added:
599 added:
600 files that have been explicitly added with hg add
600 files that have been explicitly added with hg add
601 removed:
601 removed:
602 files that have been explicitly removed with hg remove
602 files that have been explicitly removed with hg remove
603 deleted:
603 deleted:
604 files that have been deleted through other means ("missing")
604 files that have been deleted through other means ("missing")
605 unknown:
605 unknown:
606 files not in the dirstate that are not ignored
606 files not in the dirstate that are not ignored
607 ignored:
607 ignored:
608 files not in the dirstate that are ignored
608 files not in the dirstate that are ignored
609 (by _dirignore())
609 (by _dirignore())
610 clean:
610 clean:
611 files that have definitely not been modified since the
611 files that have definitely not been modified since the
612 dirstate was written
612 dirstate was written
613 '''
613 '''
614 listignored, listclean, listunknown = ignored, clean, unknown
614 listignored, listclean, listunknown = ignored, clean, unknown
615 lookup, modified, added, unknown, ignored = [], [], [], [], []
615 lookup, modified, added, unknown, ignored = [], [], [], [], []
616 removed, deleted, clean = [], [], []
616 removed, deleted, clean = [], [], []
617
617
618 dmap = self._map
618 dmap = self._map
619 ladd = lookup.append # aka "unsure"
619 ladd = lookup.append # aka "unsure"
620 madd = modified.append
620 madd = modified.append
621 aadd = added.append
621 aadd = added.append
622 uadd = unknown.append
622 uadd = unknown.append
623 iadd = ignored.append
623 iadd = ignored.append
624 radd = removed.append
624 radd = removed.append
625 dadd = deleted.append
625 dadd = deleted.append
626 cadd = clean.append
626 cadd = clean.append
627
627
628 lnkkind = stat.S_IFLNK
628 lnkkind = stat.S_IFLNK
629
629
630 for fn, st in self.walk(match, subrepos, listunknown,
630 for fn, st in self.walk(match, subrepos, listunknown,
631 listignored).iteritems():
631 listignored).iteritems():
632 if fn not in dmap:
632 if fn not in dmap:
633 if (listignored or match.exact(fn)) and self._dirignore(fn):
633 if (listignored or match.exact(fn)) and self._dirignore(fn):
634 if listignored:
634 if listignored:
635 iadd(fn)
635 iadd(fn)
636 elif listunknown:
636 elif listunknown:
637 uadd(fn)
637 uadd(fn)
638 continue
638 continue
639
639
640 state, mode, size, time = dmap[fn]
640 state, mode, size, time = dmap[fn]
641
641
642 if not st and state in "nma":
642 if not st and state in "nma":
643 dadd(fn)
643 dadd(fn)
644 elif state == 'n':
644 elif state == 'n':
645 # The "mode & lnkkind != lnkkind or self._checklink"
645 # The "mode & lnkkind != lnkkind or self._checklink"
646 # lines are an expansion of "islink => checklink"
646 # lines are an expansion of "islink => checklink"
647 # where islink means "is this a link?" and checklink
647 # where islink means "is this a link?" and checklink
648 # means "can we check links?".
648 # means "can we check links?".
649 if (size >= 0 and
649 if (size >= 0 and
650 (size != st.st_size
650 (size != st.st_size
651 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
651 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
652 and (mode & lnkkind != lnkkind or self._checklink)
652 and (mode & lnkkind != lnkkind or self._checklink)
653 or size == -2 # other parent
653 or size == -2 # other parent
654 or fn in self._copymap):
654 or fn in self._copymap):
655 madd(fn)
655 madd(fn)
656 elif (time != int(st.st_mtime)
656 elif (time != int(st.st_mtime)
657 and (mode & lnkkind != lnkkind or self._checklink)):
657 and (mode & lnkkind != lnkkind or self._checklink)):
658 ladd(fn)
658 ladd(fn)
659 elif listclean:
659 elif listclean:
660 cadd(fn)
660 cadd(fn)
661 elif state == 'm':
661 elif state == 'm':
662 madd(fn)
662 madd(fn)
663 elif state == 'a':
663 elif state == 'a':
664 aadd(fn)
664 aadd(fn)
665 elif state == 'r':
665 elif state == 'r':
666 radd(fn)
666 radd(fn)
667
667
668 return (lookup, modified, added, removed, deleted, unknown, ignored,
668 return (lookup, modified, added, removed, deleted, unknown, ignored,
669 clean)
669 clean)
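A hedged usage sketch for the status() entry point above. The call signature is the one defined above; how a repository and matcher are obtained depends on the Mercurial version, and hg.repository plus cmdutil.matchall are assumed to be available at this code's vintage.

    from mercurial import ui, hg, cmdutil

    repo = hg.repository(ui.ui(), '.')       # repository rooted at cwd
    m = cmdutil.matchall(repo)               # assumed helper: match every file
    # argument order per status() above: match, subrepos, ignored, clean, unknown
    lookup, modified, added, removed, deleted, unknown, ignored, clean = \
        repo.dirstate.status(m, [], False, False, True)
    print 'modified:', modified
    print 'unknown: ', unknown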
@@ -1,1709 +1,1709 @@
1 # patch.py - patch file parsing routines
1 # patch.py - patch file parsing routines
2 #
2 #
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
3 # Copyright 2006 Brendan Cully <brendan@kublai.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
4 # Copyright 2007 Chris Mason <chris.mason@oracle.com>
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import cStringIO, email.Parser, os, re
9 import cStringIO, email.Parser, os, re
10 import tempfile, zlib
10 import tempfile, zlib
11
11
12 from i18n import _
12 from i18n import _
13 from node import hex, nullid, short
13 from node import hex, nullid, short
14 import base85, cmdutil, mdiff, util, diffhelpers, copies, encoding
14 import base85, cmdutil, mdiff, util, diffhelpers, copies, encoding
15
15
16 gitre = re.compile('diff --git a/(.*) b/(.*)')
16 gitre = re.compile('diff --git a/(.*) b/(.*)')
17
17
18 class PatchError(Exception):
18 class PatchError(Exception):
19 pass
19 pass
20
20
21 class NoHunks(PatchError):
21 class NoHunks(PatchError):
22 pass
22 pass
23
23
24 # helper functions
24 # helper functions
25
25
26 def copyfile(src, dst, basedir):
26 def copyfile(src, dst, basedir):
27 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
27 abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]]
28 if os.path.lexists(absdst):
28 if os.path.lexists(absdst):
29 raise util.Abort(_("cannot create %s: destination already exists") %
29 raise util.Abort(_("cannot create %s: destination already exists") %
30 dst)
30 dst)
31
31
32 dstdir = os.path.dirname(absdst)
32 dstdir = os.path.dirname(absdst)
33 if dstdir and not os.path.isdir(dstdir):
33 if dstdir and not os.path.isdir(dstdir):
34 try:
34 try:
35 os.makedirs(dstdir)
35 os.makedirs(dstdir)
36 except IOError:
36 except IOError:
37 raise util.Abort(
37 raise util.Abort(
38 _("cannot create %s: unable to create destination directory")
38 _("cannot create %s: unable to create destination directory")
39 % dst)
39 % dst)
40
40
41 util.copyfile(abssrc, absdst)
41 util.copyfile(abssrc, absdst)
42
42
43 # public functions
43 # public functions
44
44
45 def split(stream):
45 def split(stream):
46 '''return an iterator of individual patches from a stream'''
46 '''return an iterator of individual patches from a stream'''
47 def isheader(line, inheader):
47 def isheader(line, inheader):
48 if inheader and line[0] in (' ', '\t'):
48 if inheader and line[0] in (' ', '\t'):
49 # continuation
49 # continuation
50 return True
50 return True
51 if line[0] in (' ', '-', '+'):
51 if line[0] in (' ', '-', '+'):
52 # diff line - don't check for header pattern in there
52 # diff line - don't check for header pattern in there
53 return False
53 return False
54 l = line.split(': ', 1)
54 l = line.split(': ', 1)
55 return len(l) == 2 and ' ' not in l[0]
55 return len(l) == 2 and ' ' not in l[0]
56
56
57 def chunk(lines):
57 def chunk(lines):
58 return cStringIO.StringIO(''.join(lines))
58 return cStringIO.StringIO(''.join(lines))
59
59
60 def hgsplit(stream, cur):
60 def hgsplit(stream, cur):
61 inheader = True
61 inheader = True
62
62
63 for line in stream:
63 for line in stream:
64 if not line.strip():
64 if not line.strip():
65 inheader = False
65 inheader = False
66 if not inheader and line.startswith('# HG changeset patch'):
66 if not inheader and line.startswith('# HG changeset patch'):
67 yield chunk(cur)
67 yield chunk(cur)
68 cur = []
68 cur = []
69 inheader = True
69 inheader = True
70
70
71 cur.append(line)
71 cur.append(line)
72
72
73 if cur:
73 if cur:
74 yield chunk(cur)
74 yield chunk(cur)
75
75
76 def mboxsplit(stream, cur):
76 def mboxsplit(stream, cur):
77 for line in stream:
77 for line in stream:
78 if line.startswith('From '):
78 if line.startswith('From '):
79 for c in split(chunk(cur[1:])):
79 for c in split(chunk(cur[1:])):
80 yield c
80 yield c
81 cur = []
81 cur = []
82
82
83 cur.append(line)
83 cur.append(line)
84
84
85 if cur:
85 if cur:
86 for c in split(chunk(cur[1:])):
86 for c in split(chunk(cur[1:])):
87 yield c
87 yield c
88
88
89 def mimesplit(stream, cur):
89 def mimesplit(stream, cur):
90 def msgfp(m):
90 def msgfp(m):
91 fp = cStringIO.StringIO()
91 fp = cStringIO.StringIO()
92 g = email.Generator.Generator(fp, mangle_from_=False)
92 g = email.Generator.Generator(fp, mangle_from_=False)
93 g.flatten(m)
93 g.flatten(m)
94 fp.seek(0)
94 fp.seek(0)
95 return fp
95 return fp
96
96
97 for line in stream:
97 for line in stream:
98 cur.append(line)
98 cur.append(line)
99 c = chunk(cur)
99 c = chunk(cur)
100
100
101 m = email.Parser.Parser().parse(c)
101 m = email.Parser.Parser().parse(c)
102 if not m.is_multipart():
102 if not m.is_multipart():
103 yield msgfp(m)
103 yield msgfp(m)
104 else:
104 else:
105 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
105 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
106 for part in m.walk():
106 for part in m.walk():
107 ct = part.get_content_type()
107 ct = part.get_content_type()
108 if ct not in ok_types:
108 if ct not in ok_types:
109 continue
109 continue
110 yield msgfp(part)
110 yield msgfp(part)
111
111
112 def headersplit(stream, cur):
112 def headersplit(stream, cur):
113 inheader = False
113 inheader = False
114
114
115 for line in stream:
115 for line in stream:
116 if not inheader and isheader(line, inheader):
116 if not inheader and isheader(line, inheader):
117 yield chunk(cur)
117 yield chunk(cur)
118 cur = []
118 cur = []
119 inheader = True
119 inheader = True
120 if inheader and not isheader(line, inheader):
120 if inheader and not isheader(line, inheader):
121 inheader = False
121 inheader = False
122
122
123 cur.append(line)
123 cur.append(line)
124
124
125 if cur:
125 if cur:
126 yield chunk(cur)
126 yield chunk(cur)
127
127
128 def remainder(cur):
128 def remainder(cur):
129 yield chunk(cur)
129 yield chunk(cur)
130
130
131 class fiter(object):
131 class fiter(object):
132 def __init__(self, fp):
132 def __init__(self, fp):
133 self.fp = fp
133 self.fp = fp
134
134
135 def __iter__(self):
135 def __iter__(self):
136 return self
136 return self
137
137
138 def next(self):
138 def next(self):
139 l = self.fp.readline()
139 l = self.fp.readline()
140 if not l:
140 if not l:
141 raise StopIteration
141 raise StopIteration
142 return l
142 return l
143
143
144 inheader = False
144 inheader = False
145 cur = []
145 cur = []
146
146
147 mimeheaders = ['content-type']
147 mimeheaders = ['content-type']
148
148
149 if not hasattr(stream, 'next'):
149 if not hasattr(stream, 'next'):
150 # http responses, for example, have readline but not next
150 # http responses, for example, have readline but not next
151 stream = fiter(stream)
151 stream = fiter(stream)
152
152
153 for line in stream:
153 for line in stream:
154 cur.append(line)
154 cur.append(line)
155 if line.startswith('# HG changeset patch'):
155 if line.startswith('# HG changeset patch'):
156 return hgsplit(stream, cur)
156 return hgsplit(stream, cur)
157 elif line.startswith('From '):
157 elif line.startswith('From '):
158 return mboxsplit(stream, cur)
158 return mboxsplit(stream, cur)
159 elif isheader(line, inheader):
159 elif isheader(line, inheader):
160 inheader = True
160 inheader = True
161 if line.split(':', 1)[0].lower() in mimeheaders:
161 if line.split(':', 1)[0].lower() in mimeheaders:
162 # let email parser handle this
162 # let email parser handle this
163 return mimesplit(stream, cur)
163 return mimesplit(stream, cur)
164 elif line.startswith('--- ') and inheader:
164 elif line.startswith('--- ') and inheader:
165 # No evil headers seen by diff start, split by hand
165 # No evil headers seen by diff start, split by hand
166 return headersplit(stream, cur)
166 return headersplit(stream, cur)
167 # Not enough info, keep reading
167 # Not enough info, keep reading
168
168
169 # if we are here, we have a very plain patch
169 # if we are here, we have a very plain patch
170 return remainder(cur)
170 return remainder(cur)
171
171
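A small usage sketch for split() above, feeding it an in-memory stream; the diff text is invented, and since a bare unified diff carries no mail or HG headers it comes back as a single chunk via remainder():

    import cStringIO
    from mercurial import patch

    text = ('--- a/hello.txt\n'
            '+++ b/hello.txt\n'
            '@@ -1,1 +1,1 @@\n'
            '-hello\n'
            '+hello, world\n')
    for i, chunk in enumerate(patch.split(cStringIO.StringIO(text))):
        print 'patch %d:' % i
        print chunk.read()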
172 def extract(ui, fileobj):
172 def extract(ui, fileobj):
173 '''extract patch from data read from fileobj.
173 '''extract patch from data read from fileobj.
174
174
175 patch can be a normal patch or contained in an email message.
175 patch can be a normal patch or contained in an email message.
176
176
177 return tuple (filename, message, user, date, branch, node, p1, p2).
177 return tuple (filename, message, user, date, branch, node, p1, p2).
178 Any item in the returned tuple can be None. If filename is None,
178 Any item in the returned tuple can be None. If filename is None,
179 fileobj did not contain a patch. Caller must unlink filename when done.'''
179 fileobj did not contain a patch. Caller must unlink filename when done.'''
180
180
181 # attempt to detect the start of a patch
181 # attempt to detect the start of a patch
182 # (this heuristic is borrowed from quilt)
182 # (this heuristic is borrowed from quilt)
183 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
183 diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |'
184 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
184 r'retrieving revision [0-9]+(\.[0-9]+)*$|'
185 r'---[ \t].*?^\+\+\+[ \t]|'
185 r'---[ \t].*?^\+\+\+[ \t]|'
186 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
186 r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL)
187
187
188 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
188 fd, tmpname = tempfile.mkstemp(prefix='hg-patch-')
189 tmpfp = os.fdopen(fd, 'w')
189 tmpfp = os.fdopen(fd, 'w')
190 try:
190 try:
191 msg = email.Parser.Parser().parse(fileobj)
191 msg = email.Parser.Parser().parse(fileobj)
192
192
193 subject = msg['Subject']
193 subject = msg['Subject']
194 user = msg['From']
194 user = msg['From']
195 if not subject and not user:
195 if not subject and not user:
196 # Not an email, restore parsed headers if any
196 # Not an email, restore parsed headers if any
197 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
197 subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n'
198
198
199 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
199 gitsendmail = 'git-send-email' in msg.get('X-Mailer', '')
200 # should try to parse msg['Date']
200 # should try to parse msg['Date']
201 date = None
201 date = None
202 nodeid = None
202 nodeid = None
203 branch = None
203 branch = None
204 parents = []
204 parents = []
205
205
206 if subject:
206 if subject:
207 if subject.startswith('[PATCH'):
207 if subject.startswith('[PATCH'):
208 pend = subject.find(']')
208 pend = subject.find(']')
209 if pend >= 0:
209 if pend >= 0:
210 subject = subject[pend + 1:].lstrip()
210 subject = subject[pend + 1:].lstrip()
211 subject = subject.replace('\n\t', ' ')
211 subject = subject.replace('\n\t', ' ')
212 ui.debug('Subject: %s\n' % subject)
212 ui.debug('Subject: %s\n' % subject)
213 if user:
213 if user:
214 ui.debug('From: %s\n' % user)
214 ui.debug('From: %s\n' % user)
215 diffs_seen = 0
215 diffs_seen = 0
216 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
216 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch')
217 message = ''
217 message = ''
218 for part in msg.walk():
218 for part in msg.walk():
219 content_type = part.get_content_type()
219 content_type = part.get_content_type()
220 ui.debug('Content-Type: %s\n' % content_type)
220 ui.debug('Content-Type: %s\n' % content_type)
221 if content_type not in ok_types:
221 if content_type not in ok_types:
222 continue
222 continue
223 payload = part.get_payload(decode=True)
223 payload = part.get_payload(decode=True)
224 m = diffre.search(payload)
224 m = diffre.search(payload)
225 if m:
225 if m:
226 hgpatch = False
226 hgpatch = False
227 ignoretext = False
227 ignoretext = False
228
228
229 ui.debug('found patch at byte %d\n' % m.start(0))
229 ui.debug('found patch at byte %d\n' % m.start(0))
230 diffs_seen += 1
230 diffs_seen += 1
231 cfp = cStringIO.StringIO()
231 cfp = cStringIO.StringIO()
232 for line in payload[:m.start(0)].splitlines():
232 for line in payload[:m.start(0)].splitlines():
233 if line.startswith('# HG changeset patch'):
233 if line.startswith('# HG changeset patch'):
234 ui.debug('patch generated by hg export\n')
234 ui.debug('patch generated by hg export\n')
235 hgpatch = True
235 hgpatch = True
236 # drop earlier commit message content
236 # drop earlier commit message content
237 cfp.seek(0)
237 cfp.seek(0)
238 cfp.truncate()
238 cfp.truncate()
239 subject = None
239 subject = None
240 elif hgpatch:
240 elif hgpatch:
241 if line.startswith('# User '):
241 if line.startswith('# User '):
242 user = line[7:]
242 user = line[7:]
243 ui.debug('From: %s\n' % user)
243 ui.debug('From: %s\n' % user)
244 elif line.startswith("# Date "):
244 elif line.startswith("# Date "):
245 date = line[7:]
245 date = line[7:]
246 elif line.startswith("# Branch "):
246 elif line.startswith("# Branch "):
247 branch = line[9:]
247 branch = line[9:]
248 elif line.startswith("# Node ID "):
248 elif line.startswith("# Node ID "):
249 nodeid = line[10:]
249 nodeid = line[10:]
250 elif line.startswith("# Parent "):
250 elif line.startswith("# Parent "):
251 parents.append(line[10:])
251 parents.append(line[10:])
252 elif line == '---' and gitsendmail:
252 elif line == '---' and gitsendmail:
253 ignoretext = True
253 ignoretext = True
254 if not line.startswith('# ') and not ignoretext:
254 if not line.startswith('# ') and not ignoretext:
255 cfp.write(line)
255 cfp.write(line)
256 cfp.write('\n')
256 cfp.write('\n')
257 message = cfp.getvalue()
257 message = cfp.getvalue()
258 if tmpfp:
258 if tmpfp:
259 tmpfp.write(payload)
259 tmpfp.write(payload)
260 if not payload.endswith('\n'):
260 if not payload.endswith('\n'):
261 tmpfp.write('\n')
261 tmpfp.write('\n')
262 elif not diffs_seen and message and content_type == 'text/plain':
262 elif not diffs_seen and message and content_type == 'text/plain':
263 message += '\n' + payload
263 message += '\n' + payload
264 except:
264 except:
265 tmpfp.close()
265 tmpfp.close()
266 os.unlink(tmpname)
266 os.unlink(tmpname)
267 raise
267 raise
268
268
269 if subject and not message.startswith(subject):
269 if subject and not message.startswith(subject):
270 message = '%s\n%s' % (subject, message)
270 message = '%s\n%s' % (subject, message)
271 tmpfp.close()
271 tmpfp.close()
272 if not diffs_seen:
272 if not diffs_seen:
273 os.unlink(tmpname)
273 os.unlink(tmpname)
274 return None, message, user, date, branch, None, None, None
274 return None, message, user, date, branch, None, None, None
275 p1 = parents and parents.pop(0) or None
275 p1 = parents and parents.pop(0) or None
276 p2 = parents and parents.pop(0) or None
276 p2 = parents and parents.pop(0) or None
277 return tmpname, message, user, date, branch, nodeid, p1, p2
277 return tmpname, message, user, date, branch, nodeid, p1, p2
278
278
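A usage sketch for extract() above; 'mail.patch' is a hypothetical input file, and the caller is responsible for unlinking the temporary file, as the docstring says:

    import os
    from mercurial import ui, patch

    fp = open('mail.patch')                       # hypothetical input file
    try:
        tmpname, message, user, date, branch, node, p1, p2 = \
            patch.extract(ui.ui(), fp)
    finally:
        fp.close()
    if tmpname:
        print 'patch saved to %s, from %s' % (tmpname, user)
        os.unlink(tmpname)                        # caller must clean up
    else:
        print 'no patch found; message was: %r' % message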
279 GP_PATCH = 1 << 0 # we have to run patch
279 GP_PATCH = 1 << 0 # we have to run patch
280 GP_FILTER = 1 << 1 # there's some copy/rename operation
280 GP_FILTER = 1 << 1 # there's some copy/rename operation
281 GP_BINARY = 1 << 2 # there's a binary patch
281 GP_BINARY = 1 << 2 # there's a binary patch
282
282
283 class patchmeta(object):
283 class patchmeta(object):
284 """Patched file metadata
284 """Patched file metadata
285
285
286 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
286 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY
287 or COPY. 'path' is patched file path. 'oldpath' is set to the
287 or COPY. 'path' is patched file path. 'oldpath' is set to the
288 origin file when 'op' is either COPY or RENAME, None otherwise. If
288 origin file when 'op' is either COPY or RENAME, None otherwise. If
289 file mode is changed, 'mode' is a tuple (islink, isexec) where
289 file mode is changed, 'mode' is a tuple (islink, isexec) where
290 'islink' is True if the file is a symlink and 'isexec' is True if
290 'islink' is True if the file is a symlink and 'isexec' is True if
291 the file is executable. Otherwise, 'mode' is None.
291 the file is executable. Otherwise, 'mode' is None.
292 """
292 """
293 def __init__(self, path):
293 def __init__(self, path):
294 self.path = path
294 self.path = path
295 self.oldpath = None
295 self.oldpath = None
296 self.mode = None
296 self.mode = None
297 self.op = 'MODIFY'
297 self.op = 'MODIFY'
298 self.lineno = 0
298 self.lineno = 0
299 self.binary = False
299 self.binary = False
300
300
301 def setmode(self, mode):
301 def setmode(self, mode):
302 islink = mode & 020000
302 islink = mode & 020000
303 isexec = mode & 0100
303 isexec = mode & 0100
304 self.mode = (islink, isexec)
304 self.mode = (islink, isexec)
305
305
306 def __repr__(self):
306 def __repr__(self):
307 return "<patchmeta %s %r>" % (self.op, self.path)
307 return "<patchmeta %s %r>" % (self.op, self.path)
308
308
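The setmode() masks above pull the two interesting bits out of a git file mode; a quick standalone check, using Python 2 octal literals as in the code above:

    def flags(mode):
        islink = bool(mode & 020000)   # symlink bit, as in setmode() above
        isexec = bool(mode & 0100)     # owner-execute bit
        return islink, isexec

    print flags(0100644)   # regular file    -> (False, False)
    print flags(0100755)   # executable file -> (False, True)
    print flags(0120000)   # symlink         -> (True, False)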
309 def readgitpatch(lr):
309 def readgitpatch(lr):
310 """extract git-style metadata about patches from <patchname>"""
310 """extract git-style metadata about patches from <patchname>"""
311
311
312 # Filter patch for git information
312 # Filter patch for git information
313 gp = None
313 gp = None
314 gitpatches = []
314 gitpatches = []
315 # Can have a git patch with only metadata, causing patch to complain
315 # Can have a git patch with only metadata, causing patch to complain
316 dopatch = 0
316 dopatch = 0
317
317
318 lineno = 0
318 lineno = 0
319 for line in lr:
319 for line in lr:
320 lineno += 1
320 lineno += 1
321 line = line.rstrip(' \r\n')
321 line = line.rstrip(' \r\n')
322 if line.startswith('diff --git'):
322 if line.startswith('diff --git'):
323 m = gitre.match(line)
323 m = gitre.match(line)
324 if m:
324 if m:
325 if gp:
325 if gp:
326 gitpatches.append(gp)
326 gitpatches.append(gp)
327 dst = m.group(2)
327 dst = m.group(2)
328 gp = patchmeta(dst)
328 gp = patchmeta(dst)
329 gp.lineno = lineno
329 gp.lineno = lineno
330 elif gp:
330 elif gp:
331 if line.startswith('--- '):
331 if line.startswith('--- '):
332 if gp.op in ('COPY', 'RENAME'):
332 if gp.op in ('COPY', 'RENAME'):
333 dopatch |= GP_FILTER
333 dopatch |= GP_FILTER
334 gitpatches.append(gp)
334 gitpatches.append(gp)
335 gp = None
335 gp = None
336 dopatch |= GP_PATCH
336 dopatch |= GP_PATCH
337 continue
337 continue
338 if line.startswith('rename from '):
338 if line.startswith('rename from '):
339 gp.op = 'RENAME'
339 gp.op = 'RENAME'
340 gp.oldpath = line[12:]
340 gp.oldpath = line[12:]
341 elif line.startswith('rename to '):
341 elif line.startswith('rename to '):
342 gp.path = line[10:]
342 gp.path = line[10:]
343 elif line.startswith('copy from '):
343 elif line.startswith('copy from '):
344 gp.op = 'COPY'
344 gp.op = 'COPY'
345 gp.oldpath = line[10:]
345 gp.oldpath = line[10:]
346 elif line.startswith('copy to '):
346 elif line.startswith('copy to '):
347 gp.path = line[8:]
347 gp.path = line[8:]
348 elif line.startswith('deleted file'):
348 elif line.startswith('deleted file'):
349 gp.op = 'DELETE'
349 gp.op = 'DELETE'
350 elif line.startswith('new file mode '):
350 elif line.startswith('new file mode '):
351 gp.op = 'ADD'
351 gp.op = 'ADD'
352 gp.setmode(int(line[-6:], 8))
352 gp.setmode(int(line[-6:], 8))
353 elif line.startswith('new mode '):
353 elif line.startswith('new mode '):
354 gp.setmode(int(line[-6:], 8))
354 gp.setmode(int(line[-6:], 8))
355 elif line.startswith('GIT binary patch'):
355 elif line.startswith('GIT binary patch'):
356 dopatch |= GP_BINARY
356 dopatch |= GP_BINARY
357 gp.binary = True
357 gp.binary = True
358 if gp:
358 if gp:
359 gitpatches.append(gp)
359 gitpatches.append(gp)
360
360
361 if not gitpatches:
361 if not gitpatches:
362 dopatch = GP_PATCH
362 dopatch = GP_PATCH
363
363
364 return (dopatch, gitpatches)
364 return (dopatch, gitpatches)
365
365
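readgitpatch() above only needs an iterable of lines, so a plain list is enough for a quick look at what it extracts; the diff text below is invented for illustration:

    from mercurial import patch

    lines = [
        'diff --git a/old.txt b/new.txt\n',
        'rename from old.txt\n',
        'rename to new.txt\n',
        '--- a/old.txt\n',
        '+++ b/new.txt\n',
    ]
    dopatch, gitpatches = patch.readgitpatch(lines)
    for gp in gitpatches:
        print gp            # <patchmeta RENAME 'new.txt'>
        print gp.oldpath    # old.txt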
366 class linereader(object):
366 class linereader(object):
367 # simple class to allow pushing lines back into the input stream
367 # simple class to allow pushing lines back into the input stream
368 def __init__(self, fp, textmode=False):
368 def __init__(self, fp, textmode=False):
369 self.fp = fp
369 self.fp = fp
370 self.buf = []
370 self.buf = []
371 self.textmode = textmode
371 self.textmode = textmode
372 self.eol = None
372 self.eol = None
373
373
374 def push(self, line):
374 def push(self, line):
375 if line is not None:
375 if line is not None:
376 self.buf.append(line)
376 self.buf.append(line)
377
377
378 def readline(self):
378 def readline(self):
379 if self.buf:
379 if self.buf:
380 l = self.buf[0]
380 l = self.buf[0]
381 del self.buf[0]
381 del self.buf[0]
382 return l
382 return l
383 l = self.fp.readline()
383 l = self.fp.readline()
384 if not self.eol:
384 if not self.eol:
385 if l.endswith('\r\n'):
385 if l.endswith('\r\n'):
386 self.eol = '\r\n'
386 self.eol = '\r\n'
387 elif l.endswith('\n'):
387 elif l.endswith('\n'):
388 self.eol = '\n'
388 self.eol = '\n'
389 if self.textmode and l.endswith('\r\n'):
389 if self.textmode and l.endswith('\r\n'):
390 l = l[:-2] + '\n'
390 l = l[:-2] + '\n'
391 return l
391 return l
392
392
393 def __iter__(self):
393 def __iter__(self):
394 while 1:
394 while 1:
395 l = self.readline()
395 l = self.readline()
396 if not l:
396 if not l:
397 break
397 break
398 yield l
398 yield l
399
399
400 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
400 # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
401 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
401 unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
402 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
402 contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
403 eolmodes = ['strict', 'crlf', 'lf', 'auto']
403 eolmodes = ['strict', 'crlf', 'lf', 'auto']
404
404
405 class patchfile(object):
405 class patchfile(object):
406 def __init__(self, ui, fname, opener, missing=False, eolmode='strict'):
406 def __init__(self, ui, fname, opener, missing=False, eolmode='strict'):
407 self.fname = fname
407 self.fname = fname
408 self.eolmode = eolmode
408 self.eolmode = eolmode
409 self.eol = None
409 self.eol = None
410 self.opener = opener
410 self.opener = opener
411 self.ui = ui
411 self.ui = ui
412 self.lines = []
412 self.lines = []
413 self.exists = False
413 self.exists = False
414 self.missing = missing
414 self.missing = missing
415 if not missing:
415 if not missing:
416 try:
416 try:
417 self.lines = self.readlines(fname)
417 self.lines = self.readlines(fname)
418 self.exists = True
418 self.exists = True
419 except IOError:
419 except IOError:
420 pass
420 pass
421 else:
421 else:
422 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
422 self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
423
423
424 self.hash = {}
424 self.hash = {}
425 self.dirty = 0
425 self.dirty = 0
426 self.offset = 0
426 self.offset = 0
427 self.skew = 0
427 self.skew = 0
428 self.rej = []
428 self.rej = []
429 self.fileprinted = False
429 self.fileprinted = False
430 self.printfile(False)
430 self.printfile(False)
431 self.hunks = 0
431 self.hunks = 0
432
432
433 def readlines(self, fname):
433 def readlines(self, fname):
434 if os.path.islink(fname):
434 if os.path.islink(fname):
435 return [os.readlink(fname)]
435 return [os.readlink(fname)]
436 fp = self.opener(fname, 'r')
436 fp = self.opener(fname, 'r')
437 try:
437 try:
438 lr = linereader(fp, self.eolmode != 'strict')
438 lr = linereader(fp, self.eolmode != 'strict')
439 lines = list(lr)
439 lines = list(lr)
440 self.eol = lr.eol
440 self.eol = lr.eol
441 return lines
441 return lines
442 finally:
442 finally:
443 fp.close()
443 fp.close()
444
444
445 def writelines(self, fname, lines):
445 def writelines(self, fname, lines):
446 # Ensure supplied data ends in fname, being a regular file or
446 # Ensure supplied data ends in fname, being a regular file or
447 # a symlink. updatedir() will -too magically- take care of
447 # a symlink. updatedir() will -too magically- take care of
448 # setting it to the proper type afterwards.
448 # setting it to the proper type afterwards.
449 islink = os.path.islink(fname)
449 islink = os.path.islink(fname)
450 if islink:
450 if islink:
451 fp = cStringIO.StringIO()
451 fp = cStringIO.StringIO()
452 else:
452 else:
453 fp = self.opener(fname, 'w')
453 fp = self.opener(fname, 'w')
454 try:
454 try:
455 if self.eolmode == 'auto':
455 if self.eolmode == 'auto':
456 eol = self.eol
456 eol = self.eol
457 elif self.eolmode == 'crlf':
457 elif self.eolmode == 'crlf':
458 eol = '\r\n'
458 eol = '\r\n'
459 else:
459 else:
460 eol = '\n'
460 eol = '\n'
461
461
462 if self.eolmode != 'strict' and eol and eol != '\n':
462 if self.eolmode != 'strict' and eol and eol != '\n':
463 for l in lines:
463 for l in lines:
464 if l and l[-1] == '\n':
464 if l and l[-1] == '\n':
465 l = l[:-1] + eol
465 l = l[:-1] + eol
466 fp.write(l)
466 fp.write(l)
467 else:
467 else:
468 fp.writelines(lines)
468 fp.writelines(lines)
469 if islink:
469 if islink:
470 self.opener.symlink(fp.getvalue(), fname)
470 self.opener.symlink(fp.getvalue(), fname)
471 finally:
471 finally:
472 fp.close()
472 fp.close()
473
473
474 def unlink(self, fname):
474 def unlink(self, fname):
475 os.unlink(fname)
475 os.unlink(fname)
476
476
477 def printfile(self, warn):
477 def printfile(self, warn):
478 if self.fileprinted:
478 if self.fileprinted:
479 return
479 return
480 if warn or self.ui.verbose:
480 if warn or self.ui.verbose:
481 self.fileprinted = True
481 self.fileprinted = True
482 s = _("patching file %s\n") % self.fname
482 s = _("patching file %s\n") % self.fname
483 if warn:
483 if warn:
484 self.ui.warn(s)
484 self.ui.warn(s)
485 else:
485 else:
486 self.ui.note(s)
486 self.ui.note(s)
487
487
488
488
489 def findlines(self, l, linenum):
489 def findlines(self, l, linenum):
490 # looks through the hash and finds candidate lines. The
490 # looks through the hash and finds candidate lines. The
491 # result is a list of line numbers sorted based on distance
491 # result is a list of line numbers sorted based on distance
492 # from linenum
492 # from linenum
493
493
494 cand = self.hash.get(l, [])
494 cand = self.hash.get(l, [])
495 if len(cand) > 1:
495 if len(cand) > 1:
496 # resort our list of potentials forward then back.
496 # resort our list of potentials forward then back.
497 cand.sort(key=lambda x: abs(x - linenum))
497 cand.sort(key=lambda x: abs(x - linenum))
498 return cand
498 return cand
499
499
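The candidate ordering used by findlines() above is simply "closest line first"; a standalone sketch with a toy hash of line contents (hashlines() below builds the real one):

    lines = ['a\n', 'b\n', 'a\n', 'c\n', 'a\n']

    # map each line's text to the positions it occurs at
    h = {}
    for i, s in enumerate(lines):
        h.setdefault(s, []).append(i)

    def findlines(l, linenum):
        cand = h.get(l, [])
        # closest occurrence to the expected position is tried first
        cand.sort(key=lambda x: abs(x - linenum))
        return cand

    print findlines('a\n', 3)    # -> [2, 4, 0]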
500 def hashlines(self):
500 def hashlines(self):
501 self.hash = {}
501 self.hash = {}
502 for x, s in enumerate(self.lines):
502 for x, s in enumerate(self.lines):
503 self.hash.setdefault(s, []).append(x)
503 self.hash.setdefault(s, []).append(x)
504
504
505 def write_rej(self):
505 def write_rej(self):
506 # our rejects are a little different from patch(1). This always
506 # our rejects are a little different from patch(1). This always
507 # creates rejects in the same form as the original patch. A file
507 # creates rejects in the same form as the original patch. A file
508 # header is inserted so that you can run the reject through patch again
508 # header is inserted so that you can run the reject through patch again
509 # without having to type the filename.
509 # without having to type the filename.
510
510
511 if not self.rej:
511 if not self.rej:
512 return
512 return
513
513
514 fname = self.fname + ".rej"
514 fname = self.fname + ".rej"
515 self.ui.warn(
515 self.ui.warn(
516 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
516 _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
517 (len(self.rej), self.hunks, fname))
517 (len(self.rej), self.hunks, fname))
518
518
519 def rejlines():
519 def rejlines():
520 base = os.path.basename(self.fname)
520 base = os.path.basename(self.fname)
521 yield "--- %s\n+++ %s\n" % (base, base)
521 yield "--- %s\n+++ %s\n" % (base, base)
522 for x in self.rej:
522 for x in self.rej:
523 for l in x.hunk:
523 for l in x.hunk:
524 yield l
524 yield l
525 if l[-1] != '\n':
525 if l[-1] != '\n':
526 yield "\n\ No newline at end of file\n"
526 yield "\n\ No newline at end of file\n"
527
527
528 self.writelines(fname, rejlines())
528 self.writelines(fname, rejlines())
529
529
530 def apply(self, h):
530 def apply(self, h):
531 if not h.complete():
531 if not h.complete():
532 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
532 raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") %
533 (h.number, h.desc, len(h.a), h.lena, len(h.b),
533 (h.number, h.desc, len(h.a), h.lena, len(h.b),
534 h.lenb))
534 h.lenb))
535
535
536 self.hunks += 1
536 self.hunks += 1
537
537
538 if self.missing:
538 if self.missing:
539 self.rej.append(h)
539 self.rej.append(h)
540 return -1
540 return -1
541
541
542 if self.exists and h.createfile():
542 if self.exists and h.createfile():
543 self.ui.warn(_("file %s already exists\n") % self.fname)
543 self.ui.warn(_("file %s already exists\n") % self.fname)
544 self.rej.append(h)
544 self.rej.append(h)
545 return -1
545 return -1
546
546
547 if isinstance(h, binhunk):
547 if isinstance(h, binhunk):
548 if h.rmfile():
548 if h.rmfile():
549 self.unlink(self.fname)
549 self.unlink(self.fname)
550 else:
550 else:
551 self.lines[:] = h.new()
551 self.lines[:] = h.new()
552 self.offset += len(h.new())
552 self.offset += len(h.new())
553 self.dirty = 1
553 self.dirty = 1
554 return 0
554 return 0
555
555
556 horig = h
556 horig = h
557 if (self.eolmode in ('crlf', 'lf')
557 if (self.eolmode in ('crlf', 'lf')
558 or self.eolmode == 'auto' and self.eol):
558 or self.eolmode == 'auto' and self.eol):
559 # If new eols are going to be normalized, then normalize
559 # If new eols are going to be normalized, then normalize
560 # hunk data before patching. Otherwise, preserve input
560 # hunk data before patching. Otherwise, preserve input
561 # line-endings.
561 # line-endings.
562 h = h.getnormalized()
562 h = h.getnormalized()
563
563
564 # fast case first, no offsets, no fuzz
564 # fast case first, no offsets, no fuzz
565 old = h.old()
565 old = h.old()
566 # patch starts counting at 1 unless we are adding the file
566 # patch starts counting at 1 unless we are adding the file
567 if h.starta == 0:
567 if h.starta == 0:
568 start = 0
568 start = 0
569 else:
569 else:
570 start = h.starta + self.offset - 1
570 start = h.starta + self.offset - 1
571 orig_start = start
571 orig_start = start
572 # if there's skew we want to emit the "(offset %d lines)" even
572 # if there's skew we want to emit the "(offset %d lines)" even
573 # when the hunk cleanly applies at start + skew, so skip the
573 # when the hunk cleanly applies at start + skew, so skip the
574 # fast case code
574 # fast case code
575 if self.skew == 0 and diffhelpers.testhunk(old, self.lines, start) == 0:
575 if self.skew == 0 and diffhelpers.testhunk(old, self.lines, start) == 0:
576 if h.rmfile():
576 if h.rmfile():
577 self.unlink(self.fname)
577 self.unlink(self.fname)
578 else:
578 else:
579 self.lines[start : start + h.lena] = h.new()
579 self.lines[start : start + h.lena] = h.new()
580 self.offset += h.lenb - h.lena
580 self.offset += h.lenb - h.lena
581 self.dirty = 1
581 self.dirty = 1
582 return 0
582 return 0
583
583
584 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
584 # ok, we couldn't match the hunk. Let's look for offsets and fuzz it
585 self.hashlines()
585 self.hashlines()
586 if h.hunk[-1][0] != ' ':
586 if h.hunk[-1][0] != ' ':
587 # if the hunk tried to put something at the bottom of the file
587 # if the hunk tried to put something at the bottom of the file
588 # override the start line and use eof here
588 # override the start line and use eof here
589 search_start = len(self.lines)
589 search_start = len(self.lines)
590 else:
590 else:
591 search_start = orig_start + self.skew
591 search_start = orig_start + self.skew
592
592
593 for fuzzlen in xrange(3):
593 for fuzzlen in xrange(3):
594 for toponly in [True, False]:
594 for toponly in [True, False]:
595 old = h.old(fuzzlen, toponly)
595 old = h.old(fuzzlen, toponly)
596
596
597 cand = self.findlines(old[0][1:], search_start)
597 cand = self.findlines(old[0][1:], search_start)
598 for l in cand:
598 for l in cand:
599 if diffhelpers.testhunk(old, self.lines, l) == 0:
599 if diffhelpers.testhunk(old, self.lines, l) == 0:
600 newlines = h.new(fuzzlen, toponly)
600 newlines = h.new(fuzzlen, toponly)
601 self.lines[l : l + len(old)] = newlines
601 self.lines[l : l + len(old)] = newlines
602 self.offset += len(newlines) - len(old)
602 self.offset += len(newlines) - len(old)
603 self.skew = l - orig_start
603 self.skew = l - orig_start
604 self.dirty = 1
604 self.dirty = 1
605 offset = l - orig_start - fuzzlen
605 offset = l - orig_start - fuzzlen
606 if fuzzlen:
606 if fuzzlen:
607 msg = _("Hunk #%d succeeded at %d "
607 msg = _("Hunk #%d succeeded at %d "
608 "with fuzz %d "
608 "with fuzz %d "
609 "(offset %d lines).\n")
609 "(offset %d lines).\n")
610 self.printfile(True)
610 self.printfile(True)
611 self.ui.warn(msg %
611 self.ui.warn(msg %
612 (h.number, l + 1, fuzzlen, offset))
612 (h.number, l + 1, fuzzlen, offset))
613 else:
613 else:
614 msg = _("Hunk #%d succeeded at %d "
614 msg = _("Hunk #%d succeeded at %d "
615 "(offset %d lines).\n")
615 "(offset %d lines).\n")
616 self.ui.note(msg % (h.number, l + 1, offset))
616 self.ui.note(msg % (h.number, l + 1, offset))
617 return fuzzlen
617 return fuzzlen
618 self.printfile(True)
618 self.printfile(True)
619 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
619 self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start))
620 self.rej.append(horig)
620 self.rej.append(horig)
621 return -1
621 return -1
622
622
623 class hunk(object):
623 class hunk(object):
624 def __init__(self, desc, num, lr, context, create=False, remove=False):
624 def __init__(self, desc, num, lr, context, create=False, remove=False):
625 self.number = num
625 self.number = num
626 self.desc = desc
626 self.desc = desc
627 self.hunk = [desc]
627 self.hunk = [desc]
628 self.a = []
628 self.a = []
629 self.b = []
629 self.b = []
630 self.starta = self.lena = None
630 self.starta = self.lena = None
631 self.startb = self.lenb = None
631 self.startb = self.lenb = None
632 if lr is not None:
632 if lr is not None:
633 if context:
633 if context:
634 self.read_context_hunk(lr)
634 self.read_context_hunk(lr)
635 else:
635 else:
636 self.read_unified_hunk(lr)
636 self.read_unified_hunk(lr)
637 self.create = create
637 self.create = create
638 self.remove = remove and not create
638 self.remove = remove and not create
639
639
640 def getnormalized(self):
640 def getnormalized(self):
641 """Return a copy with line endings normalized to LF."""
641 """Return a copy with line endings normalized to LF."""
642
642
643 def normalize(lines):
643 def normalize(lines):
644 nlines = []
644 nlines = []
645 for line in lines:
645 for line in lines:
646 if line.endswith('\r\n'):
646 if line.endswith('\r\n'):
647 line = line[:-2] + '\n'
647 line = line[:-2] + '\n'
648 nlines.append(line)
648 nlines.append(line)
649 return nlines
649 return nlines
650
650
651 # Dummy object, it is rebuilt manually
651 # Dummy object, it is rebuilt manually
652 nh = hunk(self.desc, self.number, None, None, False, False)
652 nh = hunk(self.desc, self.number, None, None, False, False)
653 nh.number = self.number
653 nh.number = self.number
654 nh.desc = self.desc
654 nh.desc = self.desc
655 nh.hunk = self.hunk
655 nh.hunk = self.hunk
656 nh.a = normalize(self.a)
656 nh.a = normalize(self.a)
657 nh.b = normalize(self.b)
657 nh.b = normalize(self.b)
658 nh.starta = self.starta
658 nh.starta = self.starta
659 nh.startb = self.startb
659 nh.startb = self.startb
660 nh.lena = self.lena
660 nh.lena = self.lena
661 nh.lenb = self.lenb
661 nh.lenb = self.lenb
662 nh.create = self.create
662 nh.create = self.create
663 nh.remove = self.remove
663 nh.remove = self.remove
664 return nh
664 return nh
665
665
666 def read_unified_hunk(self, lr):
666 def read_unified_hunk(self, lr):
667 m = unidesc.match(self.desc)
667 m = unidesc.match(self.desc)
668 if not m:
668 if not m:
669 raise PatchError(_("bad hunk #%d") % self.number)
669 raise PatchError(_("bad hunk #%d") % self.number)
670 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
670 self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
671 if self.lena is None:
671 if self.lena is None:
672 self.lena = 1
672 self.lena = 1
673 else:
673 else:
674 self.lena = int(self.lena)
674 self.lena = int(self.lena)
675 if self.lenb is None:
675 if self.lenb is None:
676 self.lenb = 1
676 self.lenb = 1
677 else:
677 else:
678 self.lenb = int(self.lenb)
678 self.lenb = int(self.lenb)
679 self.starta = int(self.starta)
679 self.starta = int(self.starta)
680 self.startb = int(self.startb)
680 self.startb = int(self.startb)
681 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
681 diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
682 # if we hit eof before finishing out the hunk, the last line will
682 # if we hit eof before finishing out the hunk, the last line will
683 # be zero length. Let's try to fix it up.
683 # be zero length. Let's try to fix it up.
684 while len(self.hunk[-1]) == 0:
684 while len(self.hunk[-1]) == 0:
685 del self.hunk[-1]
685 del self.hunk[-1]
686 del self.a[-1]
686 del self.a[-1]
687 del self.b[-1]
687 del self.b[-1]
688 self.lena -= 1
688 self.lena -= 1
689 self.lenb -= 1
689 self.lenb -= 1
690
690
691 def read_context_hunk(self, lr):
691 def read_context_hunk(self, lr):
692 self.desc = lr.readline()
692 self.desc = lr.readline()
693 m = contextdesc.match(self.desc)
693 m = contextdesc.match(self.desc)
694 if not m:
694 if not m:
695 raise PatchError(_("bad hunk #%d") % self.number)
695 raise PatchError(_("bad hunk #%d") % self.number)
696 foo, self.starta, foo2, aend, foo3 = m.groups()
696 foo, self.starta, foo2, aend, foo3 = m.groups()
697 self.starta = int(self.starta)
697 self.starta = int(self.starta)
698 if aend is None:
698 if aend is None:
699 aend = self.starta
699 aend = self.starta
700 self.lena = int(aend) - self.starta
700 self.lena = int(aend) - self.starta
701 if self.starta:
701 if self.starta:
702 self.lena += 1
702 self.lena += 1
703 for x in xrange(self.lena):
703 for x in xrange(self.lena):
704 l = lr.readline()
704 l = lr.readline()
705 if l.startswith('---'):
705 if l.startswith('---'):
706 lr.push(l)
706 lr.push(l)
707 break
707 break
708 s = l[2:]
708 s = l[2:]
709 if l.startswith('- ') or l.startswith('! '):
709 if l.startswith('- ') or l.startswith('! '):
710 u = '-' + s
710 u = '-' + s
711 elif l.startswith(' '):
711 elif l.startswith(' '):
712 u = ' ' + s
712 u = ' ' + s
713 else:
713 else:
714 raise PatchError(_("bad hunk #%d old text line %d") %
714 raise PatchError(_("bad hunk #%d old text line %d") %
715 (self.number, x))
715 (self.number, x))
716 self.a.append(u)
716 self.a.append(u)
717 self.hunk.append(u)
717 self.hunk.append(u)
718
718
719 l = lr.readline()
719 l = lr.readline()
720 if l.startswith('\ '):
720 if l.startswith('\ '):
721 s = self.a[-1][:-1]
721 s = self.a[-1][:-1]
722 self.a[-1] = s
722 self.a[-1] = s
723 self.hunk[-1] = s
723 self.hunk[-1] = s
724 l = lr.readline()
724 l = lr.readline()
725 m = contextdesc.match(l)
725 m = contextdesc.match(l)
726 if not m:
726 if not m:
727 raise PatchError(_("bad hunk #%d") % self.number)
727 raise PatchError(_("bad hunk #%d") % self.number)
728 foo, self.startb, foo2, bend, foo3 = m.groups()
728 foo, self.startb, foo2, bend, foo3 = m.groups()
729 self.startb = int(self.startb)
729 self.startb = int(self.startb)
730 if bend is None:
730 if bend is None:
731 bend = self.startb
731 bend = self.startb
732 self.lenb = int(bend) - self.startb
732 self.lenb = int(bend) - self.startb
733 if self.startb:
733 if self.startb:
734 self.lenb += 1
734 self.lenb += 1
735 hunki = 1
735 hunki = 1
736 for x in xrange(self.lenb):
736 for x in xrange(self.lenb):
737 l = lr.readline()
737 l = lr.readline()
738 if l.startswith('\ '):
738 if l.startswith('\ '):
739 s = self.b[-1][:-1]
739 s = self.b[-1][:-1]
740 self.b[-1] = s
740 self.b[-1] = s
741 self.hunk[hunki - 1] = s
741 self.hunk[hunki - 1] = s
742 continue
742 continue
743 if not l:
743 if not l:
744 lr.push(l)
744 lr.push(l)
745 break
745 break
746 s = l[2:]
746 s = l[2:]
747 if l.startswith('+ ') or l.startswith('! '):
747 if l.startswith('+ ') or l.startswith('! '):
748 u = '+' + s
748 u = '+' + s
749 elif l.startswith(' '):
749 elif l.startswith(' '):
750 u = ' ' + s
750 u = ' ' + s
751 elif len(self.b) == 0:
751 elif len(self.b) == 0:
752 # this can happen when the hunk does not add any lines
752 # this can happen when the hunk does not add any lines
753 lr.push(l)
753 lr.push(l)
754 break
754 break
755 else:
755 else:
756 raise PatchError(_("bad hunk #%d old text line %d") %
756 raise PatchError(_("bad hunk #%d old text line %d") %
757 (self.number, x))
757 (self.number, x))
758 self.b.append(s)
758 self.b.append(s)
759 while True:
759 while True:
760 if hunki >= len(self.hunk):
760 if hunki >= len(self.hunk):
761 h = ""
761 h = ""
762 else:
762 else:
763 h = self.hunk[hunki]
763 h = self.hunk[hunki]
764 hunki += 1
764 hunki += 1
765 if h == u:
765 if h == u:
766 break
766 break
767 elif h.startswith('-'):
767 elif h.startswith('-'):
768 continue
768 continue
769 else:
769 else:
770 self.hunk.insert(hunki - 1, u)
770 self.hunk.insert(hunki - 1, u)
771 break
771 break
772
772
773 if not self.a:
773 if not self.a:
774 # this happens when lines were only added to the hunk
774 # this happens when lines were only added to the hunk
775 for x in self.hunk:
775 for x in self.hunk:
776 if x.startswith('-') or x.startswith(' '):
776 if x.startswith('-') or x.startswith(' '):
777 self.a.append(x)
777 self.a.append(x)
778 if not self.b:
778 if not self.b:
779 # this happens when lines were only deleted from the hunk
779 # this happens when lines were only deleted from the hunk
780 for x in self.hunk:
780 for x in self.hunk:
781 if x.startswith('+') or x.startswith(' '):
781 if x.startswith('+') or x.startswith(' '):
782 self.b.append(x[1:])
782 self.b.append(x[1:])
783 # @@ -start,len +start,len @@
783 # @@ -start,len +start,len @@
784 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
784 self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena,
785 self.startb, self.lenb)
785 self.startb, self.lenb)
786 self.hunk[0] = self.desc
786 self.hunk[0] = self.desc
787
787
788 def fix_newline(self):
788 def fix_newline(self):
789 diffhelpers.fix_newline(self.hunk, self.a, self.b)
789 diffhelpers.fix_newline(self.hunk, self.a, self.b)
790
790
791 def complete(self):
791 def complete(self):
792 return len(self.a) == self.lena and len(self.b) == self.lenb
792 return len(self.a) == self.lena and len(self.b) == self.lenb
793
793
794 def createfile(self):
794 def createfile(self):
795 return self.starta == 0 and self.lena == 0 and self.create
795 return self.starta == 0 and self.lena == 0 and self.create
796
796
797 def rmfile(self):
797 def rmfile(self):
798 return self.startb == 0 and self.lenb == 0 and self.remove
798 return self.startb == 0 and self.lenb == 0 and self.remove
799
799
800 def fuzzit(self, l, fuzz, toponly):
800 def fuzzit(self, l, fuzz, toponly):
801 # this removes context lines from the top and bottom of list 'l'. It
801 # this removes context lines from the top and bottom of list 'l'. It
802 # checks the hunk to make sure only context lines are removed, and then
802 # checks the hunk to make sure only context lines are removed, and then
803 # returns a new shortened list of lines.
803 # returns a new shortened list of lines.
804 fuzz = min(fuzz, len(l)-1)
804 fuzz = min(fuzz, len(l)-1)
805 if fuzz:
805 if fuzz:
806 top = 0
806 top = 0
807 bot = 0
807 bot = 0
808 hlen = len(self.hunk)
808 hlen = len(self.hunk)
809 for x in xrange(hlen - 1):
809 for x in xrange(hlen - 1):
810 # the hunk starts with the @@ line, so use x+1
810 # the hunk starts with the @@ line, so use x+1
811 if self.hunk[x + 1][0] == ' ':
811 if self.hunk[x + 1][0] == ' ':
812 top += 1
812 top += 1
813 else:
813 else:
814 break
814 break
815 if not toponly:
815 if not toponly:
816 for x in xrange(hlen - 1):
816 for x in xrange(hlen - 1):
817 if self.hunk[hlen - bot - 1][0] == ' ':
817 if self.hunk[hlen - bot - 1][0] == ' ':
818 bot += 1
818 bot += 1
819 else:
819 else:
820 break
820 break
821
821
822 # top and bot now count context in the hunk
822 # top and bot now count context in the hunk
823 # adjust them if either one is short
823 # adjust them if either one is short
824 context = max(top, bot, 3)
824 context = max(top, bot, 3)
825 if bot < context:
825 if bot < context:
826 bot = max(0, fuzz - (context - bot))
826 bot = max(0, fuzz - (context - bot))
827 else:
827 else:
828 bot = min(fuzz, bot)
828 bot = min(fuzz, bot)
829 if top < context:
829 if top < context:
830 top = max(0, fuzz - (context - top))
830 top = max(0, fuzz - (context - top))
831 else:
831 else:
832 top = min(fuzz, top)
832 top = min(fuzz, top)
833
833
834 return l[top:len(l)-bot]
834 return l[top:len(l)-bot]
835 return l
835 return l
836
836
837 def old(self, fuzz=0, toponly=False):
837 def old(self, fuzz=0, toponly=False):
838 return self.fuzzit(self.a, fuzz, toponly)
838 return self.fuzzit(self.a, fuzz, toponly)
839
839
840 def new(self, fuzz=0, toponly=False):
840 def new(self, fuzz=0, toponly=False):
841 return self.fuzzit(self.b, fuzz, toponly)
841 return self.fuzzit(self.b, fuzz, toponly)
842
842
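# Illustrative sketch (assumption, not part of the original module): how
# old()/new() above trim context lines when a fuzz factor is requested.
# The hunk is built through the same "dummy" path getnormalized() uses
# (lr=None), and the helper name _fuzzexample is hypothetical.
def _fuzzexample():
    h = hunk("@@ -1,7 +1,7 @@\n", 1, None, None)
    h.hunk = ["@@ -1,7 +1,7 @@\n",
              " c1\n", " c2\n", " c3\n",
              "-old\n", "+new\n",
              " c4\n", " c5\n", " c6\n"]
    h.a = [" c1\n", " c2\n", " c3\n", "-old\n", " c4\n", " c5\n", " c6\n"]
    h.b = ["c1\n", "c2\n", "c3\n", "new\n", "c4\n", "c5\n", "c6\n"]
    # h.old()                 keeps all seven old lines (no fuzz)
    # h.old(1, toponly=True)  drops one context line from the top only
    # h.old(1, toponly=False) drops one context line from each end
    return h.old(), h.old(1, toponly=True), h.old(1, toponly=False)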
843 class binhunk:
843 class binhunk:
844 'A binary patch file. Only understands literals so far.'
844 'A binary patch file. Only understands literals so far.'
845 def __init__(self, gitpatch):
845 def __init__(self, gitpatch):
846 self.gitpatch = gitpatch
846 self.gitpatch = gitpatch
847 self.text = None
847 self.text = None
848 self.hunk = ['GIT binary patch\n']
848 self.hunk = ['GIT binary patch\n']
849
849
850 def createfile(self):
850 def createfile(self):
851 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
851 return self.gitpatch.op in ('ADD', 'RENAME', 'COPY')
852
852
853 def rmfile(self):
853 def rmfile(self):
854 return self.gitpatch.op == 'DELETE'
854 return self.gitpatch.op == 'DELETE'
855
855
856 def complete(self):
856 def complete(self):
857 return self.text is not None
857 return self.text is not None
858
858
859 def new(self):
859 def new(self):
860 return [self.text]
860 return [self.text]
861
861
862 def extract(self, lr):
862 def extract(self, lr):
863 line = lr.readline()
863 line = lr.readline()
864 self.hunk.append(line)
864 self.hunk.append(line)
865 while line and not line.startswith('literal '):
865 while line and not line.startswith('literal '):
866 line = lr.readline()
866 line = lr.readline()
867 self.hunk.append(line)
867 self.hunk.append(line)
868 if not line:
868 if not line:
869 raise PatchError(_('could not extract binary patch'))
869 raise PatchError(_('could not extract binary patch'))
870 size = int(line[8:].rstrip())
870 size = int(line[8:].rstrip())
871 dec = []
871 dec = []
872 line = lr.readline()
872 line = lr.readline()
873 self.hunk.append(line)
873 self.hunk.append(line)
874 while len(line) > 1:
874 while len(line) > 1:
875 l = line[0]
875 l = line[0]
876 if l <= 'Z' and l >= 'A':
876 if l <= 'Z' and l >= 'A':
877 l = ord(l) - ord('A') + 1
877 l = ord(l) - ord('A') + 1
878 else:
878 else:
879 l = ord(l) - ord('a') + 27
879 l = ord(l) - ord('a') + 27
880 dec.append(base85.b85decode(line[1:-1])[:l])
880 dec.append(base85.b85decode(line[1:-1])[:l])
881 line = lr.readline()
881 line = lr.readline()
882 self.hunk.append(line)
882 self.hunk.append(line)
883 text = zlib.decompress(''.join(dec))
883 text = zlib.decompress(''.join(dec))
884 if len(text) != size:
884 if len(text) != size:
885 raise PatchError(_('binary patch is %d bytes, not %d') %
885 raise PatchError(_('binary patch is %d bytes, not %d') %
886 (len(text), size))
886 (len(text), size))
887 self.text = text
887 self.text = text
888
888
889 def parsefilename(str):
889 def parsefilename(str):
890 # --- filename \t|space stuff
890 # --- filename \t|space stuff
891 s = str[4:].rstrip('\r\n')
891 s = str[4:].rstrip('\r\n')
892 i = s.find('\t')
892 i = s.find('\t')
893 if i < 0:
893 if i < 0:
894 i = s.find(' ')
894 i = s.find(' ')
895 if i < 0:
895 if i < 0:
896 return s
896 return s
897 return s[:i]
897 return s[:i]
898
898
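# Illustrative examples (assumption, not part of the original module):
#   parsefilename('--- a/foo.c\t2010-08-01 10:00:00') -> 'a/foo.c'
#   parsefilename('+++ b/foo.c')                      -> 'b/foo.c'
# The '--- ' and '+++ ' prefixes are both four characters long, which is
# why the function can slice them off unconditionally before looking for
# the tab or space that introduces the timestamp.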
899 def pathstrip(path, strip):
899 def pathstrip(path, strip):
900 pathlen = len(path)
900 pathlen = len(path)
901 i = 0
901 i = 0
902 if strip == 0:
902 if strip == 0:
903 return '', path.rstrip()
903 return '', path.rstrip()
904 count = strip
904 count = strip
905 while count > 0:
905 while count > 0:
906 i = path.find('/', i)
906 i = path.find('/', i)
907 if i == -1:
907 if i == -1:
908 raise PatchError(_("unable to strip away %d of %d dirs from %s") %
908 raise PatchError(_("unable to strip away %d of %d dirs from %s") %
909 (count, strip, path))
909 (count, strip, path))
910 i += 1
910 i += 1
911 # consume '//' in the path
911 # consume '//' in the path
912 while i < pathlen - 1 and path[i] == '/':
912 while i < pathlen - 1 and path[i] == '/':
913 i += 1
913 i += 1
914 count -= 1
914 count -= 1
915 return path[:i].lstrip(), path[i:].rstrip()
915 return path[:i].lstrip(), path[i:].rstrip()
916
916
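# Illustrative examples (assumption, not part of the original module):
#   pathstrip('a/b/c/d.txt', 0) -> ('', 'a/b/c/d.txt')
#   pathstrip('a/b/c/d.txt', 1) -> ('a/', 'b/c/d.txt')
#   pathstrip('a/b/c/d.txt', 2) -> ('a/b/', 'c/d.txt')
# Asking to strip more components than the path contains raises PatchError.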
917 def selectfile(afile_orig, bfile_orig, hunk, strip):
917 def selectfile(afile_orig, bfile_orig, hunk, strip):
918 nulla = afile_orig == "/dev/null"
918 nulla = afile_orig == "/dev/null"
919 nullb = bfile_orig == "/dev/null"
919 nullb = bfile_orig == "/dev/null"
920 abase, afile = pathstrip(afile_orig, strip)
920 abase, afile = pathstrip(afile_orig, strip)
921 gooda = not nulla and util.lexists(afile)
921 gooda = not nulla and util.lexists(afile)
922 bbase, bfile = pathstrip(bfile_orig, strip)
922 bbase, bfile = pathstrip(bfile_orig, strip)
923 if afile == bfile:
923 if afile == bfile:
924 goodb = gooda
924 goodb = gooda
925 else:
925 else:
926 goodb = not nullb and os.path.lexists(bfile)
926 goodb = not nullb and os.path.lexists(bfile)
927 createfunc = hunk.createfile
927 createfunc = hunk.createfile
928 missing = not goodb and not gooda and not createfunc()
928 missing = not goodb and not gooda and not createfunc()
929
929
930 # some diff programs apparently produce create patches where the
930 # some diff programs apparently produce create patches where the
931 # afile is not /dev/null, but afile starts with bfile
931 # afile is not /dev/null, but afile starts with bfile
932 abasedir = afile[:afile.rfind('/') + 1]
932 abasedir = afile[:afile.rfind('/') + 1]
933 bbasedir = bfile[:bfile.rfind('/') + 1]
933 bbasedir = bfile[:bfile.rfind('/') + 1]
934 if missing and abasedir == bbasedir and afile.startswith(bfile):
934 if missing and abasedir == bbasedir and afile.startswith(bfile):
935 # this isn't very pretty
935 # this isn't very pretty
936 hunk.create = True
936 hunk.create = True
937 if createfunc():
937 if createfunc():
938 missing = False
938 missing = False
939 else:
939 else:
940 hunk.create = False
940 hunk.create = False
941
941
942 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
942 # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the
943 # diff is between a file and its backup. In this case, the original
943 # diff is between a file and its backup. In this case, the original
944 # file should be patched (see original mpatch code).
944 # file should be patched (see original mpatch code).
945 isbackup = (abase == bbase and bfile.startswith(afile))
945 isbackup = (abase == bbase and bfile.startswith(afile))
946 fname = None
946 fname = None
947 if not missing:
947 if not missing:
948 if gooda and goodb:
948 if gooda and goodb:
949 fname = isbackup and afile or bfile
949 fname = isbackup and afile or bfile
950 elif gooda:
950 elif gooda:
951 fname = afile
951 fname = afile
952
952
953 if not fname:
953 if not fname:
954 if not nullb:
954 if not nullb:
955 fname = isbackup and afile or bfile
955 fname = isbackup and afile or bfile
956 elif not nulla:
956 elif not nulla:
957 fname = afile
957 fname = afile
958 else:
958 else:
959 raise PatchError(_("undefined source and destination files"))
959 raise PatchError(_("undefined source and destination files"))
960
960
961 return fname, missing
961 return fname, missing
962
962
963 def scangitpatch(lr, firstline):
963 def scangitpatch(lr, firstline):
964 """
964 """
965 Git patches can emit:
965 Git patches can emit:
966 - rename a to b
966 - rename a to b
967 - change b
967 - change b
968 - copy a to c
968 - copy a to c
969 - change c
969 - change c
970
970
971 We cannot apply this sequence as-is: the renamed 'a' could not be
971 We cannot apply this sequence as-is: the renamed 'a' could not be
972 found, for it would have been renamed already. And we cannot copy
972 found, for it would have been renamed already. And we cannot copy
973 from 'b' instead because 'b' would have been changed already. So
973 from 'b' instead because 'b' would have been changed already. So
974 we scan the git patch for copy and rename commands so we can
974 we scan the git patch for copy and rename commands so we can
975 perform the copies ahead of time.
975 perform the copies ahead of time.
976 """
976 """
977 pos = 0
977 pos = 0
978 try:
978 try:
979 pos = lr.fp.tell()
979 pos = lr.fp.tell()
980 fp = lr.fp
980 fp = lr.fp
981 except IOError:
981 except IOError:
982 fp = cStringIO.StringIO(lr.fp.read())
982 fp = cStringIO.StringIO(lr.fp.read())
983 gitlr = linereader(fp, lr.textmode)
983 gitlr = linereader(fp, lr.textmode)
984 gitlr.push(firstline)
984 gitlr.push(firstline)
985 (dopatch, gitpatches) = readgitpatch(gitlr)
985 (dopatch, gitpatches) = readgitpatch(gitlr)
986 fp.seek(pos)
986 fp.seek(pos)
987 return dopatch, gitpatches
987 return dopatch, gitpatches
988
988
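# Illustrative example (assumption, not part of the original module): a git
# patch of the shape described in the docstring above.
#
#   diff --git a/a b/b
#   rename from a
#   rename to b
#   ... hunks modifying b ...
#   diff --git a/a b/c
#   copy from a
#   copy to c
#   ... hunks modifying c ...
#
# Applied strictly top to bottom this fails: by the time 'a' is copied to
# 'c', 'a' has already been renamed away and 'b' has already been modified,
# which is why the copy and rename commands are collected up front.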
989 def iterhunks(ui, fp, sourcefile=None):
989 def iterhunks(ui, fp, sourcefile=None):
990 """Read a patch and yield the following events:
990 """Read a patch and yield the following events:
991 - ("file", afile, bfile, firsthunk): select a new target file.
991 - ("file", afile, bfile, firsthunk): select a new target file.
992 - ("hunk", hunk): a new hunk is ready to be applied, follows a
992 - ("hunk", hunk): a new hunk is ready to be applied, follows a
993 "file" event.
993 "file" event.
994 - ("git", gitchanges): current diff is in git format, gitchanges
994 - ("git", gitchanges): current diff is in git format, gitchanges
995 maps filenames to gitpatch records. Unique event.
995 maps filenames to gitpatch records. Unique event.
996 """
996 """
997 changed = {}
997 changed = {}
998 current_hunk = None
998 current_hunk = None
999 afile = ""
999 afile = ""
1000 bfile = ""
1000 bfile = ""
1001 state = None
1001 state = None
1002 hunknum = 0
1002 hunknum = 0
1003 emitfile = False
1003 emitfile = False
1004 git = False
1004 git = False
1005
1005
1006 # our states
1006 # our states
1007 BFILE = 1
1007 BFILE = 1
1008 context = None
1008 context = None
1009 lr = linereader(fp)
1009 lr = linereader(fp)
1010 # gitworkdone is True if a git operation (copy, rename, ...) was
1010 # gitworkdone is True if a git operation (copy, rename, ...) was
1011 # performed already for the current file. Useful when the file
1011 # performed already for the current file. Useful when the file
1012 # section may have no hunk.
1012 # section may have no hunk.
1013 gitworkdone = False
1013 gitworkdone = False
1014 empty = None
1014 empty = None
1015
1015
1016 while True:
1016 while True:
1017 newfile = newgitfile = False
1017 newfile = newgitfile = False
1018 x = lr.readline()
1018 x = lr.readline()
1019 if not x:
1019 if not x:
1020 break
1020 break
1021 if current_hunk:
1021 if current_hunk:
1022 if x.startswith('\ '):
1022 if x.startswith('\ '):
1023 current_hunk.fix_newline()
1023 current_hunk.fix_newline()
1024 yield 'hunk', current_hunk
1024 yield 'hunk', current_hunk
1025 current_hunk = None
1025 current_hunk = None
1026 empty = False
1026 empty = False
1027 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
1027 if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or
1028 ((context is not False) and x.startswith('***************')))):
1028 ((context is not False) and x.startswith('***************')))):
1029 try:
1029 try:
1030 if context is None and x.startswith('***************'):
1030 if context is None and x.startswith('***************'):
1031 context = True
1031 context = True
1032 gpatch = changed.get(bfile)
1032 gpatch = changed.get(bfile)
1033 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
1033 create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD'
1034 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
1034 remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE'
1035 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
1035 current_hunk = hunk(x, hunknum + 1, lr, context, create, remove)
1036 except PatchError, err:
1036 except PatchError, err:
1037 ui.debug(err)
1037 ui.debug(err)
1038 current_hunk = None
1038 current_hunk = None
1039 continue
1039 continue
1040 hunknum += 1
1040 hunknum += 1
1041 if emitfile:
1041 if emitfile:
1042 emitfile = False
1042 emitfile = False
1043 yield 'file', (afile, bfile, current_hunk)
1043 yield 'file', (afile, bfile, current_hunk)
1044 empty = False
1044 empty = False
1045 elif state == BFILE and x.startswith('GIT binary patch'):
1045 elif state == BFILE and x.startswith('GIT binary patch'):
1046 current_hunk = binhunk(changed[bfile])
1046 current_hunk = binhunk(changed[bfile])
1047 hunknum += 1
1047 hunknum += 1
1048 if emitfile:
1048 if emitfile:
1049 emitfile = False
1049 emitfile = False
1050 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
1050 yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk)
1051 empty = False
1051 empty = False
1052 current_hunk.extract(lr)
1052 current_hunk.extract(lr)
1053 elif x.startswith('diff --git'):
1053 elif x.startswith('diff --git'):
1054 # check for git diff, scanning the whole patch file if needed
1054 # check for git diff, scanning the whole patch file if needed
1055 m = gitre.match(x)
1055 m = gitre.match(x)
1056 gitworkdone = False
1056 gitworkdone = False
1057 if m:
1057 if m:
1058 afile, bfile = m.group(1, 2)
1058 afile, bfile = m.group(1, 2)
1059 if not git:
1059 if not git:
1060 git = True
1060 git = True
1061 gitpatches = scangitpatch(lr, x)[1]
1061 gitpatches = scangitpatch(lr, x)[1]
1062 yield 'git', gitpatches
1062 yield 'git', gitpatches
1063 for gp in gitpatches:
1063 for gp in gitpatches:
1064 changed[gp.path] = gp
1064 changed[gp.path] = gp
1065 # else error?
1065 # else error?
1066 # copy/rename + modify should modify target, not source
1066 # copy/rename + modify should modify target, not source
1067 gp = changed.get(bfile)
1067 gp = changed.get(bfile)
1068 if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD')
1068 if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD')
1069 or gp.mode):
1069 or gp.mode):
1070 afile = bfile
1070 afile = bfile
1071 gitworkdone = True
1071 gitworkdone = True
1072 newgitfile = True
1072 newgitfile = True
1073 elif x.startswith('---'):
1073 elif x.startswith('---'):
1074 # check for a unified diff
1074 # check for a unified diff
1075 l2 = lr.readline()
1075 l2 = lr.readline()
1076 if not l2.startswith('+++'):
1076 if not l2.startswith('+++'):
1077 lr.push(l2)
1077 lr.push(l2)
1078 continue
1078 continue
1079 newfile = True
1079 newfile = True
1080 context = False
1080 context = False
1081 afile = parsefilename(x)
1081 afile = parsefilename(x)
1082 bfile = parsefilename(l2)
1082 bfile = parsefilename(l2)
1083 elif x.startswith('***'):
1083 elif x.startswith('***'):
1084 # check for a context diff
1084 # check for a context diff
1085 l2 = lr.readline()
1085 l2 = lr.readline()
1086 if not l2.startswith('---'):
1086 if not l2.startswith('---'):
1087 lr.push(l2)
1087 lr.push(l2)
1088 continue
1088 continue
1089 l3 = lr.readline()
1089 l3 = lr.readline()
1090 lr.push(l3)
1090 lr.push(l3)
1091 if not l3.startswith("***************"):
1091 if not l3.startswith("***************"):
1092 lr.push(l2)
1092 lr.push(l2)
1093 continue
1093 continue
1094 newfile = True
1094 newfile = True
1095 context = True
1095 context = True
1096 afile = parsefilename(x)
1096 afile = parsefilename(x)
1097 bfile = parsefilename(l2)
1097 bfile = parsefilename(l2)
1098
1098
1099 if newfile:
1099 if newfile:
1100 if empty:
1100 if empty:
1101 raise NoHunks
1101 raise NoHunks
1102 empty = not gitworkdone
1102 empty = not gitworkdone
1103 gitworkdone = False
1103 gitworkdone = False
1104
1104
1105 if newgitfile or newfile:
1105 if newgitfile or newfile:
1106 emitfile = True
1106 emitfile = True
1107 state = BFILE
1107 state = BFILE
1108 hunknum = 0
1108 hunknum = 0
1109 if current_hunk:
1109 if current_hunk:
1110 if current_hunk.complete():
1110 if current_hunk.complete():
1111 yield 'hunk', current_hunk
1111 yield 'hunk', current_hunk
1112 empty = False
1112 empty = False
1113 else:
1113 else:
1114 raise PatchError(_("malformed patch %s %s") % (afile,
1114 raise PatchError(_("malformed patch %s %s") % (afile,
1115 current_hunk.desc))
1115 current_hunk.desc))
1116
1116
1117 if (empty is None and not gitworkdone) or empty:
1117 if (empty is None and not gitworkdone) or empty:
1118 raise NoHunks
1118 raise NoHunks
1119
1119
1120
1120
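# Illustrative sketch (assumption, not part of the original module): one way
# a caller could consume the event stream documented above.  _applydiff()
# below is the real consumer; the helper name _counthunks is hypothetical.
def _counthunks(ui, fp):
    counts = {}
    current = None
    for state, values in iterhunks(ui, fp):
        if state == 'file':
            afile, bfile, firsthunk = values
            current = bfile
            counts.setdefault(current, 0)
        elif state == 'hunk' and current is not None:
            counts[current] += 1
        elif state == 'git':
            # 'values' is the list of gitpatch records for the whole patch
            pass
    return counts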
1121 def applydiff(ui, fp, changed, strip=1, sourcefile=None, eolmode='strict'):
1121 def applydiff(ui, fp, changed, strip=1, sourcefile=None, eolmode='strict'):
1122 """Reads a patch from fp and tries to apply it.
1122 """Reads a patch from fp and tries to apply it.
1123
1123
1124 The dict 'changed' is filled in with all of the filenames changed
1124 The dict 'changed' is filled in with all of the filenames changed
1125 by the patch. Returns 0 for a clean patch, -1 if any rejects were
1125 by the patch. Returns 0 for a clean patch, -1 if any rejects were
1126 found and 1 if there was any fuzz.
1126 found and 1 if there was any fuzz.
1127
1127
1128 If 'eolmode' is 'strict', the patch content and patched file are
1128 If 'eolmode' is 'strict', the patch content and patched file are
1129 read in binary mode. Otherwise, line endings are ignored when
1129 read in binary mode. Otherwise, line endings are ignored when
1130 patching then normalized according to 'eolmode'.
1130 patching then normalized according to 'eolmode'.
1131
1131
1132 Callers probably want to call 'updatedir' after this to apply
1132 Callers probably want to call 'updatedir' after this to apply
1133 certain categories of changes not done by this function.
1133 certain categories of changes not done by this function.
1134 """
1134 """
1135 return _applydiff(
1135 return _applydiff(
1136 ui, fp, patchfile, copyfile,
1136 ui, fp, patchfile, copyfile,
1137 changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode)
1137 changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode)
1138
1138
1139
1139
1140 def _applydiff(ui, fp, patcher, copyfn, changed, strip=1,
1140 def _applydiff(ui, fp, patcher, copyfn, changed, strip=1,
1141 sourcefile=None, eolmode='strict'):
1141 sourcefile=None, eolmode='strict'):
1142 rejects = 0
1142 rejects = 0
1143 err = 0
1143 err = 0
1144 current_file = None
1144 current_file = None
1145 cwd = os.getcwd()
1145 cwd = os.getcwd()
1146 opener = util.opener(cwd)
1146 opener = util.opener(cwd)
1147
1147
1148 def closefile():
1148 def closefile():
1149 if not current_file:
1149 if not current_file:
1150 return 0
1150 return 0
1151 if current_file.dirty:
1151 if current_file.dirty:
1152 current_file.writelines(current_file.fname, current_file.lines)
1152 current_file.writelines(current_file.fname, current_file.lines)
1153 current_file.write_rej()
1153 current_file.write_rej()
1154 return len(current_file.rej)
1154 return len(current_file.rej)
1155
1155
1156 for state, values in iterhunks(ui, fp, sourcefile):
1156 for state, values in iterhunks(ui, fp, sourcefile):
1157 if state == 'hunk':
1157 if state == 'hunk':
1158 if not current_file:
1158 if not current_file:
1159 continue
1159 continue
1160 ret = current_file.apply(values)
1160 ret = current_file.apply(values)
1161 if ret >= 0:
1161 if ret >= 0:
1162 changed.setdefault(current_file.fname, None)
1162 changed.setdefault(current_file.fname, None)
1163 if ret > 0:
1163 if ret > 0:
1164 err = 1
1164 err = 1
1165 elif state == 'file':
1165 elif state == 'file':
1166 rejects += closefile()
1166 rejects += closefile()
1167 afile, bfile, first_hunk = values
1167 afile, bfile, first_hunk = values
1168 try:
1168 try:
1169 if sourcefile:
1169 if sourcefile:
1170 current_file = patcher(ui, sourcefile, opener,
1170 current_file = patcher(ui, sourcefile, opener,
1171 eolmode=eolmode)
1171 eolmode=eolmode)
1172 else:
1172 else:
1173 current_file, missing = selectfile(afile, bfile,
1173 current_file, missing = selectfile(afile, bfile,
1174 first_hunk, strip)
1174 first_hunk, strip)
1175 current_file = patcher(ui, current_file, opener,
1175 current_file = patcher(ui, current_file, opener,
1176 missing=missing, eolmode=eolmode)
1176 missing=missing, eolmode=eolmode)
1177 except PatchError, err:
1177 except PatchError, err:
1178 ui.warn(str(err) + '\n')
1178 ui.warn(str(err) + '\n')
1179 current_file = None
1179 current_file = None
1180 rejects += 1
1180 rejects += 1
1181 continue
1181 continue
1182 elif state == 'git':
1182 elif state == 'git':
1183 for gp in values:
1183 for gp in values:
1184 gp.path = pathstrip(gp.path, strip - 1)[1]
1184 gp.path = pathstrip(gp.path, strip - 1)[1]
1185 if gp.oldpath:
1185 if gp.oldpath:
1186 gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1]
1186 gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1]
1187 if gp.op in ('COPY', 'RENAME'):
1187 if gp.op in ('COPY', 'RENAME'):
1188 copyfn(gp.oldpath, gp.path, cwd)
1188 copyfn(gp.oldpath, gp.path, cwd)
1189 changed[gp.path] = gp
1189 changed[gp.path] = gp
1190 else:
1190 else:
1191 raise util.Abort(_('unsupported parser state: %s') % state)
1191 raise util.Abort(_('unsupported parser state: %s') % state)
1192
1192
1193 rejects += closefile()
1193 rejects += closefile()
1194
1194
1195 if rejects:
1195 if rejects:
1196 return -1
1196 return -1
1197 return err
1197 return err
1198
1198
1199 def updatedir(ui, repo, patches, similarity=0):
1199 def updatedir(ui, repo, patches, similarity=0):
1200 '''Update dirstate after patch application according to metadata'''
1200 '''Update dirstate after patch application according to metadata'''
1201 if not patches:
1201 if not patches:
1202 return
1202 return
1203 copies = []
1203 copies = []
1204 removes = set()
1204 removes = set()
1205 cfiles = patches.keys()
1205 cfiles = patches.keys()
1206 cwd = repo.getcwd()
1206 cwd = repo.getcwd()
1207 if cwd:
1207 if cwd:
1208 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1208 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
1209 for f in patches:
1209 for f in patches:
1210 gp = patches[f]
1210 gp = patches[f]
1211 if not gp:
1211 if not gp:
1212 continue
1212 continue
1213 if gp.op == 'RENAME':
1213 if gp.op == 'RENAME':
1214 copies.append((gp.oldpath, gp.path))
1214 copies.append((gp.oldpath, gp.path))
1215 removes.add(gp.oldpath)
1215 removes.add(gp.oldpath)
1216 elif gp.op == 'COPY':
1216 elif gp.op == 'COPY':
1217 copies.append((gp.oldpath, gp.path))
1217 copies.append((gp.oldpath, gp.path))
1218 elif gp.op == 'DELETE':
1218 elif gp.op == 'DELETE':
1219 removes.add(gp.path)
1219 removes.add(gp.path)
1220
1220
1221 wctx = repo[None]
1221 wctx = repo[None]
1222 for src, dst in copies:
1222 for src, dst in copies:
1223 wctx.copy(src, dst)
1223 wctx.copy(src, dst)
1224 if (not similarity) and removes:
1224 if (not similarity) and removes:
1225 wctx.remove(sorted(removes), True)
1225 wctx.remove(sorted(removes), True)
1226
1226
1227 for f in patches:
1227 for f in patches:
1228 gp = patches[f]
1228 gp = patches[f]
1229 if gp and gp.mode:
1229 if gp and gp.mode:
1230 islink, isexec = gp.mode
1230 islink, isexec = gp.mode
1231 dst = repo.wjoin(gp.path)
1231 dst = repo.wjoin(gp.path)
1232 # patch won't create empty files
1232 # patch won't create empty files
1233 if gp.op == 'ADD' and not os.path.exists(dst):
1233 if gp.op == 'ADD' and not os.path.lexists(dst):
1234 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1234 flags = (isexec and 'x' or '') + (islink and 'l' or '')
1235 repo.wwrite(gp.path, '', flags)
1235 repo.wwrite(gp.path, '', flags)
1236 util.set_flags(dst, islink, isexec)
1236 util.set_flags(dst, islink, isexec)
1237 cmdutil.addremove(repo, cfiles, similarity=similarity)
1237 cmdutil.addremove(repo, cfiles, similarity=similarity)
1238 files = patches.keys()
1238 files = patches.keys()
1239 files.extend([r for r in removes if r not in files])
1239 files.extend([r for r in removes if r not in files])
1240 return sorted(files)
1240 return sorted(files)
1241
1241
1242 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1242 def externalpatch(patcher, args, patchname, ui, strip, cwd, files):
1243 """use <patcher> to apply <patchname> to the working directory.
1243 """use <patcher> to apply <patchname> to the working directory.
1244 returns whether patch was applied with fuzz factor."""
1244 returns whether patch was applied with fuzz factor."""
1245
1245
1246 fuzz = False
1246 fuzz = False
1247 if cwd:
1247 if cwd:
1248 args.append('-d %s' % util.shellquote(cwd))
1248 args.append('-d %s' % util.shellquote(cwd))
1249 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1249 fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip,
1250 util.shellquote(patchname)))
1250 util.shellquote(patchname)))
1251
1251
1252 for line in fp:
1252 for line in fp:
1253 line = line.rstrip()
1253 line = line.rstrip()
1254 ui.note(line + '\n')
1254 ui.note(line + '\n')
1255 if line.startswith('patching file '):
1255 if line.startswith('patching file '):
1256 pf = util.parse_patch_output(line)
1256 pf = util.parse_patch_output(line)
1257 printed_file = False
1257 printed_file = False
1258 files.setdefault(pf, None)
1258 files.setdefault(pf, None)
1259 elif line.find('with fuzz') >= 0:
1259 elif line.find('with fuzz') >= 0:
1260 fuzz = True
1260 fuzz = True
1261 if not printed_file:
1261 if not printed_file:
1262 ui.warn(pf + '\n')
1262 ui.warn(pf + '\n')
1263 printed_file = True
1263 printed_file = True
1264 ui.warn(line + '\n')
1264 ui.warn(line + '\n')
1265 elif line.find('saving rejects to file') >= 0:
1265 elif line.find('saving rejects to file') >= 0:
1266 ui.warn(line + '\n')
1266 ui.warn(line + '\n')
1267 elif line.find('FAILED') >= 0:
1267 elif line.find('FAILED') >= 0:
1268 if not printed_file:
1268 if not printed_file:
1269 ui.warn(pf + '\n')
1269 ui.warn(pf + '\n')
1270 printed_file = True
1270 printed_file = True
1271 ui.warn(line + '\n')
1271 ui.warn(line + '\n')
1272 code = fp.close()
1272 code = fp.close()
1273 if code:
1273 if code:
1274 raise PatchError(_("patch command failed: %s") %
1274 raise PatchError(_("patch command failed: %s") %
1275 util.explain_exit(code)[0])
1275 util.explain_exit(code)[0])
1276 return fuzz
1276 return fuzz
1277
1277
1278 def internalpatch(patchobj, ui, strip, cwd, files=None, eolmode='strict'):
1278 def internalpatch(patchobj, ui, strip, cwd, files=None, eolmode='strict'):
1279 """use builtin patch to apply <patchobj> to the working directory.
1279 """use builtin patch to apply <patchobj> to the working directory.
1280 returns whether patch was applied with fuzz factor."""
1280 returns whether patch was applied with fuzz factor."""
1281
1281
1282 if files is None:
1282 if files is None:
1283 files = {}
1283 files = {}
1284 if eolmode is None:
1284 if eolmode is None:
1285 eolmode = ui.config('patch', 'eol', 'strict')
1285 eolmode = ui.config('patch', 'eol', 'strict')
1286 if eolmode.lower() not in eolmodes:
1286 if eolmode.lower() not in eolmodes:
1287 raise util.Abort(_('unsupported line endings type: %s') % eolmode)
1287 raise util.Abort(_('unsupported line endings type: %s') % eolmode)
1288 eolmode = eolmode.lower()
1288 eolmode = eolmode.lower()
1289
1289
1290 try:
1290 try:
1291 fp = open(patchobj, 'rb')
1291 fp = open(patchobj, 'rb')
1292 except TypeError:
1292 except TypeError:
1293 fp = patchobj
1293 fp = patchobj
1294 if cwd:
1294 if cwd:
1295 curdir = os.getcwd()
1295 curdir = os.getcwd()
1296 os.chdir(cwd)
1296 os.chdir(cwd)
1297 try:
1297 try:
1298 ret = applydiff(ui, fp, files, strip=strip, eolmode=eolmode)
1298 ret = applydiff(ui, fp, files, strip=strip, eolmode=eolmode)
1299 finally:
1299 finally:
1300 if cwd:
1300 if cwd:
1301 os.chdir(curdir)
1301 os.chdir(curdir)
1302 if fp != patchobj:
1302 if fp != patchobj:
1303 fp.close()
1303 fp.close()
1304 if ret < 0:
1304 if ret < 0:
1305 raise PatchError
1305 raise PatchError
1306 return ret > 0
1306 return ret > 0
1307
1307
1308 def patch(patchname, ui, strip=1, cwd=None, files=None, eolmode='strict'):
1308 def patch(patchname, ui, strip=1, cwd=None, files=None, eolmode='strict'):
1309 """Apply <patchname> to the working directory.
1309 """Apply <patchname> to the working directory.
1310
1310
1311 'eolmode' specifies how end of lines should be handled. It can be:
1311 'eolmode' specifies how end of lines should be handled. It can be:
1312 - 'strict': inputs are read in binary mode, EOLs are preserved
1312 - 'strict': inputs are read in binary mode, EOLs are preserved
1313 - 'crlf': EOLs are ignored when patching and reset to CRLF
1313 - 'crlf': EOLs are ignored when patching and reset to CRLF
1314 - 'lf': EOLs are ignored when patching and reset to LF
1314 - 'lf': EOLs are ignored when patching and reset to LF
1315 - None: get it from user settings, default to 'strict'
1315 - None: get it from user settings, default to 'strict'
1316 'eolmode' is ignored when using an external patcher program.
1316 'eolmode' is ignored when using an external patcher program.
1317
1317
1318 Returns whether patch was applied with fuzz factor.
1318 Returns whether patch was applied with fuzz factor.
1319 """
1319 """
1320 patcher = ui.config('ui', 'patch')
1320 patcher = ui.config('ui', 'patch')
1321 args = []
1321 args = []
1322 if files is None:
1322 if files is None:
1323 files = {}
1323 files = {}
1324 try:
1324 try:
1325 if patcher:
1325 if patcher:
1326 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1326 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1327 files)
1327 files)
1328 else:
1328 else:
1329 try:
1329 try:
1330 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1330 return internalpatch(patchname, ui, strip, cwd, files, eolmode)
1331 except NoHunks:
1331 except NoHunks:
1332 ui.warn(_('internal patcher failed\n'
1332 ui.warn(_('internal patcher failed\n'
1333 'please report details to '
1333 'please report details to '
1334 'http://mercurial.selenic.com/bts/\n'
1334 'http://mercurial.selenic.com/bts/\n'
1335 'or mercurial@selenic.com\n'))
1335 'or mercurial@selenic.com\n'))
1336 patcher = (util.find_exe('gpatch') or util.find_exe('patch')
1336 patcher = (util.find_exe('gpatch') or util.find_exe('patch')
1337 or 'patch')
1337 or 'patch')
1338 ui.debug('no valid hunks found; trying with %r instead\n' %
1338 ui.debug('no valid hunks found; trying with %r instead\n' %
1339 patcher)
1339 patcher)
1340 if util.needbinarypatch():
1340 if util.needbinarypatch():
1341 args.append('--binary')
1341 args.append('--binary')
1342 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1342 return externalpatch(patcher, args, patchname, ui, strip, cwd,
1343 files)
1343 files)
1344 except PatchError, err:
1344 except PatchError, err:
1345 s = str(err)
1345 s = str(err)
1346 if s:
1346 if s:
1347 raise util.Abort(s)
1347 raise util.Abort(s)
1348 else:
1348 else:
1349 raise util.Abort(_('patch failed to apply'))
1349 raise util.Abort(_('patch failed to apply'))
1350
1350
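# Illustrative usage sketch (assumption, not part of the original module);
# 'repo' and 'fix.diff' are hypothetical names:
#
#   files = {}
#   fuzz = patch('fix.diff', ui, strip=1, cwd=repo.root, files=files,
#                eolmode=None)
#   # fuzz is True if any hunk needed a fuzz factor; 'files' now maps the
#   # touched filenames to their git patch records (or None), ready to be
#   # passed to updatedir().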
1351 def b85diff(to, tn):
1351 def b85diff(to, tn):
1352 '''print base85-encoded binary diff'''
1352 '''print base85-encoded binary diff'''
1353 def gitindex(text):
1353 def gitindex(text):
1354 if not text:
1354 if not text:
1355 return '0' * 40
1355 return '0' * 40
1356 l = len(text)
1356 l = len(text)
1357 s = util.sha1('blob %d\0' % l)
1357 s = util.sha1('blob %d\0' % l)
1358 s.update(text)
1358 s.update(text)
1359 return s.hexdigest()
1359 return s.hexdigest()
1360
1360
1361 def fmtline(line):
1361 def fmtline(line):
1362 l = len(line)
1362 l = len(line)
1363 if l <= 26:
1363 if l <= 26:
1364 l = chr(ord('A') + l - 1)
1364 l = chr(ord('A') + l - 1)
1365 else:
1365 else:
1366 l = chr(l - 26 + ord('a') - 1)
1366 l = chr(l - 26 + ord('a') - 1)
1367 return '%c%s\n' % (l, base85.b85encode(line, True))
1367 return '%c%s\n' % (l, base85.b85encode(line, True))
1368
1368
1369 def chunk(text, csize=52):
1369 def chunk(text, csize=52):
1370 l = len(text)
1370 l = len(text)
1371 i = 0
1371 i = 0
1372 while i < l:
1372 while i < l:
1373 yield text[i:i + csize]
1373 yield text[i:i + csize]
1374 i += csize
1374 i += csize
1375
1375
1376 tohash = gitindex(to)
1376 tohash = gitindex(to)
1377 tnhash = gitindex(tn)
1377 tnhash = gitindex(tn)
1378 if tohash == tnhash:
1378 if tohash == tnhash:
1379 return ""
1379 return ""
1380
1380
1381 # TODO: deltas
1381 # TODO: deltas
1382 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1382 ret = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
1383 (tohash, tnhash, len(tn))]
1383 (tohash, tnhash, len(tn))]
1384 for l in chunk(zlib.compress(tn)):
1384 for l in chunk(zlib.compress(tn)):
1385 ret.append(fmtline(l))
1385 ret.append(fmtline(l))
1386 ret.append('\n')
1386 ret.append('\n')
1387 return ''.join(ret)
1387 return ''.join(ret)
1388
1388
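# Illustrative sketch (assumption, not part of the original module): the
# one-character length prefix written by fmtline() above and decoded in
# binhunk.extract().  Lengths 1-26 map to 'A'-'Z' and 27-52 to 'a'-'z'; the
# helper names are hypothetical.
def _encodelen(n):
    if n <= 26:
        return chr(ord('A') + n - 1)
    return chr(n - 26 + ord('a') - 1)

def _decodelen(c):
    if 'A' <= c <= 'Z':
        return ord(c) - ord('A') + 1
    return ord(c) - ord('a') + 27
# _encodelen(1) == 'A', _encodelen(52) == 'z', _decodelen('b') == 28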
1389 class GitDiffRequired(Exception):
1389 class GitDiffRequired(Exception):
1390 pass
1390 pass
1391
1391
1392 def diffopts(ui, opts=None, untrusted=False):
1392 def diffopts(ui, opts=None, untrusted=False):
1393 def get(key, name=None, getter=ui.configbool):
1393 def get(key, name=None, getter=ui.configbool):
1394 return ((opts and opts.get(key)) or
1394 return ((opts and opts.get(key)) or
1395 getter('diff', name or key, None, untrusted=untrusted))
1395 getter('diff', name or key, None, untrusted=untrusted))
1396 return mdiff.diffopts(
1396 return mdiff.diffopts(
1397 text=opts and opts.get('text'),
1397 text=opts and opts.get('text'),
1398 git=get('git'),
1398 git=get('git'),
1399 nodates=get('nodates'),
1399 nodates=get('nodates'),
1400 showfunc=get('show_function', 'showfunc'),
1400 showfunc=get('show_function', 'showfunc'),
1401 ignorews=get('ignore_all_space', 'ignorews'),
1401 ignorews=get('ignore_all_space', 'ignorews'),
1402 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1402 ignorewsamount=get('ignore_space_change', 'ignorewsamount'),
1403 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1403 ignoreblanklines=get('ignore_blank_lines', 'ignoreblanklines'),
1404 context=get('unified', getter=ui.config))
1404 context=get('unified', getter=ui.config))
1405
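# Illustrative example (assumption, not part of the original module): the
# [diff] configuration section read by diffopts() above, e.g. in an hgrc:
#
#   [diff]
#   git = True
#   showfunc = True
#   nodates = True
#   unified = 5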
1405
1406 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
1406 def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
1407 losedatafn=None):
1407 losedatafn=None):
1408 '''yields diff of changes to files between two nodes, or node and
1408 '''yields diff of changes to files between two nodes, or node and
1409 working directory.
1409 working directory.
1410
1410
1411 if node1 is None, use first dirstate parent instead.
1411 if node1 is None, use first dirstate parent instead.
1412 if node2 is None, compare node1 with working directory.
1412 if node2 is None, compare node1 with working directory.
1413
1413
1414 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
1414 losedatafn(**kwarg) is a callable run when opts.upgrade=True and
1415 every time some change cannot be represented with the current
1415 every time some change cannot be represented with the current
1416 patch format. Return False to upgrade to git patch format, True to
1416 patch format. Return False to upgrade to git patch format, True to
1417 accept the loss or raise an exception to abort the diff. It is
1417 accept the loss or raise an exception to abort the diff. It is
1418 called with the name of current file being diffed as 'fn'. If set
1418 called with the name of current file being diffed as 'fn'. If set
1419 to None, patches will always be upgraded to git format when
1419 to None, patches will always be upgraded to git format when
1420 necessary.
1420 necessary.
1421 '''
1421 '''
1422
1422
1423 if opts is None:
1423 if opts is None:
1424 opts = mdiff.defaultopts
1424 opts = mdiff.defaultopts
1425
1425
1426 if not node1 and not node2:
1426 if not node1 and not node2:
1427 node1 = repo.dirstate.parents()[0]
1427 node1 = repo.dirstate.parents()[0]
1428
1428
1429 def lrugetfilectx():
1429 def lrugetfilectx():
1430 cache = {}
1430 cache = {}
1431 order = []
1431 order = []
1432 def getfilectx(f, ctx):
1432 def getfilectx(f, ctx):
1433 fctx = ctx.filectx(f, filelog=cache.get(f))
1433 fctx = ctx.filectx(f, filelog=cache.get(f))
1434 if f not in cache:
1434 if f not in cache:
1435 if len(cache) > 20:
1435 if len(cache) > 20:
1436 del cache[order.pop(0)]
1436 del cache[order.pop(0)]
1437 cache[f] = fctx.filelog()
1437 cache[f] = fctx.filelog()
1438 else:
1438 else:
1439 order.remove(f)
1439 order.remove(f)
1440 order.append(f)
1440 order.append(f)
1441 return fctx
1441 return fctx
1442 return getfilectx
1442 return getfilectx
1443 getfilectx = lrugetfilectx()
1443 getfilectx = lrugetfilectx()
1444
1444
1445 ctx1 = repo[node1]
1445 ctx1 = repo[node1]
1446 ctx2 = repo[node2]
1446 ctx2 = repo[node2]
1447
1447
1448 if not changes:
1448 if not changes:
1449 changes = repo.status(ctx1, ctx2, match=match)
1449 changes = repo.status(ctx1, ctx2, match=match)
1450 modified, added, removed = changes[:3]
1450 modified, added, removed = changes[:3]
1451
1451
1452 if not modified and not added and not removed:
1452 if not modified and not added and not removed:
1453 return []
1453 return []
1454
1454
1455 revs = None
1455 revs = None
1456 if not repo.ui.quiet:
1456 if not repo.ui.quiet:
1457 hexfunc = repo.ui.debugflag and hex or short
1457 hexfunc = repo.ui.debugflag and hex or short
1458 revs = [hexfunc(node) for node in [node1, node2] if node]
1458 revs = [hexfunc(node) for node in [node1, node2] if node]
1459
1459
1460 copy = {}
1460 copy = {}
1461 if opts.git or opts.upgrade:
1461 if opts.git or opts.upgrade:
1462 copy = copies.copies(repo, ctx1, ctx2, repo[nullid])[0]
1462 copy = copies.copies(repo, ctx1, ctx2, repo[nullid])[0]
1463
1463
1464 difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2,
1464 difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2,
1465 modified, added, removed, copy, getfilectx, opts, losedata)
1465 modified, added, removed, copy, getfilectx, opts, losedata)
1466 if opts.upgrade and not opts.git:
1466 if opts.upgrade and not opts.git:
1467 try:
1467 try:
1468 def losedata(fn):
1468 def losedata(fn):
1469 if not losedatafn or not losedatafn(fn=fn):
1469 if not losedatafn or not losedatafn(fn=fn):
1470 raise GitDiffRequired()
1470 raise GitDiffRequired()
1471 # Buffer the whole output until we are sure it can be generated
1471 # Buffer the whole output until we are sure it can be generated
1472 return list(difffn(opts.copy(git=False), losedata))
1472 return list(difffn(opts.copy(git=False), losedata))
1473 except GitDiffRequired:
1473 except GitDiffRequired:
1474 return difffn(opts.copy(git=True), None)
1474 return difffn(opts.copy(git=True), None)
1475 else:
1475 else:
1476 return difffn(opts, None)
1476 return difffn(opts, None)
1477
1477
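# Illustrative sketch (assumption, not part of the original module): a
# losedatafn callback with the semantics documented above.  It is only
# consulted when the diff options have 'upgrade' set and 'git' unset;
# 'vendor/' and 'myopts' are hypothetical names.
#
#   def losedata(fn=None):
#       # accept a lossy plain diff for vendored files, upgrade to git
#       # format (by returning False) for everything else
#       return fn is not None and fn.startswith('vendor/')
#
#   chunks = diff(repo, node1, node2, opts=myopts, losedatafn=losedata)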
1478 def difflabel(func, *args, **kw):
1478 def difflabel(func, *args, **kw):
1479 '''yields 2-tuples of (output, label) based on the output of func()'''
1479 '''yields 2-tuples of (output, label) based on the output of func()'''
1480 prefixes = [('diff', 'diff.diffline'),
1480 prefixes = [('diff', 'diff.diffline'),
1481 ('copy', 'diff.extended'),
1481 ('copy', 'diff.extended'),
1482 ('rename', 'diff.extended'),
1482 ('rename', 'diff.extended'),
1483 ('old', 'diff.extended'),
1483 ('old', 'diff.extended'),
1484 ('new', 'diff.extended'),
1484 ('new', 'diff.extended'),
1485 ('deleted', 'diff.extended'),
1485 ('deleted', 'diff.extended'),
1486 ('---', 'diff.file_a'),
1486 ('---', 'diff.file_a'),
1487 ('+++', 'diff.file_b'),
1487 ('+++', 'diff.file_b'),
1488 ('@@', 'diff.hunk'),
1488 ('@@', 'diff.hunk'),
1489 ('-', 'diff.deleted'),
1489 ('-', 'diff.deleted'),
1490 ('+', 'diff.inserted')]
1490 ('+', 'diff.inserted')]
1491
1491
1492 for chunk in func(*args, **kw):
1492 for chunk in func(*args, **kw):
1493 lines = chunk.split('\n')
1493 lines = chunk.split('\n')
1494 for i, line in enumerate(lines):
1494 for i, line in enumerate(lines):
1495 if i != 0:
1495 if i != 0:
1496 yield ('\n', '')
1496 yield ('\n', '')
1497 stripline = line
1497 stripline = line
1498 if line and line[0] in '+-':
1498 if line and line[0] in '+-':
1499 # highlight trailing whitespace, but only in changed lines
1499 # highlight trailing whitespace, but only in changed lines
1500 stripline = line.rstrip()
1500 stripline = line.rstrip()
1501 for prefix, label in prefixes:
1501 for prefix, label in prefixes:
1502 if stripline.startswith(prefix):
1502 if stripline.startswith(prefix):
1503 yield (stripline, label)
1503 yield (stripline, label)
1504 break
1504 break
1505 else:
1505 else:
1506 yield (line, '')
1506 yield (line, '')
1507 if line != stripline:
1507 if line != stripline:
1508 yield (line[len(stripline):], 'diff.trailingwhitespace')
1508 yield (line[len(stripline):], 'diff.trailingwhitespace')
1509
1509
1510 def diffui(*args, **kw):
1510 def diffui(*args, **kw):
1511 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
1511 '''like diff(), but yields 2-tuples of (output, label) for ui.write()'''
1512 return difflabel(diff, *args, **kw)
1512 return difflabel(diff, *args, **kw)
1513
1513
1514
1514
1515 def _addmodehdr(header, omode, nmode):
1515 def _addmodehdr(header, omode, nmode):
1516 if omode != nmode:
1516 if omode != nmode:
1517 header.append('old mode %s\n' % omode)
1517 header.append('old mode %s\n' % omode)
1518 header.append('new mode %s\n' % nmode)
1518 header.append('new mode %s\n' % nmode)
1519
1519
1520 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
1520 def trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
1521 copy, getfilectx, opts, losedatafn):
1521 copy, getfilectx, opts, losedatafn):
1522
1522
1523 date1 = util.datestr(ctx1.date())
1523 date1 = util.datestr(ctx1.date())
1524 man1 = ctx1.manifest()
1524 man1 = ctx1.manifest()
1525
1525
1526 gone = set()
1526 gone = set()
1527 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1527 gitmode = {'l': '120000', 'x': '100755', '': '100644'}
1528
1528
1529 copyto = dict([(v, k) for k, v in copy.items()])
1529 copyto = dict([(v, k) for k, v in copy.items()])
1530
1530
1531 if opts.git:
1531 if opts.git:
1532 revs = None
1532 revs = None
1533
1533
1534 for f in sorted(modified + added + removed):
1534 for f in sorted(modified + added + removed):
1535 to = None
1535 to = None
1536 tn = None
1536 tn = None
1537 dodiff = True
1537 dodiff = True
1538 header = []
1538 header = []
1539 if f in man1:
1539 if f in man1:
1540 to = getfilectx(f, ctx1).data()
1540 to = getfilectx(f, ctx1).data()
1541 if f not in removed:
1541 if f not in removed:
1542 tn = getfilectx(f, ctx2).data()
1542 tn = getfilectx(f, ctx2).data()
1543 a, b = f, f
1543 a, b = f, f
1544 if opts.git or losedatafn:
1544 if opts.git or losedatafn:
1545 if f in added:
1545 if f in added:
1546 mode = gitmode[ctx2.flags(f)]
1546 mode = gitmode[ctx2.flags(f)]
1547 if f in copy or f in copyto:
1547 if f in copy or f in copyto:
1548 if opts.git:
1548 if opts.git:
1549 if f in copy:
1549 if f in copy:
1550 a = copy[f]
1550 a = copy[f]
1551 else:
1551 else:
1552 a = copyto[f]
1552 a = copyto[f]
1553 omode = gitmode[man1.flags(a)]
1553 omode = gitmode[man1.flags(a)]
1554 _addmodehdr(header, omode, mode)
1554 _addmodehdr(header, omode, mode)
1555 if a in removed and a not in gone:
1555 if a in removed and a not in gone:
1556 op = 'rename'
1556 op = 'rename'
1557 gone.add(a)
1557 gone.add(a)
1558 else:
1558 else:
1559 op = 'copy'
1559 op = 'copy'
1560 header.append('%s from %s\n' % (op, a))
1560 header.append('%s from %s\n' % (op, a))
1561 header.append('%s to %s\n' % (op, f))
1561 header.append('%s to %s\n' % (op, f))
1562 to = getfilectx(a, ctx1).data()
1562 to = getfilectx(a, ctx1).data()
1563 else:
1563 else:
1564 losedatafn(f)
1564 losedatafn(f)
1565 else:
1565 else:
1566 if opts.git:
1566 if opts.git:
1567 header.append('new file mode %s\n' % mode)
1567 header.append('new file mode %s\n' % mode)
1568 elif ctx2.flags(f):
1568 elif ctx2.flags(f):
1569 losedatafn(f)
1569 losedatafn(f)
1570 if util.binary(tn):
1570 if util.binary(tn):
1571 if opts.git:
1571 if opts.git:
1572 dodiff = 'binary'
1572 dodiff = 'binary'
1573 else:
1573 else:
1574 losedatafn(f)
1574 losedatafn(f)
1575 if not opts.git and not tn:
1575 if not opts.git and not tn:
1576 # regular diffs cannot represent new empty file
1576 # regular diffs cannot represent new empty file
1577 losedatafn(f)
1577 losedatafn(f)
1578 elif f in removed:
1578 elif f in removed:
1579 if opts.git:
1579 if opts.git:
1580 # have we already reported a copy above?
1580 # have we already reported a copy above?
1581 if ((f in copy and copy[f] in added
1581 if ((f in copy and copy[f] in added
1582 and copyto[copy[f]] == f) or
1582 and copyto[copy[f]] == f) or
1583 (f in copyto and copyto[f] in added
1583 (f in copyto and copyto[f] in added
1584 and copy[copyto[f]] == f)):
1584 and copy[copyto[f]] == f)):
1585 dodiff = False
1585 dodiff = False
1586 else:
1586 else:
1587 header.append('deleted file mode %s\n' %
1587 header.append('deleted file mode %s\n' %
1588 gitmode[man1.flags(f)])
1588 gitmode[man1.flags(f)])
1589 elif not to:
1589 elif not to:
1590 # regular diffs cannot represent empty file deletion
1590 # regular diffs cannot represent empty file deletion
1591 losedatafn(f)
1591 losedatafn(f)
1592 else:
1592 else:
1593 oflag = man1.flags(f)
1593 oflag = man1.flags(f)
1594 nflag = ctx2.flags(f)
1594 nflag = ctx2.flags(f)
1595 binary = util.binary(to) or util.binary(tn)
1595 binary = util.binary(to) or util.binary(tn)
1596 if opts.git:
1596 if opts.git:
1597 _addmodehdr(header, gitmode[oflag], gitmode[nflag])
1597 _addmodehdr(header, gitmode[oflag], gitmode[nflag])
1598 if binary:
1598 if binary:
1599 dodiff = 'binary'
1599 dodiff = 'binary'
1600 elif binary or nflag != oflag:
1600 elif binary or nflag != oflag:
1601 losedatafn(f)
1601 losedatafn(f)
1602 if opts.git:
1602 if opts.git:
1603 header.insert(0, mdiff.diffline(revs, a, b, opts))
1603 header.insert(0, mdiff.diffline(revs, a, b, opts))
1604
1604
1605 if dodiff:
1605 if dodiff:
1606 if dodiff == 'binary':
1606 if dodiff == 'binary':
1607 text = b85diff(to, tn)
1607 text = b85diff(to, tn)
1608 else:
1608 else:
1609 text = mdiff.unidiff(to, date1,
1609 text = mdiff.unidiff(to, date1,
1610 # ctx2 date may be dynamic
1610 # ctx2 date may be dynamic
1611 tn, util.datestr(ctx2.date()),
1611 tn, util.datestr(ctx2.date()),
1612 a, b, revs, opts=opts)
1612 a, b, revs, opts=opts)
1613 if header and (text or len(header) > 1):
1613 if header and (text or len(header) > 1):
1614 yield ''.join(header)
1614 yield ''.join(header)
1615 if text:
1615 if text:
1616 yield text
1616 yield text
1617
1617
1618 def diffstatdata(lines):
1618 def diffstatdata(lines):
1619 filename, adds, removes = None, 0, 0
1619 filename, adds, removes = None, 0, 0
1620 for line in lines:
1620 for line in lines:
1621 if line.startswith('diff'):
1621 if line.startswith('diff'):
1622 if filename:
1622 if filename:
1623 isbinary = adds == 0 and removes == 0
1623 isbinary = adds == 0 and removes == 0
1624 yield (filename, adds, removes, isbinary)
1624 yield (filename, adds, removes, isbinary)
1625 # set numbers to 0 anyway when starting a new file
1625 # set numbers to 0 anyway when starting a new file
1626 adds, removes = 0, 0
1626 adds, removes = 0, 0
1627 if line.startswith('diff --git'):
1627 if line.startswith('diff --git'):
1628 filename = gitre.search(line).group(1)
1628 filename = gitre.search(line).group(1)
1629 else:
1629 else:
1630 # format: "diff -r ... -r ... filename"
1630 # format: "diff -r ... -r ... filename"
1631 filename = line.split(None, 5)[-1]
1631 filename = line.split(None, 5)[-1]
1632 elif line.startswith('+') and not line.startswith('+++'):
1632 elif line.startswith('+') and not line.startswith('+++'):
1633 adds += 1
1633 adds += 1
1634 elif line.startswith('-') and not line.startswith('---'):
1634 elif line.startswith('-') and not line.startswith('---'):
1635 removes += 1
1635 removes += 1
1636 if filename:
1636 if filename:
1637 isbinary = adds == 0 and removes == 0
1637 isbinary = adds == 0 and removes == 0
1638 yield (filename, adds, removes, isbinary)
1638 yield (filename, adds, removes, isbinary)
1639
1639
1640 def diffstat(lines, width=80, git=False):
1640 def diffstat(lines, width=80, git=False):
1641 output = []
1641 output = []
1642 stats = list(diffstatdata(lines))
1642 stats = list(diffstatdata(lines))
1643
1643
1644 maxtotal, maxname = 0, 0
1644 maxtotal, maxname = 0, 0
1645 totaladds, totalremoves = 0, 0
1645 totaladds, totalremoves = 0, 0
1646 hasbinary = False
1646 hasbinary = False
1647
1647
1648 sized = [(filename, adds, removes, isbinary, encoding.colwidth(filename))
1648 sized = [(filename, adds, removes, isbinary, encoding.colwidth(filename))
1649 for filename, adds, removes, isbinary in stats]
1649 for filename, adds, removes, isbinary in stats]
1650
1650
1651 for filename, adds, removes, isbinary, namewidth in sized:
1651 for filename, adds, removes, isbinary, namewidth in sized:
1652 totaladds += adds
1652 totaladds += adds
1653 totalremoves += removes
1653 totalremoves += removes
1654 maxname = max(maxname, namewidth)
1654 maxname = max(maxname, namewidth)
1655 maxtotal = max(maxtotal, adds + removes)
1655 maxtotal = max(maxtotal, adds + removes)
1656 if isbinary:
1656 if isbinary:
1657 hasbinary = True
1657 hasbinary = True
1658
1658
1659 countwidth = len(str(maxtotal))
1659 countwidth = len(str(maxtotal))
1660 if hasbinary and countwidth < 3:
1660 if hasbinary and countwidth < 3:
1661 countwidth = 3
1661 countwidth = 3
1662 graphwidth = width - countwidth - maxname - 6
1662 graphwidth = width - countwidth - maxname - 6
1663 if graphwidth < 10:
1663 if graphwidth < 10:
1664 graphwidth = 10
1664 graphwidth = 10
1665
1665
1666 def scale(i):
1666 def scale(i):
1667 if maxtotal <= graphwidth:
1667 if maxtotal <= graphwidth:
1668 return i
1668 return i
1669 # If diffstat runs out of room it doesn't print anything,
1669 # If diffstat runs out of room it doesn't print anything,
1670 # which isn't very useful, so always print at least one + or -
1670 # which isn't very useful, so always print at least one + or -
1671 # if there were at least some changes.
1671 # if there were at least some changes.
1672 return max(i * graphwidth // maxtotal, int(bool(i)))
1672 return max(i * graphwidth // maxtotal, int(bool(i)))
1673
1673
1674 for filename, adds, removes, isbinary, namewidth in sized:
1674 for filename, adds, removes, isbinary, namewidth in sized:
1675 if git and isbinary:
1675 if git and isbinary:
1676 count = 'Bin'
1676 count = 'Bin'
1677 else:
1677 else:
1678 count = adds + removes
1678 count = adds + removes
1679 pluses = '+' * scale(adds)
1679 pluses = '+' * scale(adds)
1680 minuses = '-' * scale(removes)
1680 minuses = '-' * scale(removes)
1681 output.append(' %s%s | %*s %s%s\n' %
1681 output.append(' %s%s | %*s %s%s\n' %
1682 (filename, ' ' * (maxname - namewidth),
1682 (filename, ' ' * (maxname - namewidth),
1683 countwidth, count,
1683 countwidth, count,
1684 pluses, minuses))
1684 pluses, minuses))
1685
1685
1686 if stats:
1686 if stats:
1687 output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
1687 output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
1688 % (len(stats), totaladds, totalremoves))
1688 % (len(stats), totaladds, totalremoves))
1689
1689
1690 return ''.join(output)
1690 return ''.join(output)
1691
1691
1692 def diffstatui(*args, **kw):
1692 def diffstatui(*args, **kw):
1693 '''like diffstat(), but yields 2-tuples of (output, label) for
1693 '''like diffstat(), but yields 2-tuples of (output, label) for
1694 ui.write()
1694 ui.write()
1695 '''
1695 '''
1696
1696
1697 for line in diffstat(*args, **kw).splitlines():
1697 for line in diffstat(*args, **kw).splitlines():
1698 if line and line[-1] in '+-':
1698 if line and line[-1] in '+-':
1699 name, graph = line.rsplit(' ', 1)
1699 name, graph = line.rsplit(' ', 1)
1700 yield (name + ' ', '')
1700 yield (name + ' ', '')
1701 m = re.search(r'\++', graph)
1701 m = re.search(r'\++', graph)
1702 if m:
1702 if m:
1703 yield (m.group(0), 'diffstat.inserted')
1703 yield (m.group(0), 'diffstat.inserted')
1704 m = re.search(r'-+', graph)
1704 m = re.search(r'-+', graph)
1705 if m:
1705 if m:
1706 yield (m.group(0), 'diffstat.deleted')
1706 yield (m.group(0), 'diffstat.deleted')
1707 else:
1707 else:
1708 yield (line, '')
1708 yield (line, '')
1709 yield ('\n', '')
1709 yield ('\n', '')
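As a rough illustration of how the (output, label) pairs yielded by difflabel() and diffstatui() can be consumed: the colorize() helper and the ANSI escape table below are hypothetical stand-ins for a color-aware ui.write(), not part of patch.py.

    ANSI = {'diff.inserted': '\033[32m', 'diff.deleted': '\033[31m'}

    def colorize(labeled):
        # join the labeled stream back into text, wrapping known labels
        # in ANSI color codes and passing everything else through untouched
        out = []
        for text, label in labeled:
            code = ANSI.get(label)
            if code and text:
                out.append(code + text + '\033[0m')
            else:
                out.append(text)
        return ''.join(out)

    sample = [('+added', 'diff.inserted'), ('\n', ''),
              ('-removed', 'diff.deleted'), ('\n', '')]
    print(colorize(sample))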
@@ -1,1395 +1,1395 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 def fakebuffer(sliceable, offset=0):
41 def fakebuffer(sliceable, offset=0):
42 return sliceable[offset:]
42 return sliceable[offset:]
43 if not hasattr(__builtin__, 'buffer'):
43 if not hasattr(__builtin__, 'buffer'):
44 __builtin__.buffer = fakebuffer
44 __builtin__.buffer = fakebuffer
45
45
46 import subprocess
46 import subprocess
47 closefds = os.name == 'posix'
47 closefds = os.name == 'posix'
48
48
49 def popen2(cmd, env=None, newlines=False):
49 def popen2(cmd, env=None, newlines=False):
50 # Setting bufsize to -1 lets the system decide the buffer size.
50 # Setting bufsize to -1 lets the system decide the buffer size.
51 # The default for bufsize is 0, meaning unbuffered. This leads to
51 # The default for bufsize is 0, meaning unbuffered. This leads to
52 # poor performance on Mac OS X: http://bugs.python.org/issue4194
52 # poor performance on Mac OS X: http://bugs.python.org/issue4194
53 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
53 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
54 close_fds=closefds,
54 close_fds=closefds,
55 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
55 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
56 universal_newlines=newlines,
56 universal_newlines=newlines,
57 env=env)
57 env=env)
58 return p.stdin, p.stdout
58 return p.stdin, p.stdout
59
59
60 def popen3(cmd, env=None, newlines=False):
60 def popen3(cmd, env=None, newlines=False):
61 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
61 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
62 close_fds=closefds,
62 close_fds=closefds,
63 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
63 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
64 stderr=subprocess.PIPE,
64 stderr=subprocess.PIPE,
65 universal_newlines=newlines,
65 universal_newlines=newlines,
66 env=env)
66 env=env)
67 return p.stdin, p.stdout, p.stderr
67 return p.stdin, p.stdout, p.stderr
68
68
69 def version():
69 def version():
70 """Return version information if available."""
70 """Return version information if available."""
71 try:
71 try:
72 import __version__
72 import __version__
73 return __version__.version
73 return __version__.version
74 except ImportError:
74 except ImportError:
75 return 'unknown'
75 return 'unknown'
76
76
77 # used by parsedate
77 # used by parsedate
78 defaultdateformats = (
78 defaultdateformats = (
79 '%Y-%m-%d %H:%M:%S',
79 '%Y-%m-%d %H:%M:%S',
80 '%Y-%m-%d %I:%M:%S%p',
80 '%Y-%m-%d %I:%M:%S%p',
81 '%Y-%m-%d %H:%M',
81 '%Y-%m-%d %H:%M',
82 '%Y-%m-%d %I:%M%p',
82 '%Y-%m-%d %I:%M%p',
83 '%Y-%m-%d',
83 '%Y-%m-%d',
84 '%m-%d',
84 '%m-%d',
85 '%m/%d',
85 '%m/%d',
86 '%m/%d/%y',
86 '%m/%d/%y',
87 '%m/%d/%Y',
87 '%m/%d/%Y',
88 '%a %b %d %H:%M:%S %Y',
88 '%a %b %d %H:%M:%S %Y',
89 '%a %b %d %I:%M:%S%p %Y',
89 '%a %b %d %I:%M:%S%p %Y',
90 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
90 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
91 '%b %d %H:%M:%S %Y',
91 '%b %d %H:%M:%S %Y',
92 '%b %d %I:%M:%S%p %Y',
92 '%b %d %I:%M:%S%p %Y',
93 '%b %d %H:%M:%S',
93 '%b %d %H:%M:%S',
94 '%b %d %I:%M:%S%p',
94 '%b %d %I:%M:%S%p',
95 '%b %d %H:%M',
95 '%b %d %H:%M',
96 '%b %d %I:%M%p',
96 '%b %d %I:%M%p',
97 '%b %d %Y',
97 '%b %d %Y',
98 '%b %d',
98 '%b %d',
99 '%H:%M:%S',
99 '%H:%M:%S',
100 '%I:%M:%S%p',
100 '%I:%M:%S%p',
101 '%H:%M',
101 '%H:%M',
102 '%I:%M%p',
102 '%I:%M%p',
103 )
103 )
104
104
105 extendeddateformats = defaultdateformats + (
105 extendeddateformats = defaultdateformats + (
106 "%Y",
106 "%Y",
107 "%Y-%m",
107 "%Y-%m",
108 "%b",
108 "%b",
109 "%b %Y",
109 "%b %Y",
110 )
110 )
111
111
112 def cachefunc(func):
112 def cachefunc(func):
113 '''cache the result of function calls'''
113 '''cache the result of function calls'''
114 # XXX doesn't handle keywords args
114 # XXX doesn't handle keywords args
115 cache = {}
115 cache = {}
116 if func.func_code.co_argcount == 1:
116 if func.func_code.co_argcount == 1:
117 # we gain a small amount of time because
117 # we gain a small amount of time because
118 # we don't need to pack/unpack the list
118 # we don't need to pack/unpack the list
119 def f(arg):
119 def f(arg):
120 if arg not in cache:
120 if arg not in cache:
121 cache[arg] = func(arg)
121 cache[arg] = func(arg)
122 return cache[arg]
122 return cache[arg]
123 else:
123 else:
124 def f(*args):
124 def f(*args):
125 if args not in cache:
125 if args not in cache:
126 cache[args] = func(*args)
126 cache[args] = func(*args)
127 return cache[args]
127 return cache[args]
128
128
129 return f
129 return f
130
130
131 def lrucachefunc(func):
131 def lrucachefunc(func):
132 '''cache most recent results of function calls'''
132 '''cache most recent results of function calls'''
133 cache = {}
133 cache = {}
134 order = []
134 order = []
135 if func.func_code.co_argcount == 1:
135 if func.func_code.co_argcount == 1:
136 def f(arg):
136 def f(arg):
137 if arg not in cache:
137 if arg not in cache:
138 if len(cache) > 20:
138 if len(cache) > 20:
139 del cache[order.pop(0)]
139 del cache[order.pop(0)]
140 cache[arg] = func(arg)
140 cache[arg] = func(arg)
141 else:
141 else:
142 order.remove(arg)
142 order.remove(arg)
143 order.append(arg)
143 order.append(arg)
144 return cache[arg]
144 return cache[arg]
145 else:
145 else:
146 def f(*args):
146 def f(*args):
147 if args not in cache:
147 if args not in cache:
148 if len(cache) > 20:
148 if len(cache) > 20:
149 del cache[order.pop(0)]
149 del cache[order.pop(0)]
150 cache[args] = func(*args)
150 cache[args] = func(*args)
151 else:
151 else:
152 order.remove(args)
152 order.remove(args)
153 order.append(args)
153 order.append(args)
154 return cache[args]
154 return cache[args]
155
155
156 return f
156 return f
157
157
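A brief usage sketch for the memoizing wrappers above; expensive() is made up for illustration, while lrucachefunc is the helper defined just above. Repeated calls with the same argument are served from the cache, and the least recently used entries are evicted once more than 20 results are held.

    calls = []
    def expensive(n):
        calls.append(n)            # record how often the real function runs
        return n * n

    fast = lrucachefunc(expensive)
    fast(3); fast(3); fast(4)
    assert calls == [3, 4]         # the second fast(3) came from the cache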
158 class propertycache(object):
158 class propertycache(object):
159 def __init__(self, func):
159 def __init__(self, func):
160 self.func = func
160 self.func = func
161 self.name = func.__name__
161 self.name = func.__name__
162 def __get__(self, obj, type=None):
162 def __get__(self, obj, type=None):
163 result = self.func(obj)
163 result = self.func(obj)
164 setattr(obj, self.name, result)
164 setattr(obj, self.name, result)
165 return result
165 return result
166
166
167 def pipefilter(s, cmd):
167 def pipefilter(s, cmd):
168 '''filter string S through command CMD, returning its output'''
168 '''filter string S through command CMD, returning its output'''
169 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
169 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
170 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
170 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
171 pout, perr = p.communicate(s)
171 pout, perr = p.communicate(s)
172 return pout
172 return pout
173
173
174 def tempfilter(s, cmd):
174 def tempfilter(s, cmd):
175 '''filter string S through a pair of temporary files with CMD.
175 '''filter string S through a pair of temporary files with CMD.
176 CMD is used as a template to create the real command to be run,
176 CMD is used as a template to create the real command to be run,
177 with the strings INFILE and OUTFILE replaced by the real names of
177 with the strings INFILE and OUTFILE replaced by the real names of
178 the temporary files generated.'''
178 the temporary files generated.'''
179 inname, outname = None, None
179 inname, outname = None, None
180 try:
180 try:
181 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
181 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
182 fp = os.fdopen(infd, 'wb')
182 fp = os.fdopen(infd, 'wb')
183 fp.write(s)
183 fp.write(s)
184 fp.close()
184 fp.close()
185 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
185 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
186 os.close(outfd)
186 os.close(outfd)
187 cmd = cmd.replace('INFILE', inname)
187 cmd = cmd.replace('INFILE', inname)
188 cmd = cmd.replace('OUTFILE', outname)
188 cmd = cmd.replace('OUTFILE', outname)
189 code = os.system(cmd)
189 code = os.system(cmd)
190 if sys.platform == 'OpenVMS' and code & 1:
190 if sys.platform == 'OpenVMS' and code & 1:
191 code = 0
191 code = 0
192 if code:
192 if code:
193 raise Abort(_("command '%s' failed: %s") %
193 raise Abort(_("command '%s' failed: %s") %
194 (cmd, explain_exit(code)))
194 (cmd, explain_exit(code)))
195 return open(outname, 'rb').read()
195 return open(outname, 'rb').read()
196 finally:
196 finally:
197 try:
197 try:
198 if inname:
198 if inname:
199 os.unlink(inname)
199 os.unlink(inname)
200 except:
200 except:
201 pass
201 pass
202 try:
202 try:
203 if outname:
203 if outname:
204 os.unlink(outname)
204 os.unlink(outname)
205 except:
205 except:
206 pass
206 pass
207
207
208 filtertable = {
208 filtertable = {
209 'tempfile:': tempfilter,
209 'tempfile:': tempfilter,
210 'pipe:': pipefilter,
210 'pipe:': pipefilter,
211 }
211 }
212
212
213 def filter(s, cmd):
213 def filter(s, cmd):
214 "filter a string through a command that transforms its input to its output"
214 "filter a string through a command that transforms its input to its output"
215 for name, fn in filtertable.iteritems():
215 for name, fn in filtertable.iteritems():
216 if cmd.startswith(name):
216 if cmd.startswith(name):
217 return fn(s, cmd[len(name):].lstrip())
217 return fn(s, cmd[len(name):].lstrip())
218 return pipefilter(s, cmd)
218 return pipefilter(s, cmd)
219
219
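To make the dispatch concrete, two hypothetical calls (assuming a POSIX shell with tr and sed available): 'pipe:' streams the data through the command's stdin and stdout, while 'tempfile:' writes the data to a temporary file and substitutes INFILE/OUTFILE in the command with real file names.

    upper = filter('mercurial\n', 'pipe: tr a-z A-Z')
    # upper == 'MERCURIAL\n'
    swapped = filter('a b c\n', 'tempfile: sed s/b/x/ INFILE > OUTFILE')
    # swapped == 'a x c\n'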
220 def binary(s):
220 def binary(s):
221 """return true if a string is binary data"""
221 """return true if a string is binary data"""
222 return bool(s and '\0' in s)
222 return bool(s and '\0' in s)
223
223
224 def increasingchunks(source, min=1024, max=65536):
224 def increasingchunks(source, min=1024, max=65536):
225 '''return no less than min bytes per chunk while data remains,
225 '''return no less than min bytes per chunk while data remains,
226 doubling min after each chunk until it reaches max'''
226 doubling min after each chunk until it reaches max'''
227 def log2(x):
227 def log2(x):
228 if not x:
228 if not x:
229 return 0
229 return 0
230 i = 0
230 i = 0
231 while x:
231 while x:
232 x >>= 1
232 x >>= 1
233 i += 1
233 i += 1
234 return i - 1
234 return i - 1
235
235
236 buf = []
236 buf = []
237 blen = 0
237 blen = 0
238 for chunk in source:
238 for chunk in source:
239 buf.append(chunk)
239 buf.append(chunk)
240 blen += len(chunk)
240 blen += len(chunk)
241 if blen >= min:
241 if blen >= min:
242 if min < max:
242 if min < max:
243 min = min << 1
243 min = min << 1
244 nmin = 1 << log2(blen)
244 nmin = 1 << log2(blen)
245 if nmin > min:
245 if nmin > min:
246 min = nmin
246 min = nmin
247 if min > max:
247 if min > max:
248 min = max
248 min = max
249 yield ''.join(buf)
249 yield ''.join(buf)
250 blen = 0
250 blen = 0
251 buf = []
251 buf = []
252 if buf:
252 if buf:
253 yield ''.join(buf)
253 yield ''.join(buf)
254
254
255 Abort = error.Abort
255 Abort = error.Abort
256
256
257 def always(fn):
257 def always(fn):
258 return True
258 return True
259
259
260 def never(fn):
260 def never(fn):
261 return False
261 return False
262
262
263 def pathto(root, n1, n2):
263 def pathto(root, n1, n2):
264 '''return the relative path from one place to another.
264 '''return the relative path from one place to another.
265 root should use os.sep to separate directories
265 root should use os.sep to separate directories
266 n1 should use os.sep to separate directories
266 n1 should use os.sep to separate directories
267 n2 should use "/" to separate directories
267 n2 should use "/" to separate directories
268 returns an os.sep-separated path.
268 returns an os.sep-separated path.
269
269
270 If n1 is a relative path, it's assumed it's
270 If n1 is a relative path, it's assumed it's
271 relative to root.
271 relative to root.
272 n2 should always be relative to root.
272 n2 should always be relative to root.
273 '''
273 '''
274 if not n1:
274 if not n1:
275 return localpath(n2)
275 return localpath(n2)
276 if os.path.isabs(n1):
276 if os.path.isabs(n1):
277 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
277 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
278 return os.path.join(root, localpath(n2))
278 return os.path.join(root, localpath(n2))
279 n2 = '/'.join((pconvert(root), n2))
279 n2 = '/'.join((pconvert(root), n2))
280 a, b = splitpath(n1), n2.split('/')
280 a, b = splitpath(n1), n2.split('/')
281 a.reverse()
281 a.reverse()
282 b.reverse()
282 b.reverse()
283 while a and b and a[-1] == b[-1]:
283 while a and b and a[-1] == b[-1]:
284 a.pop()
284 a.pop()
285 b.pop()
285 b.pop()
286 b.reverse()
286 b.reverse()
287 return os.sep.join((['..'] * len(a)) + b) or '.'
287 return os.sep.join((['..'] * len(a)) + b) or '.'
288
288
289 def canonpath(root, cwd, myname):
289 def canonpath(root, cwd, myname):
290 """return the canonical path of myname, given cwd and root"""
290 """return the canonical path of myname, given cwd and root"""
291 if endswithsep(root):
291 if endswithsep(root):
292 rootsep = root
292 rootsep = root
293 else:
293 else:
294 rootsep = root + os.sep
294 rootsep = root + os.sep
295 name = myname
295 name = myname
296 if not os.path.isabs(name):
296 if not os.path.isabs(name):
297 name = os.path.join(root, cwd, name)
297 name = os.path.join(root, cwd, name)
298 name = os.path.normpath(name)
298 name = os.path.normpath(name)
299 audit_path = path_auditor(root)
299 audit_path = path_auditor(root)
300 if name != rootsep and name.startswith(rootsep):
300 if name != rootsep and name.startswith(rootsep):
301 name = name[len(rootsep):]
301 name = name[len(rootsep):]
302 audit_path(name)
302 audit_path(name)
303 return pconvert(name)
303 return pconvert(name)
304 elif name == root:
304 elif name == root:
305 return ''
305 return ''
306 else:
306 else:
307 # Determine whether `name' is in the hierarchy at or beneath `root',
307 # Determine whether `name' is in the hierarchy at or beneath `root',
308 # by iterating name=dirname(name) until that causes no change (can't
308 # by iterating name=dirname(name) until that causes no change (can't
309 # check name == '/', because that doesn't work on windows). For each
309 # check name == '/', because that doesn't work on windows). For each
310 # `name', compare dev/inode numbers. If they match, the list `rel'
310 # `name', compare dev/inode numbers. If they match, the list `rel'
311 # holds the reversed list of components making up the relative file
311 # holds the reversed list of components making up the relative file
312 # name we want.
312 # name we want.
313 root_st = os.stat(root)
313 root_st = os.stat(root)
314 rel = []
314 rel = []
315 while True:
315 while True:
316 try:
316 try:
317 name_st = os.stat(name)
317 name_st = os.stat(name)
318 except OSError:
318 except OSError:
319 break
319 break
320 if samestat(name_st, root_st):
320 if samestat(name_st, root_st):
321 if not rel:
321 if not rel:
322 # name was actually the same as root (maybe a symlink)
322 # name was actually the same as root (maybe a symlink)
323 return ''
323 return ''
324 rel.reverse()
324 rel.reverse()
325 name = os.path.join(*rel)
325 name = os.path.join(*rel)
326 audit_path(name)
326 audit_path(name)
327 return pconvert(name)
327 return pconvert(name)
328 dirname, basename = os.path.split(name)
328 dirname, basename = os.path.split(name)
329 rel.append(basename)
329 rel.append(basename)
330 if dirname == name:
330 if dirname == name:
331 break
331 break
332 name = dirname
332 name = dirname
333
333
334 raise Abort('%s not under root' % myname)
334 raise Abort('%s not under root' % myname)
335
335
336 _hgexecutable = None
336 _hgexecutable = None
337
337
338 def main_is_frozen():
338 def main_is_frozen():
339 """return True if we are a frozen executable.
339 """return True if we are a frozen executable.
340
340
341 The code supports py2exe (most common, Windows only) and tools/freeze
341 The code supports py2exe (most common, Windows only) and tools/freeze
342 (portable, not much used).
342 (portable, not much used).
343 """
343 """
344 return (hasattr(sys, "frozen") or # new py2exe
344 return (hasattr(sys, "frozen") or # new py2exe
345 hasattr(sys, "importers") or # old py2exe
345 hasattr(sys, "importers") or # old py2exe
346 imp.is_frozen("__main__")) # tools/freeze
346 imp.is_frozen("__main__")) # tools/freeze
347
347
348 def hgexecutable():
348 def hgexecutable():
349 """return location of the 'hg' executable.
349 """return location of the 'hg' executable.
350
350
351 Defaults to $HG or 'hg' in the search path.
351 Defaults to $HG or 'hg' in the search path.
352 """
352 """
353 if _hgexecutable is None:
353 if _hgexecutable is None:
354 hg = os.environ.get('HG')
354 hg = os.environ.get('HG')
355 if hg:
355 if hg:
356 set_hgexecutable(hg)
356 set_hgexecutable(hg)
357 elif main_is_frozen():
357 elif main_is_frozen():
358 set_hgexecutable(sys.executable)
358 set_hgexecutable(sys.executable)
359 else:
359 else:
360 exe = find_exe('hg') or os.path.basename(sys.argv[0])
360 exe = find_exe('hg') or os.path.basename(sys.argv[0])
361 set_hgexecutable(exe)
361 set_hgexecutable(exe)
362 return _hgexecutable
362 return _hgexecutable
363
363
364 def set_hgexecutable(path):
364 def set_hgexecutable(path):
365 """set location of the 'hg' executable"""
365 """set location of the 'hg' executable"""
366 global _hgexecutable
366 global _hgexecutable
367 _hgexecutable = path
367 _hgexecutable = path
368
368
369 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
369 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
370 '''enhanced shell command execution.
370 '''enhanced shell command execution.
371 run with environment maybe modified, maybe in different dir.
371 run with environment maybe modified, maybe in different dir.
372
372
373 if command fails and onerr is None, return status. if ui object,
373 if command fails and onerr is None, return status. if ui object,
374 print error message and return status, else raise onerr object as
374 print error message and return status, else raise onerr object as
375 exception.
375 exception.
376
376
377 if out is specified, it is assumed to be a file-like object that has a
377 if out is specified, it is assumed to be a file-like object that has a
378 write() method. stdout and stderr will be redirected to out.'''
378 write() method. stdout and stderr will be redirected to out.'''
379 def py2shell(val):
379 def py2shell(val):
380 'convert python object into string that is useful to shell'
380 'convert python object into string that is useful to shell'
381 if val is None or val is False:
381 if val is None or val is False:
382 return '0'
382 return '0'
383 if val is True:
383 if val is True:
384 return '1'
384 return '1'
385 return str(val)
385 return str(val)
386 origcmd = cmd
386 origcmd = cmd
387 if os.name == 'nt':
387 if os.name == 'nt':
388 cmd = '"%s"' % cmd
388 cmd = '"%s"' % cmd
389 env = dict(os.environ)
389 env = dict(os.environ)
390 env.update((k, py2shell(v)) for k, v in environ.iteritems())
390 env.update((k, py2shell(v)) for k, v in environ.iteritems())
391 env['HG'] = hgexecutable()
391 env['HG'] = hgexecutable()
392 if out is None:
392 if out is None:
393 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
393 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
394 env=env, cwd=cwd)
394 env=env, cwd=cwd)
395 else:
395 else:
396 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
396 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
397 env=env, cwd=cwd, stdout=subprocess.PIPE,
397 env=env, cwd=cwd, stdout=subprocess.PIPE,
398 stderr=subprocess.STDOUT)
398 stderr=subprocess.STDOUT)
399 for line in proc.stdout:
399 for line in proc.stdout:
400 out.write(line)
400 out.write(line)
401 proc.wait()
401 proc.wait()
402 rc = proc.returncode
402 rc = proc.returncode
403 if sys.platform == 'OpenVMS' and rc & 1:
403 if sys.platform == 'OpenVMS' and rc & 1:
404 rc = 0
404 rc = 0
405 if rc and onerr:
405 if rc and onerr:
406 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
406 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
407 explain_exit(rc)[0])
407 explain_exit(rc)[0])
408 if errprefix:
408 if errprefix:
409 errmsg = '%s: %s' % (errprefix, errmsg)
409 errmsg = '%s: %s' % (errprefix, errmsg)
410 try:
410 try:
411 onerr.warn(errmsg + '\n')
411 onerr.warn(errmsg + '\n')
412 except AttributeError:
412 except AttributeError:
413 raise onerr(errmsg)
413 raise onerr(errmsg)
414 return rc
414 return rc
415
415
416 def checksignature(func):
416 def checksignature(func):
417 '''wrap a function with code to check for calling errors'''
417 '''wrap a function with code to check for calling errors'''
418 def check(*args, **kwargs):
418 def check(*args, **kwargs):
419 try:
419 try:
420 return func(*args, **kwargs)
420 return func(*args, **kwargs)
421 except TypeError:
421 except TypeError:
422 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
422 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
423 raise error.SignatureError
423 raise error.SignatureError
424 raise
424 raise
425
425
426 return check
426 return check
427
427
428 # os.path.lexists is not available on python2.3
428 # os.path.lexists is not available on python2.3
429 def lexists(filename):
429 def lexists(filename):
430 "test whether a file with this name exists. does not follow symlinks"
430 "test whether a file with this name exists. does not follow symlinks"
431 try:
431 try:
432 os.lstat(filename)
432 os.lstat(filename)
433 except:
433 except:
434 return False
434 return False
435 return True
435 return True
436
436
437 def unlink(f):
437 def unlink(f):
438 """unlink and remove the directory if it is empty"""
438 """unlink and remove the directory if it is empty"""
439 os.unlink(f)
439 os.unlink(f)
440 # try removing directories that might now be empty
440 # try removing directories that might now be empty
441 try:
441 try:
442 os.removedirs(os.path.dirname(f))
442 os.removedirs(os.path.dirname(f))
443 except OSError:
443 except OSError:
444 pass
444 pass
445
445
446 def copyfile(src, dest):
446 def copyfile(src, dest):
447 "copy a file, preserving mode and atime/mtime"
447 "copy a file, preserving mode and atime/mtime"
448 if os.path.islink(src):
448 if os.path.islink(src):
449 try:
449 try:
450 os.unlink(dest)
450 os.unlink(dest)
451 except:
451 except:
452 pass
452 pass
453 os.symlink(os.readlink(src), dest)
453 os.symlink(os.readlink(src), dest)
454 else:
454 else:
455 try:
455 try:
456 shutil.copyfile(src, dest)
456 shutil.copyfile(src, dest)
457 shutil.copystat(src, dest)
457 shutil.copystat(src, dest)
458 except shutil.Error, inst:
458 except shutil.Error, inst:
459 raise Abort(str(inst))
459 raise Abort(str(inst))
460
460
461 def copyfiles(src, dst, hardlink=None):
461 def copyfiles(src, dst, hardlink=None):
462 """Copy a directory tree using hardlinks if possible"""
462 """Copy a directory tree using hardlinks if possible"""
463
463
464 if hardlink is None:
464 if hardlink is None:
465 hardlink = (os.stat(src).st_dev ==
465 hardlink = (os.stat(src).st_dev ==
466 os.stat(os.path.dirname(dst)).st_dev)
466 os.stat(os.path.dirname(dst)).st_dev)
467
467
468 num = 0
468 num = 0
469 if os.path.isdir(src):
469 if os.path.isdir(src):
470 os.mkdir(dst)
470 os.mkdir(dst)
471 for name, kind in osutil.listdir(src):
471 for name, kind in osutil.listdir(src):
472 srcname = os.path.join(src, name)
472 srcname = os.path.join(src, name)
473 dstname = os.path.join(dst, name)
473 dstname = os.path.join(dst, name)
474 hardlink, n = copyfiles(srcname, dstname, hardlink)
474 hardlink, n = copyfiles(srcname, dstname, hardlink)
475 num += n
475 num += n
476 else:
476 else:
477 if hardlink:
477 if hardlink:
478 try:
478 try:
479 os_link(src, dst)
479 os_link(src, dst)
480 except (IOError, OSError):
480 except (IOError, OSError):
481 hardlink = False
481 hardlink = False
482 shutil.copy(src, dst)
482 shutil.copy(src, dst)
483 else:
483 else:
484 shutil.copy(src, dst)
484 shutil.copy(src, dst)
485 num += 1
485 num += 1
486
486
487 return hardlink, num
487 return hardlink, num
488
488
489 class path_auditor(object):
489 class path_auditor(object):
490 '''ensure that a filesystem path contains no banned components.
490 '''ensure that a filesystem path contains no banned components.
491 the following properties of a path are checked:
491 the following properties of a path are checked:
492
492
493 - under top-level .hg
493 - under top-level .hg
494 - starts at the root of a windows drive
494 - starts at the root of a windows drive
495 - contains ".."
495 - contains ".."
496 - traverses a symlink (e.g. a/symlink_here/b)
496 - traverses a symlink (e.g. a/symlink_here/b)
497 - inside a nested repository'''
497 - inside a nested repository'''
498
498
499 def __init__(self, root):
499 def __init__(self, root):
500 self.audited = set()
500 self.audited = set()
501 self.auditeddir = set()
501 self.auditeddir = set()
502 self.root = root
502 self.root = root
503
503
504 def __call__(self, path):
504 def __call__(self, path):
505 if path in self.audited:
505 if path in self.audited:
506 return
506 return
507 normpath = os.path.normcase(path)
507 normpath = os.path.normcase(path)
508 parts = splitpath(normpath)
508 parts = splitpath(normpath)
509 if (os.path.splitdrive(path)[0]
509 if (os.path.splitdrive(path)[0]
510 or parts[0].lower() in ('.hg', '.hg.', '')
510 or parts[0].lower() in ('.hg', '.hg.', '')
511 or os.pardir in parts):
511 or os.pardir in parts):
512 raise Abort(_("path contains illegal component: %s") % path)
512 raise Abort(_("path contains illegal component: %s") % path)
513 if '.hg' in path.lower():
513 if '.hg' in path.lower():
514 lparts = [p.lower() for p in parts]
514 lparts = [p.lower() for p in parts]
515 for p in '.hg', '.hg.':
515 for p in '.hg', '.hg.':
516 if p in lparts[1:]:
516 if p in lparts[1:]:
517 pos = lparts.index(p)
517 pos = lparts.index(p)
518 base = os.path.join(*parts[:pos])
518 base = os.path.join(*parts[:pos])
519 raise Abort(_('path %r is inside repo %r') % (path, base))
519 raise Abort(_('path %r is inside repo %r') % (path, base))
520 def check(prefix):
520 def check(prefix):
521 curpath = os.path.join(self.root, prefix)
521 curpath = os.path.join(self.root, prefix)
522 try:
522 try:
523 st = os.lstat(curpath)
523 st = os.lstat(curpath)
524 except OSError, err:
524 except OSError, err:
525 # EINVAL can be raised as invalid path syntax under win32.
525 # EINVAL can be raised as invalid path syntax under win32.
526 # These errors must be ignored so that patterns can be checked too.
526 # These errors must be ignored so that patterns can be checked too.
527 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
527 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
528 raise
528 raise
529 else:
529 else:
530 if stat.S_ISLNK(st.st_mode):
530 if stat.S_ISLNK(st.st_mode):
531 raise Abort(_('path %r traverses symbolic link %r') %
531 raise Abort(_('path %r traverses symbolic link %r') %
532 (path, prefix))
532 (path, prefix))
533 elif (stat.S_ISDIR(st.st_mode) and
533 elif (stat.S_ISDIR(st.st_mode) and
534 os.path.isdir(os.path.join(curpath, '.hg'))):
534 os.path.isdir(os.path.join(curpath, '.hg'))):
535 raise Abort(_('path %r is inside repo %r') %
535 raise Abort(_('path %r is inside repo %r') %
536 (path, prefix))
536 (path, prefix))
537 parts.pop()
537 parts.pop()
538 prefixes = []
538 prefixes = []
539 while parts:
539 while parts:
540 prefix = os.sep.join(parts)
540 prefix = os.sep.join(parts)
541 if prefix in self.auditeddir:
541 if prefix in self.auditeddir:
542 break
542 break
543 check(prefix)
543 check(prefix)
544 prefixes.append(prefix)
544 prefixes.append(prefix)
545 parts.pop()
545 parts.pop()
546
546
547 self.audited.add(path)
547 self.audited.add(path)
548 # only add prefixes to the cache after checking everything: we don't
548 # only add prefixes to the cache after checking everything: we don't
549 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
549 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
550 self.auditeddir.update(prefixes)
550 self.auditeddir.update(prefixes)
551
551
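A small sketch of the auditor in action; the '/repo' root and the sample paths are made up, paths use '/' as on POSIX, and each of the flagged paths raises Abort for the reason noted in the comment.

    audit = path_auditor('/repo')
    audit('a/b.txt')                    # accepted: nothing suspicious
    for bad in ('.hg/hgrc',             # under the top-level .hg
                'a/../etc/passwd',      # contains '..'
                'sub/.hg/hgrc'):        # inside a nested repository
        try:
            audit(bad)
        except Abort:
            pass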
552 def nlinks(pathname):
552 def nlinks(pathname):
553 """Return number of hardlinks for the given file."""
553 """Return number of hardlinks for the given file."""
554 return os.lstat(pathname).st_nlink
554 return os.lstat(pathname).st_nlink
555
555
556 if hasattr(os, 'link'):
556 if hasattr(os, 'link'):
557 os_link = os.link
557 os_link = os.link
558 else:
558 else:
559 def os_link(src, dst):
559 def os_link(src, dst):
560 raise OSError(0, _("Hardlinks not supported"))
560 raise OSError(0, _("Hardlinks not supported"))
561
561
562 def lookup_reg(key, name=None, scope=None):
562 def lookup_reg(key, name=None, scope=None):
563 return None
563 return None
564
564
565 def hidewindow():
565 def hidewindow():
566 """Hide current shell window.
566 """Hide current shell window.
567
567
568 Used to hide the window opened when starting asynchronous
568 Used to hide the window opened when starting asynchronous
569 child process under Windows, unneeded on other systems.
569 child process under Windows, unneeded on other systems.
570 """
570 """
571 pass
571 pass
572
572
573 if os.name == 'nt':
573 if os.name == 'nt':
574 from windows import *
574 from windows import *
575 else:
575 else:
576 from posix import *
576 from posix import *
577
577
578 def makelock(info, pathname):
578 def makelock(info, pathname):
579 try:
579 try:
580 return os.symlink(info, pathname)
580 return os.symlink(info, pathname)
581 except OSError, why:
581 except OSError, why:
582 if why.errno == errno.EEXIST:
582 if why.errno == errno.EEXIST:
583 raise
583 raise
584 except AttributeError: # no symlink in os
584 except AttributeError: # no symlink in os
585 pass
585 pass
586
586
587 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
587 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
588 os.write(ld, info)
588 os.write(ld, info)
589 os.close(ld)
589 os.close(ld)
590
590
591 def readlock(pathname):
591 def readlock(pathname):
592 try:
592 try:
593 return os.readlink(pathname)
593 return os.readlink(pathname)
594 except OSError, why:
594 except OSError, why:
595 if why.errno not in (errno.EINVAL, errno.ENOSYS):
595 if why.errno not in (errno.EINVAL, errno.ENOSYS):
596 raise
596 raise
597 except AttributeError: # no symlink in os
597 except AttributeError: # no symlink in os
598 pass
598 pass
599 return posixfile(pathname).read()
599 return posixfile(pathname).read()
600
600
601 def fstat(fp):
601 def fstat(fp):
602 '''stat file object that may not have fileno method.'''
602 '''stat file object that may not have fileno method.'''
603 try:
603 try:
604 return os.fstat(fp.fileno())
604 return os.fstat(fp.fileno())
605 except AttributeError:
605 except AttributeError:
606 return os.stat(fp.name)
606 return os.stat(fp.name)
607
607
608 # File system features
608 # File system features
609
609
610 def checkcase(path):
610 def checkcase(path):
611 """
611 """
612 Check whether the given path is on a case-sensitive filesystem
612 Check whether the given path is on a case-sensitive filesystem
613
613
614 Requires a path (like /foo/.hg) ending with a foldable final
614 Requires a path (like /foo/.hg) ending with a foldable final
615 directory component.
615 directory component.
616 """
616 """
617 s1 = os.stat(path)
617 s1 = os.stat(path)
618 d, b = os.path.split(path)
618 d, b = os.path.split(path)
619 p2 = os.path.join(d, b.upper())
619 p2 = os.path.join(d, b.upper())
620 if path == p2:
620 if path == p2:
621 p2 = os.path.join(d, b.lower())
621 p2 = os.path.join(d, b.lower())
622 try:
622 try:
623 s2 = os.stat(p2)
623 s2 = os.stat(p2)
624 if s2 == s1:
624 if s2 == s1:
625 return False
625 return False
626 return True
626 return True
627 except:
627 except:
628 return True
628 return True
629
629
630 _fspathcache = {}
630 _fspathcache = {}
631 def fspath(name, root):
631 def fspath(name, root):
632 '''Get name in the case stored in the filesystem
632 '''Get name in the case stored in the filesystem
633
633
634 The name is either relative to root, or it is an absolute path starting
634 The name is either relative to root, or it is an absolute path starting
635 with root. Note that this function is unnecessary, and should not be
635 with root. Note that this function is unnecessary, and should not be
636 called, for case-sensitive filesystems (simply because it's expensive).
636 called, for case-sensitive filesystems (simply because it's expensive).
637 '''
637 '''
638 # If name is absolute, make it relative
638 # If name is absolute, make it relative
639 if name.lower().startswith(root.lower()):
639 if name.lower().startswith(root.lower()):
640 l = len(root)
640 l = len(root)
641 if name[l] == os.sep or name[l] == os.altsep:
641 if name[l] == os.sep or name[l] == os.altsep:
642 l = l + 1
642 l = l + 1
643 name = name[l:]
643 name = name[l:]
644
644
645 if not os.path.exists(os.path.join(root, name)):
645 if not os.path.lexists(os.path.join(root, name)):
646 return None
646 return None
647
647
648 seps = os.sep
648 seps = os.sep
649 if os.altsep:
649 if os.altsep:
650 seps = seps + os.altsep
650 seps = seps + os.altsep
651 # Protect backslashes. This gets silly very quickly.
651 # Protect backslashes. This gets silly very quickly.
652 seps.replace('\\','\\\\')
652 seps.replace('\\','\\\\')
653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
654 dir = os.path.normcase(os.path.normpath(root))
654 dir = os.path.normcase(os.path.normpath(root))
655 result = []
655 result = []
656 for part, sep in pattern.findall(name):
656 for part, sep in pattern.findall(name):
657 if sep:
657 if sep:
658 result.append(sep)
658 result.append(sep)
659 continue
659 continue
660
660
661 if dir not in _fspathcache:
661 if dir not in _fspathcache:
662 _fspathcache[dir] = os.listdir(dir)
662 _fspathcache[dir] = os.listdir(dir)
663 contents = _fspathcache[dir]
663 contents = _fspathcache[dir]
664
664
665 lpart = part.lower()
665 lpart = part.lower()
666 lenp = len(part)
666 lenp = len(part)
667 for n in contents:
667 for n in contents:
668 if lenp == len(n) and n.lower() == lpart:
668 if lenp == len(n) and n.lower() == lpart:
669 result.append(n)
669 result.append(n)
670 break
670 break
671 else:
671 else:
672 # Cannot happen, as the file exists!
672 # Cannot happen, as the file exists!
673 result.append(part)
673 result.append(part)
674 dir = os.path.join(dir, lpart)
674 dir = os.path.join(dir, lpart)
675
675
676 return ''.join(result)
676 return ''.join(result)
677
677
678 def checkexec(path):
678 def checkexec(path):
679 """
679 """
680 Check whether the given path is on a filesystem with UNIX-like exec flags
680 Check whether the given path is on a filesystem with UNIX-like exec flags
681
681
682 Requires a directory (like /foo/.hg)
682 Requires a directory (like /foo/.hg)
683 """
683 """
684
684
685 # VFAT on some Linux versions can flip mode but it doesn't persist
685 # VFAT on some Linux versions can flip mode but it doesn't persist
686 # a FS remount. Frequently we can detect it if files are created
686 # a FS remount. Frequently we can detect it if files are created
687 # with exec bit on.
687 # with exec bit on.
688
688
689 try:
689 try:
690 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
690 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
691 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
691 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
692 try:
692 try:
693 os.close(fh)
693 os.close(fh)
694 m = os.stat(fn).st_mode & 0777
694 m = os.stat(fn).st_mode & 0777
695 new_file_has_exec = m & EXECFLAGS
695 new_file_has_exec = m & EXECFLAGS
696 os.chmod(fn, m ^ EXECFLAGS)
696 os.chmod(fn, m ^ EXECFLAGS)
697 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
697 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
698 finally:
698 finally:
699 os.unlink(fn)
699 os.unlink(fn)
700 except (IOError, OSError):
700 except (IOError, OSError):
701 # we don't care, the user probably won't be able to commit anyway
701 # we don't care, the user probably won't be able to commit anyway
702 return False
702 return False
703 return not (new_file_has_exec or exec_flags_cannot_flip)
703 return not (new_file_has_exec or exec_flags_cannot_flip)
704
704
705 def checklink(path):
705 def checklink(path):
706 """check whether the given path is on a symlink-capable filesystem"""
706 """check whether the given path is on a symlink-capable filesystem"""
707 # mktemp is not racy because symlink creation will fail if the
707 # mktemp is not racy because symlink creation will fail if the
708 # file already exists
708 # file already exists
709 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
709 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
710 try:
710 try:
711 os.symlink(".", name)
711 os.symlink(".", name)
712 os.unlink(name)
712 os.unlink(name)
713 return True
713 return True
714 except (OSError, AttributeError):
714 except (OSError, AttributeError):
715 return False
715 return False
716
716
717 def needbinarypatch():
717 def needbinarypatch():
718 """return True if patches should be applied in binary mode by default."""
718 """return True if patches should be applied in binary mode by default."""
719 return os.name == 'nt'
719 return os.name == 'nt'
720
720
721 def endswithsep(path):
721 def endswithsep(path):
722 '''Check path ends with os.sep or os.altsep.'''
722 '''Check path ends with os.sep or os.altsep.'''
723 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
723 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
724
724
725 def splitpath(path):
725 def splitpath(path):
726 '''Split path by os.sep.
726 '''Split path by os.sep.
727 Note that this function does not use os.altsep because it is
727 Note that this function does not use os.altsep because it is
728 meant as a simple alternative to "xxx.split(os.sep)".
728 meant as a simple alternative to "xxx.split(os.sep)".
729 It is recommended to use os.path.normpath() before using this
729 It is recommended to use os.path.normpath() before using this
730 function if needed.'''
730 function if needed.'''
731 return path.split(os.sep)
731 return path.split(os.sep)
732
732
733 def gui():
733 def gui():
734 '''Are we running in a GUI?'''
734 '''Are we running in a GUI?'''
735 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
735 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
736
736
737 def mktempcopy(name, emptyok=False, createmode=None):
737 def mktempcopy(name, emptyok=False, createmode=None):
738 """Create a temporary file with the same contents from name
738 """Create a temporary file with the same contents from name
739
739
740 The permission bits are copied from the original file.
740 The permission bits are copied from the original file.
741
741
742 If the temporary file is going to be truncated immediately, you
742 If the temporary file is going to be truncated immediately, you
743 can use emptyok=True as an optimization.
743 can use emptyok=True as an optimization.
744
744
745 Returns the name of the temporary file.
745 Returns the name of the temporary file.
746 """
746 """
747 d, fn = os.path.split(name)
747 d, fn = os.path.split(name)
748 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
748 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
749 os.close(fd)
749 os.close(fd)
750 # Temporary files are created with mode 0600, which is usually not
750 # Temporary files are created with mode 0600, which is usually not
751 # what we want. If the original file already exists, just copy
751 # what we want. If the original file already exists, just copy
752 # its mode. Otherwise, manually obey umask.
752 # its mode. Otherwise, manually obey umask.
753 try:
753 try:
754 st_mode = os.lstat(name).st_mode & 0777
754 st_mode = os.lstat(name).st_mode & 0777
755 except OSError, inst:
755 except OSError, inst:
756 if inst.errno != errno.ENOENT:
756 if inst.errno != errno.ENOENT:
757 raise
757 raise
758 st_mode = createmode
758 st_mode = createmode
759 if st_mode is None:
759 if st_mode is None:
760 st_mode = ~umask
760 st_mode = ~umask
761 st_mode &= 0666
761 st_mode &= 0666
762 os.chmod(temp, st_mode)
762 os.chmod(temp, st_mode)
763 if emptyok:
763 if emptyok:
764 return temp
764 return temp
765 try:
765 try:
766 try:
766 try:
767 ifp = posixfile(name, "rb")
767 ifp = posixfile(name, "rb")
768 except IOError, inst:
768 except IOError, inst:
769 if inst.errno == errno.ENOENT:
769 if inst.errno == errno.ENOENT:
770 return temp
770 return temp
771 if not getattr(inst, 'filename', None):
771 if not getattr(inst, 'filename', None):
772 inst.filename = name
772 inst.filename = name
773 raise
773 raise
774 ofp = posixfile(temp, "wb")
774 ofp = posixfile(temp, "wb")
775 for chunk in filechunkiter(ifp):
775 for chunk in filechunkiter(ifp):
776 ofp.write(chunk)
776 ofp.write(chunk)
777 ifp.close()
777 ifp.close()
778 ofp.close()
778 ofp.close()
779 except:
779 except:
780 try: os.unlink(temp)
780 try: os.unlink(temp)
781 except: pass
781 except: pass
782 raise
782 raise
783 return temp
783 return temp
784
784
785 class atomictempfile(object):
785 class atomictempfile(object):
786 """file-like object that atomically updates a file
786 """file-like object that atomically updates a file
787
787
788 All writes will be redirected to a temporary copy of the original
788 All writes will be redirected to a temporary copy of the original
789 file. When rename is called, the copy is renamed to the original
789 file. When rename is called, the copy is renamed to the original
790 name, making the changes visible.
790 name, making the changes visible.
791 """
791 """
792 def __init__(self, name, mode='w+b', createmode=None):
792 def __init__(self, name, mode='w+b', createmode=None):
793 self.__name = name
793 self.__name = name
794 self._fp = None
794 self._fp = None
795 self.temp = mktempcopy(name, emptyok=('w' in mode),
795 self.temp = mktempcopy(name, emptyok=('w' in mode),
796 createmode=createmode)
796 createmode=createmode)
797 self._fp = posixfile(self.temp, mode)
797 self._fp = posixfile(self.temp, mode)
798
798
799 def __getattr__(self, name):
799 def __getattr__(self, name):
800 return getattr(self._fp, name)
800 return getattr(self._fp, name)
801
801
802 def rename(self):
802 def rename(self):
803 if not self._fp.closed:
803 if not self._fp.closed:
804 self._fp.close()
804 self._fp.close()
805 rename(self.temp, localpath(self.__name))
805 rename(self.temp, localpath(self.__name))
806
806
807 def __del__(self):
807 def __del__(self):
808 if not self._fp:
808 if not self._fp:
809 return
809 return
810 if not self._fp.closed:
810 if not self._fp.closed:
811 try:
811 try:
812 os.unlink(self.temp)
812 os.unlink(self.temp)
813 except: pass
813 except: pass
814 self._fp.close()
814 self._fp.close()
815
815
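A short sketch of the write-then-rename pattern this class provides; 'data.txt' is a hypothetical target path. Readers of the file see either the old contents or the complete new contents, never a half-written state.

    f = atomictempfile('data.txt', 'wb')
    f.write('new contents\n')
    f.rename()    # the temporary copy atomically replaces data.txt;
                  # until then the original file, if any, is untouched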
816 def makedirs(name, mode=None):
816 def makedirs(name, mode=None):
817 """recursive directory creation with parent mode inheritance"""
817 """recursive directory creation with parent mode inheritance"""
818 try:
818 try:
819 os.mkdir(name)
819 os.mkdir(name)
820 if mode is not None:
820 if mode is not None:
821 os.chmod(name, mode)
821 os.chmod(name, mode)
822 return
822 return
823 except OSError, err:
823 except OSError, err:
824 if err.errno == errno.EEXIST:
824 if err.errno == errno.EEXIST:
825 return
825 return
826 if err.errno != errno.ENOENT:
826 if err.errno != errno.ENOENT:
827 raise
827 raise
828 parent = os.path.abspath(os.path.dirname(name))
828 parent = os.path.abspath(os.path.dirname(name))
829 makedirs(parent, mode)
829 makedirs(parent, mode)
830 makedirs(name, mode)
830 makedirs(name, mode)
831
831
class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    @propertycache
    def _can_symlink(self):
        return checklink(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

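A hypothetical sketch of how an opener instance behaves (not part of the changeset; paths are made up): relative paths are resolved against base, audited, and missing parent directories are created on write:

    from mercurial import util

    op = util.opener('/tmp/sandbox')
    f = op('notes/todo.txt', 'w')    # creates /tmp/sandbox/notes/ if needed
    f.write('remember to test symlinks\n')
    f.close()
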
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        self.iter = iter(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk

        return buf


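A small sketch of the read() contract (illustrative only): the chunk boundaries of the input iterator are invisible to the caller, and a short read signals exhaustion:

    from mercurial import util

    buf = util.chunkbuffer(iter(['abc', 'defg']))
    buf.read(5)    # -> 'abcde'
    buf.read(10)   # -> 'fg' (iterator ran dry)
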
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

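Typical use (illustrative; the file name and process() helper are hypothetical) is streaming a large file without holding it in memory:

    from mercurial import util

    fp = open('bundle.hg', 'rb')
    for chunk in util.filechunkiter(fp, size=8192):
        process(chunk)      # placeholder for whatever consumes the data
    fp.close()
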
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

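An illustrative call (not part of the changeset); the offset in the tuple is in seconds west of UTC, so 18000 renders as -0500 (second example assumes the platform accepts negative gmtime values):

    from mercurial import util

    util.datestr((0, 0))        # -> 'Thu Jan 01 00:00:00 1970 +0000'
    util.datestr((0, 18000))    # -> 'Wed Dec 31 19:00:00 1969 -0500'
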
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

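A short sketch (illustrative, dates made up): parsedate accepts either a raw "unixtime offset" pair or one of the configured formats, and returns the (unixtime, offset) tuple used throughout Mercurial, offset being seconds west of UTC:

    from mercurial import util

    util.parsedate('0 0')                                     # -> (0, 0)
    when, offset = util.parsedate('2008-06-10 13:34:56 +0200')
    # offset == -7200, i.e. '+0200' is two hours east of UTC
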
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

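Illustrative use (not part of the changeset): the returned predicate takes a unix timestamp, which makes it easy to combine with parsedate:

    from mercurial import util

    after = util.matchdate('>2008-06-01')
    after(util.parsedate('2008-06-10 00:00:00 +0000')[0])   # -> True
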
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

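Both helpers are pure string manipulation; an illustrative pair of calls (the author string is hypothetical):

    from mercurial import util

    author = 'John Doe <john.doe@example.com>'
    util.shortuser(author)   # -> 'john'
    util.email(author)       # -> 'john.doe@example.com'
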
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength - 3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

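An illustrative scan (not part of the changeset; the path is hypothetical). With followsym=True symlinked directories are visited only once, and with recurse=True nested repositories are reported too:

    from mercurial import util

    for repo in util.walkrepos('/srv/hg', followsym=True, recurse=True):
        print repo
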
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

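A quick sketch of the HGRCPATH behaviour described in the docstring (illustrative; the paths are made up, and the result is cached in _rcpath after the first call):

    import os
    from mercurial import util

    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d',
                                              '/home/user/.hgrc'])
    util.rcpath()   # directory entries expand to their *.rc files,
                    # plain file entries are kept as-is
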
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

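Illustrative values (not part of the changeset):

    from mercurial import util

    util.bytecount(1024)        # -> '1.00 KB'
    util.bytecount(123456789)   # -> '118 MB'
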
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            if scheme == 'file':
                i = path.find('/', 2)
                if i == -1:
                    return ''
                # On Windows, absolute paths are rooted at the current drive
                # root. On POSIX they are rooted at the file system root.
                if os.name == 'nt':
                    droot = os.path.splitdrive(os.getcwd())[0] + '/'
                    path = os.path.join(droot, path[i + 1:])
                else:
                    path = path[i:]
            else:
                path = path[2:]
    return path

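Illustrative calls (not part of the changeset; the POSIX result is shown, Windows keeps the current drive root instead):

    from mercurial import util

    util.drop_scheme('file', 'file:///srv/repo')          # -> '/srv/repo' on POSIX
    util.drop_scheme('http', 'http://example.com/repo')   # -> 'example.com/repo'
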
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

#### naming convention of below implementation follows 'textwrap' module

class MBTextWrapper(textwrap.TextWrapper):
    def __init__(self, **kwargs):
        textwrap.TextWrapper.__init__(self, **kwargs)

    def _cutdown(self, str, space_left):
        l = 0
        ucstr = unicode(str, encoding.encoding)
        w = unicodedata.east_asian_width
        for i in xrange(len(ucstr)):
            l += w(ucstr[i]) in 'WFA' and 2 or 1
            if space_left < l:
                return (ucstr[:i].encode(encoding.encoding),
                        ucstr[i:].encode(encoding.encoding))
        return str, ''

    # ----------------------------------------
    # overriding of base class

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        space_left = max(width - cur_len, 1)

        if self.break_long_words:
            cut, res = self._cutdown(reversed_chunks[-1], space_left)
            cur_line.append(cut)
            reversed_chunks[-1] = res
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

#### naming convention of above implementation follows 'textwrap' module

def wrap(line, width=None, initindent='', hangindent=''):
    if width is None:
        width = termwidth() - 2
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

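An illustrative call (not part of the changeset): hangindent is applied to every line after the first, and width falls back to the terminal width when not given:

    from mercurial import util

    print util.wrap('a very long one-line description that needs wrapping',
                    width=30, hangindent='    ')
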
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # The Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

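A hypothetical sketch of the condfn contract (illustrative only, not how hg itself wires this up): spawn a detached 'hg serve' and poll a socket until it answers or the child dies:

    import socket
    from mercurial import util

    def listening():
        s = socket.socket()
        try:
            s.connect(('localhost', 8000))
            s.close()
            return True
        except socket.error:
            return False

    pid = util.rundetached(['hg', 'serve', '-p', '8000'], listening)
    if pid < 0:
        print 'child failed to start'
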
try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    return termwidth_()